1 开发环境
1、系统:Windows
2、IDE工具:Eclipse-Juno
3、HBase-0.98.4-hadoop1
2 前提条件
1、已经按照“Hadoop”目录中“HBase安装”文章,搭建起了HBase集群,并且集群已经启动。
3 项目布局
4 开发流程
4.1 新建项目
可以普通项目,也可以Web项目。此处新建Java Web项目:CloudDataBase。
4.2 添加JAR包
进入HBase文件下的lib目录,拷贝该目录下的所有Jar包,复制到项目CloudDataBase\WebContent\WEB-INF\lib文件夹下:
若是普通Java项目,那么可以在项目上点击右键,选择Properties,在弹出的对话框中选择“Java Build Path”,再单击Libraries选项卡,在该选项卡下单击“Add External JARs”按钮,定位到HBase文件下的lib目录,并选取其中的所有Jar包。
(Properties->Java Build Path->Libraries->Add External JARs)
4.3 添加hbase-site.xml配置文件
在工程根目录下创建一个名字为“conf”的文件夹,从已经搭建好的HBase集群中,找到$HBASE_HOME/conf/目录中的hbase-site.xml文件,并将其复制到该文件夹中(比如,我搭建好的集群,该文件在Linux服务器中“/opt/hbase-0.98.4/conf/”目录下。注意,需要是已经配置好的HBase集群中的hbase-site.xml文件,刚下载的HBase部署文件中的该文件是不能使用的)。
再次右键点击项目,选择Properties->Java Build Path->Libraries->Add Class Folder,然后选择conf文件夹。
4.4 在windows中添加集群的hosts信息
windows下开发HBase应用程序,而HBase部署在linux环境中,在运行调试时会出现无法找到主机,类似异常信息如下:java.net.UnknownHostException: unknown host: master
解决方案:
在C:\WINDOWS\system32\drivers\etc\hosts文件中添加如下信息
[server_IP] [server_name]
比如,针对我之前搭建的集群,添加的内容如下:
4.5 编写代码
package com.whensee.service;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class HBaseTestCase {
    // Shared client configuration; HBaseConfiguration.create() loads
    // hbase-site.xml from the classpath (the "conf" folder added earlier).
    static Configuration cfg = HBaseConfiguration.create();

    /**
     * Creates a table with a single column family via HBaseAdmin / HTableDescriptor.
     * Only prints a message when the table already exists.
     *
     * @param tablename    name of the table to create
     * @param columnFamily name of the single column family
     * @throws Exception if the cluster is unreachable or creation fails
     */
    public static void create(String tablename, String columnFamily) throws Exception {
        // try-with-resources: HBaseAdmin holds a ZooKeeper connection that must be released.
        try (HBaseAdmin admin = new HBaseAdmin(cfg)) {
            if (admin.tableExists(tablename)) {
                System.out.println("table Exists!");
            } else {
                HTableDescriptor tableDesc = new HTableDescriptor(tablename);
                tableDesc.addFamily(new HColumnDescriptor(columnFamily));
                admin.createTable(tableDesc);
                System.out.println("create table success!");
            }
        }
    }

    /**
     * Inserts a single cell into an existing table via HTable / Put.
     *
     * @param tablename    target table (must already exist)
     * @param row          row key
     * @param columnFamily column family of the cell
     * @param column       column qualifier of the cell
     * @param data         cell value
     * @throws Exception if the write fails
     */
    public static void put(String tablename, String row,
            String columnFamily, String column, String data) throws Exception {
        try (HTable table = new HTable(cfg, tablename)) {
            Put p1 = new Put(Bytes.toBytes(row));
            p1.add(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes.toBytes(data));
            table.put(p1);
            System.out.println("put '"+row+"','"+columnFamily+":"+column+"','"+data+"'");
        }
    }

    /**
     * Fetches one row by key and prints the Result.
     *
     * @param tablename table to read from
     * @param row       row key to fetch
     * @throws IOException if the read fails
     */
    public static void get(String tablename, String row) throws IOException {
        try (HTable table = new HTable(cfg, tablename)) {
            Get g = new Get(Bytes.toBytes(row));
            Result result = table.get(g);
            System.out.println("Get: "+result);
        }
    }

    /**
     * Prints every row of the table via a full-table Scan.
     *
     * @param tablename table to scan
     * @throws Exception if the scan fails
     */
    public static void scan(String tablename) throws Exception {
        try (HTable table = new HTable(cfg, tablename)) {
            Scan s = new Scan();
            // ResultScanner is also Closeable; close it to free server-side resources.
            try (ResultScanner rs = table.getScanner(s)) {
                for (Result r : rs) {
                    System.out.println("Scan: "+r);
                }
            }
        }
    }

    /**
     * Disables and drops the table if it exists.
     *
     * @param tablename table to delete
     * @return true on success (or if the table did not exist), false on failure
     * @throws IOException if the admin connection cannot be opened
     */
    public static boolean delete(String tablename) throws IOException {
        try (HBaseAdmin admin = new HBaseAdmin(cfg)) {
            if (admin.tableExists(tablename)) {
                try {
                    // A table must be disabled before it can be deleted.
                    admin.disableTable(tablename);
                    admin.deleteTable(tablename);
                } catch (Exception ex) {
                    ex.printStackTrace();
                    return false;
                }
            }
        }
        return true;
    }

    /** Demo driver: create the table, write one cell, read a row, scan everything. */
    public static void main(String[] args) {
        String tablename = "tb_test";
        String columnFamily = "cf_user";
        try {
            HBaseTestCase.create(tablename, columnFamily);
            HBaseTestCase.put(tablename, "row005", columnFamily, "001", "data");
            HBaseTestCase.get(tablename, "row001");
            HBaseTestCase.scan(tablename);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
package com.whensee.service;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class HBaseTestCase {
    // Client configuration read from hbase-site.xml on the classpath.
    static Configuration cfg = HBaseConfiguration.create();

    // Create a table holding one column family, using HBaseAdmin + HTableDescriptor.
    public static void create(String tablename, String columnFamily) throws Exception {
        HBaseAdmin hbaseAdmin = new HBaseAdmin(cfg);
        if (!hbaseAdmin.tableExists(tablename)) {
            HTableDescriptor descriptor = new HTableDescriptor(tablename);
            descriptor.addFamily(new HColumnDescriptor(columnFamily));
            hbaseAdmin.createTable(descriptor);
            System.out.println("create table success!");
        } else {
            System.out.println("table Exists!");
        }
    }

    // Write a single cell (row / family:qualifier = data) into an existing table.
    public static void put(String tablename, String row,
            String columnFamily, String column, String data) throws Exception {
        Put mutation = new Put(Bytes.toBytes(row));
        mutation.add(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes.toBytes(data));
        HTable target = new HTable(cfg, tablename);
        target.put(mutation);
        System.out.println("put '"+row+"','"+columnFamily+":"+column+"','"+data+"'");
    }

    // Fetch one row by key and print the resulting cells.
    public static void get(String tablename, String row) throws IOException {
        Get query = new Get(Bytes.toBytes(row));
        HTable source = new HTable(cfg, tablename);
        Result fetched = source.get(query);
        System.out.println("Get: "+fetched);
    }

    // Print every row in the table through a full-table scan.
    public static void scan(String tablename) throws Exception {
        HTable source = new HTable(cfg, tablename);
        ResultScanner scanner = source.getScanner(new Scan());
        for (Result row : scanner) {
            System.out.println("Scan: "+row);
        }
    }

    // Disable and drop the table if present; false when the drop fails.
    public static boolean delete(String tablename) throws IOException {
        HBaseAdmin hbaseAdmin = new HBaseAdmin(cfg);
        boolean succeeded = true;
        if (hbaseAdmin.tableExists(tablename)) {
            try {
                hbaseAdmin.disableTable(tablename);
                hbaseAdmin.deleteTable(tablename);
            } catch (Exception ex) {
                ex.printStackTrace();
                succeeded = false;
            }
        }
        return succeeded;
    }

    // Demo entry point: create, write one cell, read one row, scan all rows.
    public static void main(String[] args) {
        final String tablename = "tb_test";
        final String columnFamily = "cf_user";
        try {
            create(tablename, columnFamily);
            put(tablename, "row005", columnFamily, "001", "data");
            get(tablename, "row001");
            scan(tablename);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
4.6 运行
put 'row005','cf_user:001','data'
Get: keyvalues={row001/cf_user:001/1406736598838/Put/vlen=12/mvcc=0, row001/cf_user:002/1406736463964/Put/vlen=9/mvcc=0, row001/cf_user:003/1406736709965/Put/vlen=10/mvcc=0}
Scan: keyvalues={row001/cf_user:001/1406736598838/Put/vlen=12/mvcc=0, row001/cf_user:002/1406736463964/Put/vlen=9/mvcc=0, row001/cf_user:003/1406736709965/Put/vlen=10/mvcc=0}
Scan: keyvalues={row002/cf_user:003/1406737192418/Put/vlen=11/mvcc=0, row002/cf_user:004/1406821566287/Put/vlen=6/mvcc=0}
Scan: keyvalues={row005/cf_user:001/1407473255193/Put/vlen=4/mvcc=0, row005/cf_user:004/1406774836964/Put/vlen=4/mvcc=0}
put 'row005','cf_user:001','data'
Get: keyvalues={row001/cf_user:001/1406736598838/Put/vlen=12/mvcc=0, row001/cf_user:002/1406736463964/Put/vlen=9/mvcc=0, row001/cf_user:003/1406736709965/Put/vlen=10/mvcc=0}
Scan: keyvalues={row001/cf_user:001/1406736598838/Put/vlen=12/mvcc=0, row001/cf_user:002/1406736463964/Put/vlen=9/mvcc=0, row001/cf_user:003/1406736709965/Put/vlen=10/mvcc=0}
Scan: keyvalues={row002/cf_user:003/1406737192418/Put/vlen=11/mvcc=0, row002/cf_user:004/1406821566287/Put/vlen=6/mvcc=0}
Scan: keyvalues={row005/cf_user:001/1407473255193/Put/vlen=4/mvcc=0, row005/cf_user:004/1406774836964/Put/vlen=4/mvcc=0}