import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * Minimal HBase client demo: create a table, write a row, and provide
 * helpers to delete tables/rows, read a single row, and scan a table.
 *
 * <p>Uses the pre-1.0 HBase client API ({@code HBaseAdmin}/{@code HTable}),
 * matching the imports at the top of this file.
 */
public class TestHBaseJave {
    /** Shared cluster configuration, built once from classpath resources. */
    private static Configuration conf = null;

    /**
     * Initialize the configuration. hbase-site.xml is picked up from the
     * classpath by HBaseConfiguration.create() already; the explicit
     * addResource call is redundant but harmless.
     */
    static {
        conf = HBaseConfiguration.create();
        conf.addResource("hbase-site.xml"); //this is default,so don't have to write this here
    }

    public static void main(String[] args) throws IOException {
        System.out.println("HelloWorld");
        String[] cfs = { "data" };
        createTable("Test", cfs);
        writeRow("Test", cfs);
    }

    /**
     * Creates {@code tableName} with the given column families, unless the
     * table already exists.
     *
     * @param tableName name of the table to create
     * @param cfs       column family names to add to the table descriptor
     * @throws IOException if the HBase operation fails
     */
    public static void createTable(String tableName, String[] cfs) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            if (admin.tableExists(tableName)) {
                System.out.println("this table is already exist!");
            } else {
                HTableDescriptor htd = new HTableDescriptor(tableName);
                for (String cf : cfs) {
                    htd.addFamily(new HColumnDescriptor(cf));
                }
                admin.createTable(htd);
                System.out.println("create table successfully");
            }
        } finally {
            admin.close(); // fix: admin connection was previously leaked
        }
    }

    /**
     * Disables and then deletes {@code tableName}. A table must be disabled
     * before HBase allows it to be deleted.
     *
     * @param tableName name of the table to delete
     * @throws IOException if the HBase operation fails
     */
    public static void deleteTable(String tableName) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
            System.out.println("delete table successfully");
        } finally {
            admin.close(); // fix: admin connection was previously leaked
        }
    }

    /**
     * Writes one row (key "rows1") with one cell per column family:
     * qualifier "1", value "value_1".
     *
     * @param tableName table to write into
     * @param cfs       column families that will each receive one cell
     * @throws IOException if the HBase operation fails
     */
    public static void writeRow(String tableName, String[] cfs) throws IOException {
        HTable table = new HTable(conf, tableName);
        try {
            Put put = new Put(Bytes.toBytes("rows1"));
            for (String cf : cfs) {
                put.add(Bytes.toBytes(cf), Bytes.toBytes("1"), Bytes.toBytes("value_1"));
            }
            // fix: the original submitted the same Put once per column family;
            // a single put() after all cells are added is sufficient.
            table.put(put);
        } finally {
            table.close(); // fix: table was previously leaked
        }
    }

    /**
     * Deletes the row {@code rowKey} from {@code tableName}.
     *
     * @param tableName table to delete from
     * @param rowKey    key of the row to remove
     * @throws IOException if the HBase operation fails
     */
    public static void deleteRow(String tableName, String rowKey) throws IOException {
        HTable table = new HTable(conf, tableName);
        try {
            // fix: Bytes.toBytes (UTF-8) instead of String.getBytes(), which
            // used the platform-default charset and could mismatch keys
            // written via Bytes.toBytes elsewhere in this class.
            Delete d1 = new Delete(Bytes.toBytes(rowKey));
            table.delete(d1);
            System.out.println("删除行成功!");
        } finally {
            table.close(); // fix: table was previously leaked
        }
    }

    /**
     * Fetches the row {@code rowKey} and prints every cell as
     * "row family:qualifier timestamp value".
     *
     * @param tableName table to read from
     * @param rowKey    key of the row to fetch
     * @throws IOException if the HBase operation fails
     */
    public static void selectRow(String tableName, String rowKey) throws IOException {
        HTable table = new HTable(conf, tableName);
        try {
            // fix: Bytes.toBytes for the key and Bytes.toString for cell
            // components — consistent UTF-8 instead of the platform charset.
            Get g = new Get(Bytes.toBytes(rowKey));
            Result rs = table.get(g);
            for (KeyValue kv : rs.raw()) {
                System.out.print(Bytes.toString(kv.getRow()) + " ");
                System.out.print(Bytes.toString(kv.getFamily()) + ":");
                System.out.print(Bytes.toString(kv.getQualifier()) + " ");
                System.out.print(kv.getTimestamp() + " ");
                System.out.println(Bytes.toString(kv.getValue()));
            }
        } finally {
            table.close(); // fix: table was previously leaked
        }
    }

    /**
     * Scans the whole table and prints every cell of every row in the same
     * "row family:qualifier timestamp value" format as {@link #selectRow}.
     * (Method name kept as-is — "scaner" — for caller compatibility.)
     *
     * @param tableName table to scan
     * @throws IOException if the HBase operation fails
     */
    public static void scaner(String tableName) throws IOException {
        HTable table = new HTable(conf, tableName);
        try {
            Scan s = new Scan();
            ResultScanner rs = table.getScanner(s);
            try {
                for (Result r : rs) {
                    for (KeyValue kv : r.raw()) {
                        System.out.print(Bytes.toString(kv.getRow()) + " ");
                        System.out.print(Bytes.toString(kv.getFamily()) + ":");
                        System.out.print(Bytes.toString(kv.getQualifier()) + " ");
                        System.out.print(kv.getTimestamp() + " ");
                        System.out.println(Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                rs.close(); // fix: scanner was previously leaked
            }
        } finally {
            table.close(); // fix: table was previously leaked
        }
    }
}
说明:
我们用org.apache.hadoop.hbase.HBaseConfiguration创建一个org.apache.hadoop.conf.Configuration实例;
这个Configuration会返回一个读入classpath下的hbase-site.xml 和 hbase-default.xml中HBase配置信息的Configuration;
此Configuration还将用于创建HBaseAdmin和HTable(在org.apache.hadoop.hbase.client包中)
HBaseAdmin 用于管理HBase集群,create,drop,list,enable and disable tables. add and drop table column families.
HTable 用于访问指定的表;add, update, and delete data from an individual table.
Configuration实例指向了集群上(执行这些代码的集群)的这些类;
要创建一个表,需要创建一个HBaseAdmin实例,让它来创建表;
我们用org.apache.hadoop.hbase.HTableDescriptor和org.apache.hadoop.hbase.HColumnDescriptor来修改表的模式;
要对表操作,需创建HTable实例。
Result是一个row对应的所有或部分cell,一个row对应一个Result;
KeyValue对应一个cell;
ResultScanner是Result的集合。