  • HBase Programming API Primer Series: modify (from the admin side) (10)

     

      Here we move on to something more advanced: in development, you should avoid modifying tables directly on the server.

      So instead, modify the HBase table from the admin (client) side, using a thread pool (the approach recommended for production development).

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;


    public class TableConnection {
        private TableConnection(){
        }
        private static HConnection connection = null;
        public static HConnection getConnection(){
            if(connection == null){
                ExecutorService pool = Executors.newFixedThreadPool(10);//create a fixed-size thread pool
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum","HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                try{
                    connection = HConnectionManager.createConnection(conf,pool);//create the connection from the configuration and the thread pool
                }catch (IOException e){
                    e.printStackTrace();//do not swallow the exception silently
                }
            }
            return connection;
        }
    }
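
      For reference, below is a minimal sketch (TableConnectionDemo is my own illustrative class name) of how client code obtains a table from this shared, thread-pooled connection. It follows the same pattern as the insertValue/getValue helpers further down; the table name test_table and the column f:name are just the examples used throughout this post.

    package zhouls.bigdata.HbaseProject.Pool;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableConnectionDemo {
        public static void main(String[] args) throws Exception {
            //obtain a table handle from the shared, pooled connection (do not create a new connection per request)
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Put put = new Put(Bytes.toBytes("row_01"));//row key row_01
            put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy0"));//family f, qualifier name
            table.put(put);
            table.close();//closes the table handle only; the shared connection stays open for reuse
        }
    }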

    1. Modifying an HBase table

    For now this part still has an error: the modifyTable call in main() is commented out because it does not compile as originally written. A corrected call is sketched after the listing.

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.NamespaceDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {

        public static void main(String[] args) throws Exception {
            // HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));//table name is test_table
            // Put put = new Put(Bytes.toBytes("row_04"));//row key is row_04
            // put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));//column family f, qualifier name, value Andy1
            // put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));//column family f2, qualifier name, value Andy3
            // table.put(put);
            // table.close();

            // Get get = new Get(Bytes.toBytes("row_04"));
            // get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));//if no column is specified, all columns are returned by default
            // org.apache.hadoop.hbase.client.Result rest = table.get(get);
            // System.out.println(rest.toString());
            // table.close();

            // Delete delete = new Delete(Bytes.toBytes("row_2"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            // table.delete(delete);
            // table.close();


            // Delete delete = new Delete(Bytes.toBytes("row_04"));
            //// delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumn deletes only the latest-timestamp version in the column family
            // delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumns deletes all timestamp versions in the column family
            // table.delete(delete);
            // table.close();


            // Scan scan = new Scan();
            // scan.setStartRow(Bytes.toBytes("row_01"));//start row key, inclusive
            // scan.setStopRow(Bytes.toBytes("row_03"));//stop row key, exclusive
            // scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
            // ResultScanner rst = table.getScanner(scan);//loop over the whole result set
            // System.out.println(rst.toString());
            // for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() )
            // {
            // for(Cell cell:next.rawCells()){//loop over the cells of a single row key
            // System.out.println(next.toString());
            // System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
            // System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
            // System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
            // }
            // }
            // table.close();

            HBaseTest hbasetest = new HBaseTest();
            // hbasetest.insertValue();
            // hbasetest.getValue();
            // hbasetest.delete();
            // hbasetest.scanValue();
            hbasetest.createTable("test_table3", "f");//check whether the table exists first, then create it (recommended for production)
            // hbasetest.deleteTable("test_table4");//check whether the table exists first, then delete it (recommended for production)
            // hbasetest.modifyTable("test_table", "row_02", "f", new HColumnDescriptor("f"));//the original call did not compile; see the corrected sketch after the listing
        }


        //In production development, it is recommended to do it this way, via the thread pool
        // public void insertValue() throws Exception{
        // HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        // Put put = new Put(Bytes.toBytes("row_01"));//row key is row_01
        // put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy0"));
        // table.put(put);
        // table.close();
        // }



        //In production development, it is recommended to do it this way, via the thread pool
        // public void getValue() throws Exception{
        // HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        // Get get = new Get(Bytes.toBytes("row_03"));
        // get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
        // org.apache.hadoop.hbase.client.Result rest = table.get(get);
        // System.out.println(rest.toString());
        // table.close();
        // }
        //

        //In production development, it is recommended to do it this way, via the thread pool
        // public void delete() throws Exception{
        // HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        // Delete delete = new Delete(Bytes.toBytes("row_01"));
        // delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumn deletes only the latest-timestamp version in the column family
        //// delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumns deletes all timestamp versions in the column family
        // table.delete(delete);
        // table.close();
        //
        // }

        //In production development, it is recommended to do it this way, via the thread pool
        // public void scanValue() throws Exception{
        // HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        // Scan scan = new Scan();
        // scan.setStartRow(Bytes.toBytes("row_02"));//start row key, inclusive
        // scan.setStopRow(Bytes.toBytes("row_04"));//stop row key, exclusive
        // scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
        // ResultScanner rst = table.getScanner(scan);//loop over the whole result set
        // System.out.println(rst.toString());
        // for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() )
        // {
        // for(Cell cell:next.rawCells()){//loop over the cells of a single row key
        // System.out.println(next.toString());
        // System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
        // System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
        // System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
        // }
        // }
        // table.close();
        // }
        //


        //In production development, it is recommended to do it this way, via the thread pool
        public void createTable(String tableName,String family) throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
            Configuration conf = HBaseConfiguration.create(getConfig());
            HBaseAdmin admin = new HBaseAdmin(conf);
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
            HColumnDescriptor hcd = new HColumnDescriptor(family);
            hcd.setMaxVersions(3);
            // hcd.set...//many more column-family options can be set at creation time; this is only a starting point
            tableDesc.addFamily(hcd);
            if (!admin.tableExists(tableName)){
                admin.createTable(tableDesc);
            }else{
                System.out.println(tableName + " already exists");
            }
            admin.close();
        }


        public void modifyTable(String tableName,String rowkey,String family,HColumnDescriptor hColumnDescriptor) throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
            Configuration conf = HBaseConfiguration.create(getConfig());
            HBaseAdmin admin = new HBaseAdmin(conf);
            // HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
            HColumnDescriptor hcd = new HColumnDescriptor(family);
            // NamespaceDescriptor nsd = admin.getNamespaceDescriptor(tableName);
            // nsd.setConfiguration("hbase.namespace.quota.maxregion", "10");
            // nsd.setConfiguration("hbase.namespace.quota.maxtables", "10");
            if (admin.tableExists(tableName)){
                admin.modifyColumn(tableName, hcd);
                // admin.modifyTable(tableName, tableDesc);
                // admin.modifyNamespace(nsd);
            }else{
                System.out.println(tableName + " does not exist");
            }
            admin.close();
        }


        //In production development, it is recommended to do it this way, via the thread pool
        // public void deleteTable(String tableName)throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
        // Configuration conf = HBaseConfiguration.create(getConfig());
        // HBaseAdmin admin = new HBaseAdmin(conf);
        // if (admin.tableExists(tableName)){
        // admin.disableTable(tableName);
        // admin.deleteTable(tableName);
        // }else{
        // System.out.println(tableName + " does not exist");
        // }
        // admin.close();
        // }




        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
            // conf.set("hbase.rootdir","hdfs:HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }
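
      About the error noted above: the modifyTable call in main() was commented out because it does not compile as written; it uses single quotes around a string literal and passes a String where the method expects an HColumnDescriptor. Also note that, as the method is written, the rowkey and hColumnDescriptor parameters are not actually used inside it; only the family name is. Below is a minimal sketch (ModifyTableDemo is my own illustrative class name, and max versions = 5 is an arbitrary example value) of a compiling call, followed by one way to actually change a column-family setting by configuring the descriptor and passing it to HBaseAdmin.modifyColumn, the same call the method uses internally.

    package zhouls.bigdata.HbaseProject.Pool;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class ModifyTableDemo {
        public static void main(String[] args) throws Exception {
            //a compiling call to the method above (only the family name "f" is actually used inside it)
            HBaseTest hbasetest = new HBaseTest();
            hbasetest.modifyTable("test_table", "row_02", "f", new HColumnDescriptor("f"));

            //to actually change a column-family setting, configure the descriptor yourself
            //and pass it to HBaseAdmin.modifyColumn
            HBaseAdmin admin = new HBaseAdmin(HBaseConfiguration.create(HBaseTest.getConfig()));
            HColumnDescriptor hcd = new HColumnDescriptor("f");
            hcd.setMaxVersions(5);//example: keep up to 5 versions per cell
            admin.modifyColumn("test_table", hcd);
            admin.close();
        }
    }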

  • Original article: https://www.cnblogs.com/zlslch/p/6159995.html