HBase Programming API Introduction Series: delete (from the admin side) (9)

      

If you have read my earlier post,

HBase Programming API Introduction: delete (from the client side)

you will know that in that post I performed deletes against an HBase table from the client side.

Here we take it a step further: in development, you should avoid dropping tables from the client side wherever possible.

So instead we drop HBase tables from the admin side, through a connection backed by a thread pool (which is also the first choice in production development).

package zhouls.bigdata.HbaseProject.Pool;

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;

public class TableConnection {

    private TableConnection() {
    }

    private static HConnection connection = null;

    public static HConnection getConnection() {
        if (connection == null) {
            ExecutorService pool = Executors.newFixedThreadPool(10);//fixed-size thread pool backing the connection
            Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            try {
                connection = HConnectionManager.createConnection(conf, pool);//create the connection from the configuration and the pool
            } catch (IOException e) {
                e.printStackTrace();//at minimum, report the failure instead of silently swallowing it
            }
        }
        return connection;
    }
}
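One caveat: the lazy initializer above is not thread-safe. Two threads calling getConnection() at the same time can both see null and create two connections. If the class may be used from multiple threads, a double-checked-locking variant along these lines would close that gap; this is a sketch of the same class (my illustration, not the original code), which also propagates the IOException to the caller rather than swallowing it:

package zhouls.bigdata.HbaseProject.Pool;

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;

public class TableConnection {

    private TableConnection() {
    }

    // volatile ensures a fully constructed connection is visible to all threads
    private static volatile HConnection connection = null;

    public static HConnection getConnection() throws IOException {
        if (connection == null) {
            synchronized (TableConnection.class) {
                if (connection == null) { // re-check inside the lock
                    ExecutorService pool = Executors.newFixedThreadPool(10);
                    Configuration conf = HBaseConfiguration.create();
                    conf.set("hbase.zookeeper.quorum",
                            "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                    connection = HConnectionManager.createConnection(conf, pool);
                }
            }
        }
        return connection;
    }
}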

1. Deleting an HBase table that does not exist

    hbase(main):062:0> list
    TABLE
    test_table
    test_table2
    test_table3
    test_table4
    4 row(s) in 0.1540 seconds

    => ["test_table", "test_table2", "test_table3", "test_table4"]
    hbase(main):063:0>

     

package zhouls.bigdata.HbaseProject.Pool;

import java.io.IOException;

import zhouls.bigdata.HbaseProject.Pool.TableConnection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseTest {

    public static void main(String[] args) throws Exception {
//        HTable table = new HTable(getConfig(), TableName.valueOf("test_table"));//table name is test_table
//        Put put = new Put(Bytes.toBytes("row_04"));//row key is row_04
//        put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy1"));//family f, qualifier name, value Andy1
//        put.add(Bytes.toBytes("f2"), Bytes.toBytes("name"), Bytes.toBytes("Andy3"));//family f2, qualifier name, value Andy3
//        table.put(put);
//        table.close();

//        Get get = new Get(Bytes.toBytes("row_04"));
//        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));//if no column is specified, every column is returned by default
//        org.apache.hadoop.hbase.client.Result rest = table.get(get);
//        System.out.println(rest.toString());
//        table.close();

//        Delete delete = new Delete(Bytes.toBytes("row_2"));
//        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
//        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
//        table.delete(delete);
//        table.close();

//        Delete delete = new Delete(Bytes.toBytes("row_04"));
////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumn deletes only the newest version of the column
//        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumns deletes every version of the column
//        table.delete(delete);
//        table.close();

//        Scan scan = new Scan();
//        scan.setStartRow(Bytes.toBytes("row_01"));//start row key, inclusive
//        scan.setStopRow(Bytes.toBytes("row_03"));//stop row key, exclusive
//        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
//        ResultScanner rst = table.getScanner(scan);//iterate over all matching rows
//        System.out.println(rst.toString());
//        for (org.apache.hadoop.hbase.client.Result next = rst.next(); next != null; next = rst.next()) {
//            for (Cell cell : next.rawCells()) {//iterate over the cells of a single row
//                System.out.println(next.toString());
//                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
//                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
//                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
//            }
//        }
//        table.close();

        HBaseTest hbasetest = new HBaseTest();
//        hbasetest.insertValue();
//        hbasetest.getValue();
//        hbasetest.delete();
//        hbasetest.scanValue();
//        hbasetest.createTable("test_table4", "f");
        hbasetest.deleteTable("test_table5");//deliberately delete a table that does not exist
    }

    //In production development, use the pooled connection like this
//    public void insertValue() throws Exception{
//        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
//        Put put = new Put(Bytes.toBytes("row_01"));//row key is row_01
//        put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy0"));
//        table.put(put);
//        table.close();
//    }

    //In production development, use the pooled connection like this
//    public void getValue() throws Exception{
//        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
//        Get get = new Get(Bytes.toBytes("row_03"));
//        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
//        org.apache.hadoop.hbase.client.Result rest = table.get(get);
//        System.out.println(rest.toString());
//        table.close();
//    }

    //In production development, use the pooled connection like this
//    public void delete() throws Exception{
//        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
//        Delete delete = new Delete(Bytes.toBytes("row_01"));
//        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumn deletes only the newest version of the column
////        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));//deleteColumns deletes every version of the column
//        table.delete(delete);
//        table.close();
//    }

    //In production development, use the pooled connection like this
//    public void scanValue() throws Exception{
//        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
//        Scan scan = new Scan();
//        scan.setStartRow(Bytes.toBytes("row_02"));//start row key, inclusive
//        scan.setStopRow(Bytes.toBytes("row_04"));//stop row key, exclusive
//        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
//        ResultScanner rst = table.getScanner(scan);//iterate over all matching rows
//        System.out.println(rst.toString());
//        for (org.apache.hadoop.hbase.client.Result next = rst.next(); next != null; next = rst.next()) {
//            for (Cell cell : next.rawCells()) {//iterate over the cells of a single row
//                System.out.println(next.toString());
//                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
//                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
//                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
//            }
//        }
//        table.close();
//    }

    //In production development, use the pooled connection like this
//    public void createTable(String tableName, String family) throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
//        Configuration conf = HBaseConfiguration.create(getConfig());
//        HBaseAdmin admin = new HBaseAdmin(conf);
//        HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
//        HColumnDescriptor hcd = new HColumnDescriptor(family);
//        hcd.setMaxVersions(3);
////        hcd.set//many more table-creation options exist; this is only to get you started
//        tableDesc.addFamily(hcd);
//        admin.createTable(tableDesc);
//        admin.close();
//    }

    public void deleteTable(String tableName) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create(getConfig());
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.disableTable(tableName);
        admin.deleteTable(tableName);
        admin.close();
    }

    public static Configuration getConfig() {
        Configuration configuration = new Configuration();
//        configuration.set("hbase.rootdir", "hdfs://HadoopMaster:9000/hbase");
        configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
        return configuration;
    }
}

Running main() produces the following output:

    2016-12-11 16:04:48,141 INFO [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x1417e278 connecting to ZooKeeper ensemble=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181
    2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
    2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:host.name=WIN-BQOBV63OBNM
    2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.version=1.7.0_51
    2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.vendor=Oracle Corporation
2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.home=C:\Program Files\Java\jdk1.7.0_51\jre
2016-12-11 16:04:48,150 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.class.path=... (the project bin directory plus the HBase 1.2.3 lib jars; full classpath omitted)
2016-12-11 16:04:48,151 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.library.path=... (omitted)
2016-12-11 16:04:48,151 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.io.tmpdir=C:\Users\ADMINI~1\AppData\Local\Temp
2016-12-11 16:04:48,151 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.compiler=<NA>
2016-12-11 16:04:48,151 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.name=Windows 7
2016-12-11 16:04:48,152 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.arch=amd64
2016-12-11 16:04:48,152 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.version=6.1
2016-12-11 16:04:48,152 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.name=Administrator
2016-12-11 16:04:48,152 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.home=C:\Users\Administrator
2016-12-11 16:04:48,152 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.dir=D:\Code\MyEclipseJavaCode\HbaseProject
    2016-12-11 16:04:48,153 INFO [org.apache.zookeeper.ZooKeeper] - Initiating client connection, connectString=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181 sessionTimeout=90000 watcher=hconnection-0x1417e2780x0, quorum=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181, baseZNode=/hbase
    2016-12-11 16:04:48,199 INFO [org.apache.zookeeper.ClientCnxn] - Opening socket connection to server HadoopMaster/192.168.80.10:2181. Will not attempt to authenticate using SASL (unknown error)
    2016-12-11 16:04:48,203 INFO [org.apache.zookeeper.ClientCnxn] - Socket connection established to HadoopMaster/192.168.80.10:2181, initiating session
    2016-12-11 16:04:48,762 INFO [org.apache.zookeeper.ClientCnxn] - Session establishment complete on server HadoopMaster/192.168.80.10:2181, sessionid = 0x1582556e7c50027, negotiated timeout = 40000
    2016-12-11 16:04:50,731 INFO [org.apache.hadoop.hbase.client.HBaseAdmin] - Started disable of test_table5
    Exception in thread "main" org.apache.hadoop.hbase.TableNotFoundException: test_table5
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
    at org.apache.hadoop.hbase.util.ForeignExceptionUtil.toIOException(ForeignExceptionUtil.java:45)
    at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.convertResult(HBaseAdmin.java:4621)
    at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.waitProcedureResult(HBaseAdmin.java:4579)
    at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.get(HBaseAdmin.java:4512)
    at org.apache.hadoop.hbase.client.HBaseAdmin.disableTable(HBaseAdmin.java:1331)
    at org.apache.hadoop.hbase.client.HBaseAdmin.disableTable(HBaseAdmin.java:1352)
    at zhouls.bigdata.HbaseProject.Pool.HBaseTest.deleteTable(HBaseTest.java:164)
    at zhouls.bigdata.HbaseProject.Pool.HBaseTest.main(HBaseTest.java:82)
    Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.TableNotFoundException): test_table5
    at org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.prepareDisable(DisableTableProcedure.java:281)
    at org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.executeFromState(DisableTableProcedure.java:133)
    at org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.executeFromState(DisableTableProcedure.java:54)
    at org.apache.hadoop.hbase.procedure2.StateMachineProcedure.execute(StateMachineProcedure.java:119)
    at org.apache.hadoop.hbase.procedure2.Procedure.doExecute(Procedure.java:498)
    at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execProcedure(ProcedureExecutor.java:1061)
    at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:856)
    at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:809)
    at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.access$400(ProcedureExecutor.java:75)
    at org.apache.hadoop.hbase.procedure2.ProcedureExecutor$2.run(ProcedureExecutor.java:495)
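The failure is exactly as expected: HBaseAdmin.disableTable() throws TableNotFoundException when the table is missing, as the stack trace shows. Besides the tableExists() check used in section 3 below, the drop could also be guarded with a try/catch. Here is a minimal sketch of that alternative, assuming the same HBase 1.2.x client API as the code above; the helper deleteTableIfPresent is my illustration, not part of the original class:

    public void deleteTableIfPresent(String tableName) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(HBaseConfiguration.create(getConfig()));
        try {
            admin.disableTable(tableName);//throws TableNotFoundException if the table is absent
            admin.deleteTable(tableName);
        } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
            System.out.println(tableName + " does not exist, nothing to delete");
        } finally {
            admin.close();//release the admin handle either way
        }
    }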

2. Deleting an HBase table that does exist

For this run the class is unchanged except for the call in main(), which now targets a table that does exist. The imports and the commented-out examples are identical to the listing above, so only the relevant parts are repeated here:

package zhouls.bigdata.HbaseProject.Pool;

// imports as in the previous listing

public class HBaseTest {

    public static void main(String[] args) throws Exception {
        HBaseTest hbasetest = new HBaseTest();
        hbasetest.deleteTable("test_table4");//delete a table that exists
    }

    public void deleteTable(String tableName) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create(getConfig());
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.disableTable(tableName);
        admin.deleteTable(tableName);
        admin.close();
    }

    public static Configuration getConfig() {
        Configuration configuration = new Configuration();
        configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
        return configuration;
    }
}

This time disableTable() and deleteTable() succeed, and test_table4 is gone, which is why the next run, in section 3, reports it missing.

3. Check whether the table exists before deleting it (first choice for production development)

     

Once more only the delta is shown: deleteTable() now checks tableExists() first instead of failing on a missing table, and main() again calls deleteTable("test_table4"), which was already dropped in the previous run. Everything else is identical to the first listing:

package zhouls.bigdata.HbaseProject.Pool;

// imports as in the first listing

public class HBaseTest {

    public static void main(String[] args) throws Exception {
        HBaseTest hbasetest = new HBaseTest();
        hbasetest.deleteTable("test_table4");//check whether the table exists before deleting it (first choice for production)
    }

    public void deleteTable(String tableName) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create(getConfig());
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tableName)) {
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        } else {
            System.out.println(tableName + " does not exist");
        }
        admin.close();
    }

    public static Configuration getConfig() {
        Configuration configuration = new Configuration();
        configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
        return configuration;
    }
}
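To stay with the thread-pool theme, the admin handle can also be borrowed from the shared connection instead of building a fresh HBaseAdmin on every call. The sketch below assumes the HBase 1.x client (as on this post's classpath), where the pooled connection exposes getAdmin() and the Admin interface takes TableName arguments; the class DropTableExample is my illustration, not part of the original project:

package zhouls.bigdata.HbaseProject.Pool;

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

public class DropTableExample {

    public static void dropIfExists(String tableName) throws IOException {
        // Borrow an Admin from the shared, pool-backed connection;
        // closing the Admin does not close the underlying connection.
        Admin admin = TableConnection.getConnection().getAdmin();
        try {
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                admin.disableTable(tn);
                admin.deleteTable(tn);
            } else {
                System.out.println(tableName + " does not exist");
            }
        } finally {
            admin.close();
        }
    }

    public static void main(String[] args) throws IOException {
        dropIfExists("test_table4");
    }
}

The log below is from the original deleteTable() run above, with the existence check reporting that test_table4 is already gone: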

    2016-12-11 16:27:50,172 INFO [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x75e56da connecting to ZooKeeper ensemble=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181
    2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
    2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:host.name=WIN-BQOBV63OBNM
    2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.version=1.7.0_51
    2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.vendor=Oracle Corporation
2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.home=C:\Program Files\Java\jdk1.7.0_51\jre
2016-12-11 16:27:50,187 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.class.path=... (the project bin directory plus the HBase 1.2.3 lib jars; full classpath omitted)
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.library.path=... (omitted)
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.io.tmpdir=C:\Users\ADMINI~1\AppData\Local\Temp
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.compiler=<NA>
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.name=Windows 7
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.arch=amd64
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.version=6.1
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.name=Administrator
2016-12-11 16:27:50,188 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.home=C:\Users\Administrator
2016-12-11 16:27:50,189 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.dir=D:\Code\MyEclipseJavaCode\HbaseProject
    2016-12-11 16:27:50,190 INFO [org.apache.zookeeper.ZooKeeper] - Initiating client connection, connectString=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181 sessionTimeout=90000 watcher=hconnection-0x75e56da0x0, quorum=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181, baseZNode=/hbase
    2016-12-11 16:27:50,251 INFO [org.apache.zookeeper.ClientCnxn] - Opening socket connection to server HadoopSlave1/192.168.80.11:2181. Will not attempt to authenticate using SASL (unknown error)
    2016-12-11 16:27:50,253 INFO [org.apache.zookeeper.ClientCnxn] - Socket connection established to HadoopSlave1/192.168.80.11:2181, initiating session
    2016-12-11 16:27:50,269 INFO [org.apache.zookeeper.ClientCnxn] - Session establishment complete on server HadoopSlave1/192.168.80.11:2181, sessionid = 0x25872b4d2c50021, negotiated timeout = 40000
test_table4 does not exist
    2016-12-11 16:27:51,483 INFO [org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation] - Closing zookeeper sessionid=0x25872b4d2c50021
    2016-12-11 16:27:51,725 INFO [org.apache.zookeeper.ZooKeeper] - Session: 0x25872b4d2c50021 closed
    2016-12-11 16:27:51,735 INFO [org.apache.zookeeper.ClientCnxn] - EventThread shut down
