  • My first HBase client API program

    The code is as follows:

    package com.el.hbase.test;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.filter.*;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.joda.time.DateTime;
    
    import java.util.ArrayList;
    import java.util.List;
    
    /**
     * Created by dengshengcai on 2017/7/5.
     */
    public class Test1 {
    
        public static void main(String[] args)
        {
            Test1 test1 = new Test1();
    
    //        test1.dropTable();
    //        test1.createTable();
    //        test1.putData();
    //        test1.getData();
            test1.scanTable();
        }
    
    
        Configuration conf = HBaseConfiguration.create();
    
        static final String TABLE_NAME ="testdsc_table1";
        static final String COLUMN_FAMILY = "fa";
    
        /**
         * Create the table
         */
        public void createTable()
        {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Admin admin = connection.getAdmin()) {
    
                HTableDescriptor table =
                        new HTableDescriptor(TableName.valueOf(TABLE_NAME));
                table.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
    
                if (!admin.tableExists(table.getTableName())) {
                    System.out.print("Creating table. ");
                    admin.createTable(table);
                    System.out.println(" Done.");
                }
    
            }
            catch(Exception e)
            {
                e.printStackTrace();
            }
        }
    
        /**
         * Drop the table
         */
        public void dropTable()
        {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Admin admin = connection.getAdmin())
            {
                HTableDescriptor table =
                        new HTableDescriptor(TableName.valueOf(TABLE_NAME));
                table.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
    
                if (admin.tableExists(table.getTableName())) {
                    System.out.print("Dropping table. ");
                    // a table must be disabled before it can be dropped
                    admin.disableTable(table.getTableName());
                    admin.deleteTable(table.getTableName());
                    System.out.println(" Done.");
                }
    
            }
            catch(Exception e)
            {
                e.printStackTrace();
            }
        }
    
        public void putData() {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Table table = connection.getTable(TableName.valueOf(TABLE_NAME))) {
                // build row keys from timestamps
                String dateStr;
                String se_dateStr;
                DateTime dt_up;
                DateTime dt_se;
                List<Put> list;
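                // count = puts per daily batch (one per second for 23 hours); times = number of simulated days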
                int count = 3600 * 23;
                int times = 365 * 3;
    
                dt_up = new DateTime(2017, 1, 1, 0, 0, 1, 0);
                for (int t = 0; t < times; t++) {
                    dt_se = dt_up.plusDays(1);
                    dt_up = new DateTime(dt_se.getYear(), dt_se.getMonthOfYear(), dt_se.getDayOfMonth(), 0, 0, 1, 0);
                    dt_se = dt_up.plusSeconds(3);
                    list = new ArrayList<Put>();
                    for (int i = 0; i < count; i++) {
                        dt_up = dt_up.plusSeconds(1);
                        dt_se = dt_se.plusSeconds(1);
                        dateStr = dt_up.toString("yyyy-MM-dd HH:mm:ss");
                        se_dateStr = dt_se.toString("yyyy-MM-dd HH:mm:ss");
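                        // row key = "<start timestamp>,<end timestamp>" at one-second granularity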
                        Put p = new Put(Bytes.toBytes(dateStr + "," + se_dateStr));
                        p.addColumn(Bytes.toBytes(COLUMN_FAMILY), Bytes.toBytes("l1"), Bytes.toBytes("my first value中文测试一下 " + dateStr));
    
                        list.add(p);
                    }
    
                    System.out.println("执行了" + t + "次");
    
                    table.put(list);
                }
    
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    
        public void getData()
        {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Table table = connection.getTable(TableName.valueOf(TABLE_NAME))) {
                // look up a single row by row key
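                // Note: "myLittleRow" is copied from the official HBase example; putData() writes
                // keys of the form "<start timestamp>,<end timestamp>", so this exact row will not exist there.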
                Get g = new Get(Bytes.toBytes("myLittleRow"));
                Result r = table.get(g);
                byte [] value = r.getValue(Bytes.toBytes(COLUMN_FAMILY),
                        Bytes.toBytes("l1"));
                // Convert the value bytes back to a String (null if the row was not found).
                String valueStr = Bytes.toString(value);
                System.out.println("GET: " + valueStr);
    
    
            }
            catch(Exception e){
                e.printStackTrace();
            }
        }
    
        public void scanTable()
        {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Table table = connection.getTable(TableName.valueOf(TABLE_NAME))) {
                // scan a row-key range
    
                Scan s = new Scan();
                Filter filter;
    //            filter = new SingleColumnValueFilter(Bytes.toBytes(COLUMN_FAMILY), Bytes.toBytes("l1"), CompareFilter.CompareOp.EQUAL,new SubstringComparator("100009"));
                filter=new RowFilter(CompareFilter.CompareOp.EQUAL,new SubstringComparator("100009"));
    //            FilterList filterList=new FilterList()
                s.addColumn(Bytes.toBytes(COLUMN_FAMILY), Bytes.toBytes("l1"));
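                // restrict the scan to rows whose start timestamp falls on 2017-01-21 (the stop row is exclusive)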
                s.setStartRow(Bytes.toBytes("2017-01-21 00:00:00,0000-00-00 00:00:00"));
                s.setStopRow(Bytes.toBytes("2017-01-22 00:00:00,0000-00-00 00:00:00"));
    //            s.setFilter(filter);
                int icount=0;
                ResultScanner scanner = table.getScanner(s);
                try {
                    // Scanners return Result instances.
                    // Now, for the actual iteration. One way is to use a while loop like so:
                    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
                        // print out the row we found and the columns we were looking for
    //                    System.out.println("Found row: " + rr);
    //                    System.out.println("Value:"+ Bytes.toString(rr.value()));
                        icount++;
                    }
                } finally {
                    // Make sure you close your scanners when you are done!
                    // That's why we have it inside a try/finally clause
                    scanner.close();
                    System.out.println("总共:"+ icount + "行");
                }
    
            }
            catch(Exception e){
                e.printStackTrace();
            }
        }
    }

    Steps:

    1. First, I added every jar from the lib directory of the downloaded HBase distribution to the project (this can certainly be trimmed down, since not all of those jars are needed; I will optimize it later).

    2. I used IntelliJ IDEA; the project structure is shown in the screenshot below:

    2.1 Added a resources directory and placed hbase-site.xml in it; this file records the ZooKeeper server names (a code-based alternative is sketched right after the file):

    <?xml version="1.0" encoding="UTF-8"?>
    <configuration>
        <property>
            <name>hbase.zookeeper.quorum</name>
            <value>slave03,slave04,slave05</value>
        </property>
    </configuration>
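
    HBaseConfiguration.create() reads hbase-site.xml automatically as long as the file is on the classpath (which is why it lives in the resources directory). If you would rather not ship the file, the same setting can be made in code. A minimal sketch, assuming ZooKeeper listens on its default client port 2181 (the port is not specified in the file above):

    // Programmatic equivalent of the hbase-site.xml above (sketch).
    Configuration conf = HBaseConfiguration.create();
    // same server list as the hbase.zookeeper.quorum property
    conf.set("hbase.zookeeper.quorum", "slave03,slave04,slave05");
    // assumption: default ZooKeeper client port
    conf.set("hbase.zookeeper.property.clientPort", "2181");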

    2.2 Added the Test1 Java source file; its code is given in full above.

    3. Run the code above as a Java Application.

    Note: this is mainly based on the examples in the official HBase documentation.
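
    For a quick sanity check after putData() has run, you can fetch one of the rows it actually writes. Below is a minimal sketch of a method that could be added to Test1; the concrete key is only an example of the "<start>,<end>" key format and assumes that batch was in fact inserted:

        public void getOneRow() {
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Table table = connection.getTable(TableName.valueOf(TABLE_NAME))) {
                // hypothetical example key in the format written by putData()
                Get g = new Get(Bytes.toBytes("2017-01-21 00:00:02,2017-01-21 00:00:05"));
                Result r = table.get(g);
                byte[] value = r.getValue(Bytes.toBytes(COLUMN_FAMILY), Bytes.toBytes("l1"));
                System.out.println("GET: " + Bytes.toString(value));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }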

  • Original post: https://www.cnblogs.com/kxxx/p/7161195.html