  • HBase Basics (5): Syntax (3) API (2) DML

    Create the class HBase_DML.

    1 Insert Data

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;
    
    import java.io.IOException;
    
    public class HBase_DML {
    
        //TODO Insert data (Put)
        public static void putData(String tableName, String rowKey, String cf, String cn, String value) throws IOException {
    
            //1. Get the configuration and set the connection parameters
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
    
            //2. Get the connection
            Connection connection = ConnectionFactory.createConnection(configuration);
    
            //3. Get the table
            Table table = connection.getTable(TableName.valueOf(tableName));
    
            //4. Create the Put object for the given row key
            Put put = new Put(Bytes.toBytes(rowKey));
    
            //5. Add the cell: column family, qualifier, value
            put.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn), Bytes.toBytes(value));
    
            //6. Execute the put
            table.put(put);
    
            //7. Close resources
            table.close();
            connection.close();
        }
    
    }
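
    A minimal call sketch, assuming a table named "student" with a column family "info" has already been created (for example via the DDL API or the HBase shell); the table, row key, and value here are illustrative only:

        public static void main(String[] args) throws IOException {
            // Hypothetical values: write info:name = "zhangsan" into row "1001" of table "student"
            putData("student", "1001", "info", "name", "zhangsan");
        }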

    2 Single-Row Query

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;
    
    import java.io.IOException;
    
    public class HBase_DML {
    
        //TODO Single-row query (Get)
        public static void getData(String tableName, String rowKey, String cf, String cn) throws IOException {
    
            //1. Get the configuration and set the connection parameters
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
    
            //2. Get the connection
            Connection connection = ConnectionFactory.createConnection(configuration);
    
            //3. Get the table
            Table table = connection.getTable(TableName.valueOf(tableName));
    
            //4. Create the Get object for the given row key
            Get get = new Get(Bytes.toBytes(rowKey));
            // Restrict the query to a column family
            // get.addFamily(Bytes.toBytes(cf));
            // Restrict the query to a specific family:qualifier
            // get.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn));
    
            //5. Query the data
            Result result = table.get(get);
    
            //6. Parse the result
            for (Cell cell : result.rawCells()) {
                System.out.println("ROW:" + Bytes.toString(CellUtil.cloneRow(cell)) +
                            " CF:" + Bytes.toString(CellUtil.cloneFamily(cell)) +
                            " CL:" + Bytes.toString(CellUtil.cloneQualifier(cell)) +
                            " VALUE:" + Bytes.toString(CellUtil.cloneValue(cell)));
            }
    
            //7. Close resources
            table.close();
            connection.close();
    
        }
    
    }
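
    A minimal call sketch, reusing the hypothetical "student" table from above; with the addFamily/addColumn lines left commented out, the Get returns every cell of the row:

        public static void main(String[] args) throws IOException {
            // Hypothetical values: print all cells of row "1001" in table "student"
            getData("student", "1001", "info", "name");
        }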

    3 Scan Data

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;
    
    import java.io.IOException;
    
    public class HBase_DML {
    
        //TODO Scan data (Scan)
        public static void scanTable(String tableName) throws IOException {
    
            //1. Get the configuration and set the connection parameters
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
    
            //2. Get the connection
            Connection connection = ConnectionFactory.createConnection(configuration);
    
            //3. Get the table
            Table table = connection.getTable(TableName.valueOf(tableName));
    
            //4. Create the Scan object (no constraints, so this is a full-table scan)
            Scan scan = new Scan();
    
            //5. Scan the data
            ResultScanner results = table.getScanner(scan);
    
            //6. Parse the results, one Result per row
            for (Result result : results) {
                for (Cell cell : result.rawCells()) {
                    System.out.println(
                            Bytes.toString(CellUtil.cloneRow(cell)) + ":" +
                                    Bytes.toString(CellUtil.cloneFamily(cell)) + ":" +
                                    Bytes.toString(CellUtil.cloneQualifier(cell)) + ":" +
                                    Bytes.toString(CellUtil.cloneValue(cell))
                    );
                }
            }
    
            //7. Close resources
            table.close();
            connection.close();
    
        }
    
    }
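
    A minimal call sketch on the hypothetical "student" table; the commented lines show how the scan could be limited to a row-key range, assuming an HBase 2.x client where Scan#withStartRow/withStopRow replace the older setStartRow/setStopRow:

        public static void main(String[] args) throws IOException {
            // Hypothetical full-table scan of "student"
            scanTable("student");

            // Row-range variant (start row inclusive, stop row exclusive):
            // Scan scan = new Scan()
            //         .withStartRow(Bytes.toBytes("1001"))
            //         .withStopRow(Bytes.toBytes("1003"));
        }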

    4 Delete Data

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;
    
    import java.io.IOException;
    
    public class HBase_DML {
    
        //TODO Delete data
        public static void deleteData(String tableName, String rowKey, String cf, String cn) throws IOException {
    
            //1. Get the configuration and set the connection parameters
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
    
            //2. Get the connection
            Connection connection = ConnectionFactory.createConnection(configuration);
    
            //3. Get the table
            Table table = connection.getTable(TableName.valueOf(tableName));
    
            //4. Create the Delete object (with nothing else specified, the whole row is deleted)
            Delete delete = new Delete(Bytes.toBytes(rowKey));
    
            // Delete an entire column family
            // delete.addFamily(Bytes.toBytes(cf));
            // Delete a specific family:qualifier (latest version only)
            // delete.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn));
            // Delete a specific family:qualifier (all versions)
            // delete.addColumns(Bytes.toBytes(cf), Bytes.toBytes(cn));
    
            //5. Execute the delete
            table.delete(delete);
    
            //6. Close resources
            table.close();
            connection.close();
    
        }
    
    }
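
    A minimal call sketch on the hypothetical "student" table; since the addFamily/addColumn(s) lines are commented out, this call removes the entire row:

        public static void main(String[] args) throws IOException {
            // Hypothetical values: delete the whole row "1001" from table "student"
            deleteData("student", "1001", "info", "name");
        }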

    This article is from 博客园 (cnblogs), author: 秋华. Please credit the original link when reposting: https://www.cnblogs.com/qiu-hua/p/15225444.html
