Troubleshooting an error when loading data from HDFS into HBase

    The following error occurred while importing dump files into HBase with HBase's Import tool:

    hbase org.apache.hadoop.hbase.mapreduce.Driver import hbase_rgrid_kpi_100 /tmp/dumphbase/20180717/datahbase/datahbase/hbase_rgrid_kpi_100
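    For context, a dump in this layout is normally produced by the matching Export job. The invocation below is a sketch only: the table name and HDFS path are copied from the import command above, and the options actually used for the original export are not shown in this post:

    hbase org.apache.hadoop.hbase.mapreduce.Export hbase_rgrid_kpi_100 /tmp/dumphbase/20180717/datahbase/datahbase/hbase_rgrid_kpi_100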

    The log output is as follows:

    [BEGIN] 2018/7/19 13:32:31
    [9euser@cq-t-h-node0-0-150 ~]$ hbase org.apache.hadoop.hbase.mapreduce.Driver import  hbase_rgrid_kpi_100 /tmp/dumphbase/20180717/datahbase/datahbase/hbase_rgrid_kpi_100
    Java HotSpot(TM) 64-Bit Server VM warning: Using incremental CMS is deprecated and will likely be removed in a future release
    18/07/19 13:32:12 INFO client.RMProxy: Connecting to ResourceManager at cq-t-h-node0-0-150/192.168.0.150:8032
    18/07/19 13:32:13 INFO input.FileInputFormat: Total input paths to process : 25
    18/07/19 13:32:13 INFO mapreduce.JobSubmitter: number of splits:25
    18/07/19 13:32:13 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1531962677618_0032
    18/07/19 13:32:14 INFO impl.YarnClientImpl: Submitted application application_1531962677618_0032
    18/07/19 13:32:14 INFO mapreduce.Job: The url to track the job: http://cq-t-h-node0-0-150:8088/proxy/application_1531962677618_0032/
    18/07/19 13:32:14 INFO mapreduce.Job: Running job: job_1531962677618_0032
    18/07/19 13:32:18 INFO mapreduce.Job: Job job_1531962677618_0032 running in uber mode : false
    18/07/19 13:32:18 INFO mapreduce.Job:  map 0% reduce 0%
    18/07/19 13:32:24 INFO mapreduce.Job:  map 8% reduce 0%
    18/07/19 13:32:25 INFO mapreduce.Job:  map 16% reduce 0%
    18/07/19 13:32:26 INFO mapreduce.Job:  map 20% reduce 0%
    18/07/19 13:32:27 INFO mapreduce.Job:  map 32% reduce 0%
    18/07/19 13:32:28 INFO mapreduce.Job:  map 48% reduce 0%
    18/07/19 13:32:29 INFO mapreduce.Job:  map 52% reduce 0%
    18/07/19 13:32:30 INFO mapreduce.Job:  map 60% reduce 0%
    18/07/19 13:32:30 INFO mapreduce.Job: Task Id : attempt_1531962677618_0032_m_000004_0, Status : FAILED
    ', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', COMPRESSION => 'NONE', MIN_VERSIONS => '0', BLOCKCACHE => 'true', BLOCKSIZE => '65536', REPLICATION_SCOPE => '0'}
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.doBatchOp(RSRpcServices.java:760)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.doNonAtomicRegionMutation(RSRpcServices.java:715)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.multi(RSRpcServices.java:2148)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33656)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2182)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:185)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:165)
    : 4583 times, 
        at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:258)
        at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$2000(AsyncProcess.java:238)
        at org.apache.hadoop.hbase.client.AsyncProcess.waitForAllPreviousOpsAndReset(AsyncProcess.java:1810)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:240)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.mutate(BufferedMutatorImpl.java:146)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.mutate(BufferedMutatorImpl.java:113)
        at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.write(TableOutputFormat.java:138)
        at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.write(TableOutputFormat.java:94)
        at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:664)
        at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
        at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.processKV(Import.java:210)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.writeResult(Import.java:165)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.map(Import.java:150)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.map(Import.java:133)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
    
    18/07/19 13:32:30 INFO mapreduce.Job: Task Id : attempt_1531962677618_0032_m_000008_0, Status : FAILED
    ', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', COMPRESSION => 'NONE', MIN_VERSIONS => '0', BLOCKCACHE => 'true', BLOCKSIZE => '65536', REPLICATION_SCOPE => '0'}
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.doBatchOp(RSRpcServices.java:760)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.doNonAtomicRegionMutation(RSRpcServices.java:715)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.multi(RSRpcServices.java:2148)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33656)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2182)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:185)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:165)
    : 4597 times, 
        at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:258)
        at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$2000(AsyncProcess.java:238)
        at org.apache.hadoop.hbase.client.AsyncProcess.waitForAllPreviousOpsAndReset(AsyncProcess.java:1810)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:240)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.mutate(BufferedMutatorImpl.java:146)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.mutate(BufferedMutatorImpl.java:113)
        at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.write(TableOutputFormat.java:138)
        at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.write(TableOutputFormat.java:94)
        at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:664)
        at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
        at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.processKV(Import.java:210)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.writeResult(Import.java:165)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.map(Import.java:150)
        at org.apache.hadoop.hbase.mapreduce.Import$Importer.map(Import.java:133)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)

    Analysis: the stack traces show batched Puts being rejected on the region server (RSRpcServices.doBatchOp) and surfaced when the client's BufferedMutator flushes. The truncated line above each trace ("', DATA_BLOCK_ENCODING => 'NONE', ...") is the tail of a column-family descriptor as HBase prints it inside an exception message, which is consistent with a NoSuchColumnFamilyException: the target table was missing a column family that the exported data references. The fix was to alter the target table's column families to match the source table.
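    A minimal repair sketch in the HBase shell: compare the target table's families against the descriptor printed in the exception, then add whichever family is missing. The family name 'cf1' below is a placeholder for the family named in the error, and on older HBase versions the table may need to be disabled before the alter:

    hbase shell
    describe 'hbase_rgrid_kpi_100'               # list the families the target table currently has
    alter 'hbase_rgrid_kpi_100', NAME => 'cf1'   # add the missing column family (placeholder name)
    describe 'hbase_rgrid_kpi_100'               # confirm the family was added

    Once the schemas match, re-running the Import job should succeed. Note that the ": 4583 times" and ": 4597 times" fragments in the log are the client reporting how many batched mutations failed with the same exception, not separate errors.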

Original post: https://www.cnblogs.com/xhnxhnu/p/9336150.html