  • Error when inserting data into Hive

    Error message:

    Failed with exception MetaException(message:javax.jdo.JDODataStoreException: Put request failed : INSERT INTO `SERDE_PARAMS` (`PARAM_VALUE`,`SERDE_ID`,`PARAM_KEY`) VALUES (?,?,?) 
    	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)
    	at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:732)
    	at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)
    	at org.apache.hadoop.hive.metastore.ObjectStore.addPartition(ObjectStore.java:1462)
    	at sun.reflect.GeneratedMethodAccessor40.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:497)
    	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
    	at com.sun.proxy.$Proxy2.addPartition(Unknown Source)
    	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.append_partition_common(HiveMetaStore.java:1967)
    	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.append_partition_with_environment_context(HiveMetaStore.java:2022)
    	at sun.reflect.GeneratedMethodAccessor39.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:497)
    	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
    	at com.sun.proxy.$Proxy4.append_partition_with_environment_context(Unknown Source)
    	at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$append_partition_with_environment_context.getResult(ThriftHiveMetastore.java:9660)
    	at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$append_partition_with_environment_context.getResult(ThriftHiveMetastore.java:9644)
    	at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:110)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:106)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1656)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor.process(TUGIBasedProcessor.java:118)
    	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    NestedThrowablesStackTrace:
    org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO `SERDE_PARAMS` (`PARAM_VALUE`,`SERDE_ID`,`PARAM_KEY`) VALUES (?,?,?) 
    	at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078)
    	at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220)
    	at org.datanucleus.store.rdbms.mapping.java.MapMapping.postInsert(MapMapping.java:137)
    	at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:519)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
    	at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
    	at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
    	at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
    	at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2314)
    	at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObjectAsValue(PersistableMapping.java:567)
    	at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObject(PersistableMapping.java:326)
    	at org.datanucleus.store.rdbms.fieldmanager.ParameterSetter.storeObjectField(ParameterSetter.java:193)
    	at org.datanucleus.state.JDOStateManager.providedObjectField(JDOStateManager.java:1269)
    	at org.apache.hadoop.hive.metastore.model.MStorageDescriptor.jdoProvideField(MStorageDescriptor.java)
    	at org.apache.hadoop.hive.metastore.model.MStorageDescriptor.jdoProvideFields(MStorageDescriptor.java)
    	at org.datanucleus.state.JDOStateManager.provideFields(JDOStateManager.java:1346)
    	at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:289)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
    	at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
    	at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
    	at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
    	at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2314)
    	at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObjectAsValue(PersistableMapping.java:567)
    	at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObject(PersistableMapping.java:326)
    	at org.datanucleus.store.rdbms.fieldmanager.ParameterSetter.storeObjectField(ParameterSetter.java:193)
    	at org.datanucleus.state.JDOStateManager.providedObjectField(JDOStateManager.java:1269)
    	at org.apache.hadoop.hive.metastore.model.MPartition.jdoProvideField(MPartition.java)
    	at org.apache.hadoop.hive.metastore.model.MPartition.jdoProvideFields(MPartition.java)
    	at org.datanucleus.state.JDOStateManager.provideFields(JDOStateManager.java:1346)
    	at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:289)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
    	at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
    	at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
    	at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
    	at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
    	at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:2065)
    	at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1913)
    	at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)
    	at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:727)
    	at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)
    	at org.apache.hadoop.hive.metastore.ObjectStore.addPartition(ObjectStore.java:1462)
    	at sun.reflect.GeneratedMethodAccessor40.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:497)
    	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
    	at com.sun.proxy.$Proxy2.addPartition(Unknown Source)
    	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.append_partition_common(HiveMetaStore.java:1967)
    	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.append_partition_with_environment_context(HiveMetaStore.java:2022)
    	at sun.reflect.GeneratedMethodAccessor39.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:497)
    	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
    	at com.sun.proxy.$Proxy4.append_partition_with_environment_context(Unknown Source)
    	at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$append_partition_with_environment_context.getResult(ThriftHiveMetastore.java:9660)
    	at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$append_partition_with_environment_context.getResult(ThriftHiveMetastore.java:9644)
    	at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:110)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:106)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1656)
    	at org.apache.hadoop.hive.metastore.TUGIBasedProcessor.process(TUGIBasedProcessor.java:118)
    	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Caused by: java.sql.SQLException: The table 'SERDE_PARAMS' is full
    	at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1073)
    	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3597)
    	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3529)
    	at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1990)
    	at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2151)
    	at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2625)
    	at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:2119)
    	at com.mysql.jdbc.PreparedStatement.executeUpdate(PreparedStatement.java:2415)
    	at com.mysql.jdbc.PreparedStatement.executeUpdate(PreparedStatement.java:2333)
    	at com.mysql.jdbc.PreparedStatement.executeUpdate(PreparedStatement.java:2318)
    	at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205)
    	at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399)
    	at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439)
    	at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069)
    	... 67 more
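
    The decisive line is the final "Caused by: java.sql.SQLException: The table 'SERDE_PARAMS' is full". SERDE_PARAMS is one of the Hive metastore tables stored in MySQL, so the failure comes from the MySQL side of the metastore, not from HDFS. A quick first check is whether the filesystems holding MySQL's data and temp directories have run out of space. A minimal sketch, assuming a typical installation (the path /var/lib/mysql is only the common default, not confirmed for this setup):

        # on the host that backs the Hive metastore: where does MySQL write?
        mysql -u root -p -e "SHOW VARIABLES WHERE Variable_name IN ('datadir','tmpdir');"

        # check how full those filesystems are (adjust the paths to the values returned above)
        df -h /var/lib/mysql /tmp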

    Cause: I assumed the disk was full, but a check showed it was not. I then ran df -h on the machine where Hive runs and found the root partition at 100%, even though my data lives on a separate disk. The real culprit was Hive's logs, which had filled up the root partition. The sketch below shows the diagnosis and cleanup steps.
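
    A minimal diagnostic sketch, assuming the default Hive log location ${java.io.tmpdir}/${user.name}/hive.log (the actual path is set by hive.log.dir in the hive-log4j configuration, so treat these paths as examples):

        # confirm which filesystem is full
        df -h /

        # find the biggest directories on the root filesystem only (-x stays on one filesystem)
        du -xh --max-depth=2 / 2>/dev/null | sort -h | tail -20

        # by default Hive logs land under /tmp/<user>/hive.log; adjust to your hive.log.dir
        ls -lh /tmp/*/hive.log*

        # free space: truncate the live log and remove rotated ones you no longer need
        : > /tmp/hive/hive.log
        rm -f /tmp/hive/hive.log.*

    Longer term, pointing hive.log.dir at the larger data disk, or tightening log rotation in the hive-log4j properties, keeps the root partition from filling up again.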

  • Original post: https://www.cnblogs.com/zeppelin/p/6100565.html