  • [java][spark]Exception in thread "JobGenerator" java.lang.OutOfMemoryError: GC overhead limit exceeded

    Exception in thread "JobGenerator" java.lang.OutOfMemoryError: GC overhead limit exceeded
        at java.util.Arrays.copyOf(Arrays.java:3236)
        at java.util.zip.ZipCoder.getBytes(ZipCoder.java:89)
        at java.util.zip.ZipFile.getEntry(ZipFile.java:310)
        at java.util.jar.JarFile.getEntry(JarFile.java:240)
        at java.util.jar.JarFile.getJarEntry(JarFile.java:223)
        at sun.misc.URLClassPath$JarLoader.getResource(URLClassPath.java:1005)
        at sun.misc.URLClassPath$JarLoader.findResource(URLClassPath.java:983)
        at sun.misc.URLClassPath.findResource(URLClassPath.java:188)
        at java.net.URLClassLoader$2.run(URLClassLoader.java:569)
        at java.net.URLClassLoader$2.run(URLClassLoader.java:567)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findResource(URLClassLoader.java:566)
        at java.lang.ClassLoader.getResource(ClassLoader.java:1096)
        at java.net.URLClassLoader.getResourceAsStream(URLClassLoader.java:232)
        at java.lang.Class.getResourceAsStream(Class.java:2223)
        at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:40)
        at org.apache.spark.util.ClosureCleaner$.getInnerClosureClasses(ClosureCleaner.scala:84)
        at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:224)
        at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:159)
        at org.apache.spark.SparkContext.clean(SparkContext.scala:2299)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKeyWithClassTag$1.apply(PairRDDFunctions.scala:88)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKeyWithClassTag$1.apply(PairRDDFunctions.scala:77)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
        at org.apache.spark.rdd.PairRDDFunctions.combineByKeyWithClassTag(PairRDDFunctions.scala:77)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKey$1.apply(PairRDDFunctions.scala:119)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKey$1.apply(PairRDDFunctions.scala:119)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
        at org.apache.spark.rdd.PairRDDFunctions.combineByKey(PairRDDFunctions.scala:117)
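    The "JobGenerator" thread is part of the Spark Streaming driver, and the frames above sit inside SparkContext.clean / ClosureCleaner while a combineByKey job is being set up, so this "GC overhead limit exceeded" error is hitting the driver JVM, not an executor. A common first response is to give the driver more heap when submitting the application. The sketch below is only an illustration under that assumption; the main class, jar name, and memory size are placeholders, not values from the original post:

    # Hypothetical spark-submit invocation; the main class, jar name, and
    # the 4g figure are placeholders.
    # Raising the driver heap (the default for spark.driver.memory is 1g)
    # gives the JobGenerator / ClosureCleaner work room instead of the JVM
    # spending nearly all its time in GC ("GC overhead limit exceeded").
    spark-submit \
      --class com.example.MyStreamingApp \
      --driver-memory 4g \
      my-streaming-app.jar

    Increasing the driver heap (or trimming whatever driver-side state keeps growing) is generally preferable to disabling the check with -XX:-UseGCOverheadLimit, which only hides the symptom.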
  • Original article: https://www.cnblogs.com/fadedlemon/p/11733065.html