工程在Tomcat部署后,BI更新数据时要么很慢,要么卡死。有没有人遇到这种情况? 以下是部分日志: 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at 
org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) 
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at 
org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at 
org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Database session opened 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at 
org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at 
org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Found user by condition QueryConditionImpl{restriction=Restriction{type=AND, restrictions=[Restriction{type=EQ, column=userName, value=luojian0323}]}, skip=0, count=0, sort=[]} 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at 
org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at 
org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Database session closed 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at 
com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at 
scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Query action using 6 ms 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at 
sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at 
scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Database session opened 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at 
org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at 
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 13:43:37 http-nio-8080-exec-3 DEBUG [standard] Found user by condition QueryConditionImpl{restriction=Restriction{type=AND, restrictions=[Restriction{type=EQ, column=userName, value=luojian0323}]}, skip=0, count=0, sort=[]} 13:43:37 Executor task launch worker for task 12 ERROR [standard] empty String java.lang.NumberFormatException: empty String at sun.misc.FloatingDecimal.readJavaFormatString(FloatingDecimal.java:1842) at sun.misc.FloatingDecimal.parseDouble(FloatingDecimal.java:110) at java.lang.Double.parseDouble(Double.java:538) at com.fr.function.TODOUBLE.run(TODOUBLE.java:31) at com.finebi.jep.function.custom.text.TODOUBLE.run(TODOUBLE.java:19) at com.finebi.jep.function.AbstractFunction.run(AbstractFunction.java:49) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:273) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.SimpleNode.childrenAccept(SimpleNode.java:77) at org.nfunk.jep.EvaluatorVisitor.visit(EvaluatorVisitor.java:258) at org.nfunk.jep.ASTFunNode.jjtAccept(ASTFunNode.java:53) at org.nfunk.jep.EvaluatorVisitor.getValue(EvaluatorVisitor.java:110) at org.nfunk.jep.JEP.evaluate(JEP.java:635) at com.finebi.jep.Jep.evaluateCheck(Jep.java:76) at 
com.finebi.jep.Jep.evaluate(Jep.java:64) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:87) at com.finebi.spider.sparksql.udf.JepFormulaUdfETL.call(JepFormulaUdfETL.java:21) at org.apache.spark.sql.functions$$anonfun$21.apply(functions.scala:3616) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.serializefromobject_doConsume$(Unknown Source) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage13.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at scala.collection.convert.Wrappers$IteratorWrapper.hasNext(Wrappers.scala:30) at com.finebi.spider.etl.job.spark.analysisfunction.ExtendRowIterator.hasNext(ExtendRowIterator.java:57) at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage14.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614) at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:438) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129) at 
org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411) at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at org.apache.spark.scheduler.Task.run(Task.scala:109) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)