Details
-
Bug
-
Status: New
-
Major
-
Resolution: Unresolved
-
None
-
None
-
None
-
All
-
Small
Description
Reproduced on INT and OnPrem environments with Studio 7.3.1, using a Big Data Batch job (Spark mode job). A standard job works correctly.
When I try to run a job with a tDataPrepRun component pointing to a preparation containing a "Format numbers" step, the step is not applied and a NullPointerException is thrown:
[ERROR]: org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction - Unable to use action 'change_number_format' (parameters: MixedContentMap{map={column_id=0004, create_new_column=true, from_decimal_separator=,, from_grouping_separator= , from_separators=custom, scope=column, target_pattern=us_pattern}}) due to unexpected error.[ERROR]: org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction - Unable to use action 'change_number_format' (parameters: MixedContentMap{map={column_id=0004, create_new_column=true, from_decimal_separator=,, from_grouping_separator= , from_separators=custom, scope=column, target_pattern=us_pattern}}) due to unexpected error.java.lang.NullPointerException at org.talend.dataprep.transformation.actions.math.ChangeNumberFormat.apply(ChangeNumberFormat.java:343) at org.talend.dataprep.transformation.actions.common.AbstractMultiScopeAction.applyOnColumn(AbstractMultiScopeAction.java:50) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.handleRow(ApplyDataSetRowAction.java:69) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.apply(ApplyDataSetRowAction.java:43) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.apply(ApplyDataSetRowAction.java:25) at org.talend.dataprep.transformation.actions.common.DataSetRowActionImpl.apply(DataSetRowActionImpl.java:33) at org.talend.dataprep.transformation.actions.common.DataSetRowActionImpl.apply(DataSetRowActionImpl.java:20) at org.talend.dataprep.transformation.pipeline.node.ActionNode.receive(ActionNode.java:52) at org.talend.dataprep.transformation.pipeline.link.BasicLink.emit(BasicLink.java:17) at org.talend.dataprep.transformation.pipeline.node.CompileNode.receive(CompileNode.java:75) at org.talend.dataprep.transformation.pipeline.link.BasicLink.emit(BasicLink.java:17) at org.talend.dataprep.transformation.pipeline.node.BasicNode.receive(BasicNode.java:26) at 
org.talend.dataprep.transformation.pipeline.node.SourceNode.receive(SourceNode.java:34) at org.talend.dataprep.transformation.pipeline.Pipeline.receive(Pipeline.java:137) at org.talend.dataprep.transformation.pipeline.Pipeline.receive(Pipeline.java:137) at org.talend.dataprep.actions.SerializableFunction.apply(SerializableFunction.java:86) at org.talend.dataprep.actions.SerializableFunction.apply(SerializableFunction.java:36) at jobs_jixiao_731.runspark_731_tui_forms_0_1.runSpark_731_TUI_FORMS$tDataprepRun_1_Function.call(runSpark_731_TUI_FORMS.java:2886) at jobs_jixiao_731.runspark_731_tui_forms_0_1.runSpark_731_TUI_FORMS$tDataprepRun_1_Function.call(runSpark_731_TUI_FORMS.java:1) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$7.apply(objects.scala:222) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$8$$anonfun$apply$1.apply(objects.scala:229) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$8$$anonfun$apply$1.apply(objects.scala:229) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) at scala.collection.AbstractIterator.to(Iterator.scala:1336) at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1336) at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) at 
scala.collection.AbstractIterator.toArray(Iterator.scala:1336) at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:893) at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:893) at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1897) at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1897) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70) at org.apache.spark.scheduler.Task.run(Task.scala:85) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source) at java.lang.Thread.run(Unknown Source)[Stage 0:> (0 + 2) / 2][ERROR]: org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction - Unable to use action 'change_number_format' (parameters: MixedContentMap{map={column_id=0004, create_new_column=true, from_decimal_separator=,, from_grouping_separator= , from_separators=custom, scope=column, target_pattern=us_pattern}}) due to unexpected error.java.lang.NullPointerException at org.talend.dataprep.transformation.actions.math.ChangeNumberFormat.apply(ChangeNumberFormat.java:343) at org.talend.dataprep.transformation.actions.common.AbstractMultiScopeAction.applyOnColumn(AbstractMultiScopeAction.java:50) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.handleRow(ApplyDataSetRowAction.java:69) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.apply(ApplyDataSetRowAction.java:43) at org.talend.dataprep.transformation.actions.common.ApplyDataSetRowAction.apply(ApplyDataSetRowAction.java:25) at org.talend.dataprep.transformation.actions.common.DataSetRowActionImpl.apply(DataSetRowActionImpl.java:33) at org.talend.dataprep.transformation.actions.common.DataSetRowActionImpl.apply(DataSetRowActionImpl.java:20) at 
org.talend.dataprep.transformation.pipeline.node.ActionNode.receive(ActionNode.java:52) at org.talend.dataprep.transformation.pipeline.link.BasicLink.emit(BasicLink.java:17) at org.talend.dataprep.transformation.pipeline.node.CompileNode.receive(CompileNode.java:75) at org.talend.dataprep.transformation.pipeline.link.BasicLink.emit(BasicLink.java:17) at org.talend.dataprep.transformation.pipeline.node.BasicNode.receive(BasicNode.java:26) at org.talend.dataprep.transformation.pipeline.node.SourceNode.receive(SourceNode.java:34) at org.talend.dataprep.transformation.pipeline.Pipeline.receive(Pipeline.java:137) at org.talend.dataprep.transformation.pipeline.Pipeline.receive(Pipeline.java:137) at org.talend.dataprep.actions.SerializableFunction.apply(SerializableFunction.java:86) at org.talend.dataprep.actions.SerializableFunction.apply(SerializableFunction.java:36) at jobs_jixiao_731.runspark_731_tui_forms_0_1.runSpark_731_TUI_FORMS$tDataprepRun_1_Function.call(runSpark_731_TUI_FORMS.java:2886) at jobs_jixiao_731.runspark_731_tui_forms_0_1.runSpark_731_TUI_FORMS$tDataprepRun_1_Function.call(runSpark_731_TUI_FORMS.java:1) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$7.apply(objects.scala:222) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$8$$anonfun$apply$1.apply(objects.scala:229) at org.apache.spark.sql.execution.MapElementsExec$$anonfun$8$$anonfun$apply$1.apply(objects.scala:229) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$$anon$11.next(Iterator.scala:409) at scala.collection.Iterator$class.foreach(Iterator.scala:893) at scala.collection.AbstractIterator.foreach(Iterator.scala:1336) at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) at 
scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) at scala.collection.AbstractIterator.to(Iterator.scala:1336) at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1336) at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) at scala.collection.AbstractIterator.toArray(Iterator.scala:1336) at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:893) at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:893) at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1897) at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1897) at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70) at org.apache.spark.scheduler.Task.run(Task.scala:85) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source) at java.lang.Thread.run(Unknown Source)