Uploaded image for project: 'CDAP'
  1. CDAP
  2. CDAP-15970

Database plugins do not handle the decimal type

    Details

    • Type: Bug
    • Status: Open
    • Priority: Critical
    • Resolution: Unresolved
    • Affects Version/s: None
    • Fix Version/s: 6.3.0
    • Component/s: None
    • Rank:
      1|i00rnz:

      Description

      If a field in a table has a decimal type, the DB source plugin fails with the following error:

      io.cdap.wrangler.api.RecipeException: Problem converting into output record. Reason : Schema specifies field 'global_category_id' is long, but the value is nor a string or long. It is of type 'java.math.BigInteger'
      	at io.cdap.wrangler.executor.RecipePipelineExecutor.execute(RecipePipelineExecutor.java:103) ~[wrangler-core-4.0.1.jar:na]
      	at io.cdap.wrangler.Wrangler.transform(Wrangler.java:424) ~[1569835734981-0/:na]
      	at io.cdap.wrangler.Wrangler.transform(Wrangler.java:87) ~[1569835734981-0/:na]
      	at io.cdap.cdap.etl.common.plugin.WrappedTransform.lambda$transform$5(WrappedTransform.java:90) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.Caller$1.call(Caller.java:30) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.StageLoggingCaller.call(StageLoggingCaller.java:40) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.WrappedTransform.transform(WrappedTransform.java:89) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.TrackedTransform.transform(TrackedTransform.java:74) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.UnwrapPipeStage.consumeInput(UnwrapPipeStage.java:44) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.UnwrapPipeStage.consumeInput(UnwrapPipeStage.java:32) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.PipeStage.consume(PipeStage.java:44) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.PipeEmitter.emit(PipeEmitter.java:83) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.TrackedEmitter.emit(TrackedEmitter.java:56) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.UntimedEmitter.emit(UntimedEmitter.java:64) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.plugin.db.batch.source.DBSource.transform(DBSource.java:214) ~[1569835736886-0/:na]
      	at io.cdap.cdap.etl.common.plugin.WrappedBatchSource.lambda$transform$2(WrappedBatchSource.java:69) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.Caller$1.call(Caller.java:30) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.StageLoggingCaller.call(StageLoggingCaller.java:40) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.WrappedBatchSource.transform(WrappedBatchSource.java:68) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.plugin.WrappedBatchSource.transform(WrappedBatchSource.java:36) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.preview.LimitingTransform.transform(LimitingTransform.java:44) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.common.TrackedTransform.transform(TrackedTransform.java:74) ~[cdap-etl-core-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.UnwrapPipeStage.consumeInput(UnwrapPipeStage.java:44) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.UnwrapPipeStage.consumeInput(UnwrapPipeStage.java:32) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.PipeStage.consume(PipeStage.java:44) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.PipeTransformExecutor.runOneIteration(PipeTransformExecutor.java:43) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.mapreduce.TransformRunner.transform(TransformRunner.java:142) ~[cdap-etl-batch-6.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.mapreduce.ETLMapReduce$ETLMapper.map(ETLMapReduce.java:230) ~[cdap-etl-batch-6.0.1.jar:na]
      	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146) [hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at io.cdap.cdap.internal.app.runtime.batch.MapperWrapper.run(MapperWrapper.java:135) [na:na]
      	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793) [hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) [hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) [hadoop-mapreduce-client-common-2.9.2.jar:na]
      	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_222]
      	at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_222]
      	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [na:1.8.0_222]
      	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [na:1.8.0_222]
      	at java.lang.Thread.run(Thread.java:748) [na:1.8.0_222]
      2019-09-30 09:29:04,781 - INFO  [Thread-719:i.c.c.i.a.r.b.MainOutputCommitter@181] - Invalidating transaction 1569835738787000000
      2019-09-30 09:29:04,782 - INFO  [MapReduceRunner-phase-1:i.c.c.i.a.r.b.MapReduceRuntimeService@405] - MapReduce Job completed. Job details: [name=phase-1, jobId=job_local796348111_0016, namespaceId=default, applicationId=preview-1569835719391, program=phase-1, runid=b8d851f0-e364-11e9-bf73-a23904eb13f0]
      2019-09-30 09:29:04,785 - WARN  [Thread-719:o.a.h.m.LocalJobRunner@589] - job_local796348111_0016
      java.lang.Exception: java.lang.RuntimeException: io.cdap.wrangler.api.RecipeException: Problem converting into output record. Reason : Schema specifies field 'global_category_id' is long, but the value is nor a string or long. It is of type 'java.math.BigInteger'
      	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:491) ~[hadoop-mapreduce-client-common-2.9.2.jar:na]
      	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:551) ~[hadoop-mapreduce-client-common-2.9.2.jar:na]
      java.lang.RuntimeException: io.cdap.wrangler.api.RecipeException: Problem converting into output record. Reason : Schema specifies field 'global_category_id' is long, but the value is nor a string or long. It is of type 'java.math.BigInteger'
      	at com.google.common.base.Throwables.propagate(Throwables.java:160) ~[guava-13.0.1.jar:na]
      	at io.cdap.cdap.etl.batch.mapreduce.ETLMapReduce$ETLMapper.map(ETLMapReduce.java:233) ~[cdap-etl-batch-6.0.1.jar:na]
      	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146) ~[hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at io.cdap.cdap.internal.app.runtime.batch.MapperWrapper.run(MapperWrapper.java:135) ~[na:na]
      	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793) ~[hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) ~[hadoop-mapreduce-client-core-2.9.2.jar:na]
      	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) ~[hadoop-mapreduce-client-common-2.9.2.jar:na]
      	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[na:1.8.0_222]
      	at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_222]
      	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[na:1.8.0_222]
      	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[na:1.8.0_222]
      	at java.lang.Thread.run(Thread.java:748) ~[na:1.8.0_222]
      

        Attachments

          Activity

            People

            • Assignee:
              vinisha Vinisha Shah
              Reporter:
              sree Sreevatsan Raman
            • Votes:
              1 Vote for this issue
              Watchers:
              2 Start watching this issue

              Dates

              • Created:
                Updated: