IBM Business Analytics

 View Only

Getting an error while using a Data Module after uploading the file

  • 1.  Getting error while using Data Module after uploading the file

    Posted Wed October 18, 2023 09:34 AM

    I am getting the error below when I try to use a Data Module after uploading files:

    XQE-DAT-0001 Data source adapter error: GeneralException(requestId:913c2b9f-df88-4851-b126-b875c9e936da, message:Job aborted due to stage failure: Task 0 in stage 78.0 failed 1 times, most recent failure: Lost task 0.0 in stage 78.0 (TID 68) (SERVERPATH executor driver): java.nio.file.NoSuchFileException: E:\CognosTemp\OPS11\XQE\flint\scratch\blockmgr-3abd1535-96d9-4301-8429-79da15c4feb6\31
    at sun.nio.fs.WindowsException.translateToIOException(WindowsException.java:91)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:109)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:114)
    at sun.nio.fs.WindowsFileSystemProvider.createDirectory(WindowsFileSystemProvider.java:516)
    at java.nio.file.Files.createDirectory(Files.java:685)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:74)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:84)
    at org.apache.spark.storage.DiskBlockManager.createTempShuffleBlock(DiskBlockManager.scala:131)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.writeSortedFile(ShuffleExternalSorter.java:179)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.closeAndGetSpills(ShuffleExternalSorter.java:430)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.closeAndWriteOutput(UnsafeShuffleWriter.java:220)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:180)
    at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
    at org.apache.spark.scheduler.Task.run_aroundBody0(Task.scala:131)
    at org.apache.spark.scheduler.Task$AjcClosure1.run(Task.scala:1)
    at org.aspectj.runtime.reflect.JoinPointImpl.proceed(JoinPointImpl.java:149)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect$$Lambda$2193/0x00000000f9e22f40.call(Unknown Source)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.doDecorate(ApplyTaskMdcDecorator.java:75)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.lambda$decorate$3(ApplyTaskMdcDecorator.java:92)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator$$Lambda$2194/0x00000000fcc8c650.call(Unknown Source)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect.aroundTaskRun(DecoratedTaskAspect.java:111)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:497)
    at org.apache.spark.executor.Executor$TaskRunner$$Lambda$2190/0x00000000f5d08a70.apply(Unknown Source)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:500)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1160)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.lang.Thread.run(Thread.java:825)
     
    Driver stacktrace:, trace:org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 78.0 failed 1 times, most recent failure: Lost task 0.0 in stage 78.0 (TID 68) (SERVERPATH executor driver): java.nio.file.NoSuchFileException: E:\CognosTemp\OPS11\XQE\flint\scratch\blockmgr-3abd1535-96d9-4301-8429-79da15c4feb6\31
    at sun.nio.fs.WindowsException.translateToIOException(WindowsException.java:91)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:109)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:114)
    at sun.nio.fs.WindowsFileSystemProvider.createDirectory(WindowsFileSystemProvider.java:516)
    at java.nio.file.Files.createDirectory(Files.java:685)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:74)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:84)
    at org.apache.spark.storage.DiskBlockManager.createTempShuffleBlock(DiskBlockManager.scala:131)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.writeSortedFile(ShuffleExternalSorter.java:179)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.closeAndGetSpills(ShuffleExternalSorter.java:430)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.closeAndWriteOutput(UnsafeShuffleWriter.java:220)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:180)
    at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
    at org.apache.spark.scheduler.Task.run_aroundBody0(Task.scala:131)
    at org.apache.spark.scheduler.Task$AjcClosure1.run(Task.scala:1)
    at org.aspectj.runtime.reflect.JoinPointImpl.proceed(JoinPointImpl.java:149)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect$$Lambda$2193/0x00000000f9e22f40.call(Unknown Source)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.doDecorate(ApplyTaskMdcDecorator.java:75)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.lambda$decorate$3(ApplyTaskMdcDecorator.java:92)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator$$Lambda$2194/0x00000000fcc8c650.call(Unknown Source)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect.aroundTaskRun(DecoratedTaskAspect.java:111)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:497)
    at org.apache.spark.executor.Executor$TaskRunner$$Lambda$2190/0x00000000f5d08a70.apply(Unknown Source)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:500)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1160)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.lang.Thread.run(Thread.java:825)
     
    Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2258)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2207)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2206)
    at org.apache.spark.scheduler.DAGScheduler$$Lambda$3235/0x00000000fc1883e0.apply(Unknown Source)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2206)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1079)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1079)
    at org.apache.spark.scheduler.DAGScheduler$$Lambda$3232/0x00000000fc187c90.apply(Unknown Source)
    at scala.Option.foreach(Option.scala:407)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1079)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2445)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2387)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2376)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    Caused by: java.nio.file.NoSuchFileException: E:\CognosTemp\OPS11\XQE\flint\scratch\blockmgr-3abd1535-96d9-4301-8429-79da15c4feb6\31
    at sun.nio.fs.WindowsException.translateToIOException(WindowsException.java:91)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:109)
    at sun.nio.fs.WindowsException.rethrowAsIOException(WindowsException.java:114)
    at sun.nio.fs.WindowsFileSystemProvider.createDirectory(WindowsFileSystemProvider.java:516)
    at java.nio.file.Files.createDirectory(Files.java:685)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:74)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:84)
    at org.apache.spark.storage.DiskBlockManager.createTempShuffleBlock(DiskBlockManager.scala:131)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.writeSortedFile(ShuffleExternalSorter.java:179)
    at org.apache.spark.shuffle.sort.ShuffleExternalSorter.closeAndGetSpills(ShuffleExternalSorter.java:430)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.closeAndWriteOutput(UnsafeShuffleWriter.java:220)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:180)
    at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
    at org.apache.spark.scheduler.Task.run_aroundBody0(Task.scala:131)
    at org.apache.spark.scheduler.Task$AjcClosure1.run(Task.scala:1)
    at org.aspectj.runtime.reflect.JoinPointImpl.proceed(JoinPointImpl.java:149)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect$$Lambda$2193/0x00000000f9e22f40.call(Unknown Source)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.doDecorate(ApplyTaskMdcDecorator.java:75)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator.lambda$decorate$3(ApplyTaskMdcDecorator.java:92)
    at com.ibm.ba.flint.server.logging.ApplyTaskMdcDecorator$$Lambda$2194/0x00000000fcc8c650.call(Unknown Source)
    at com.ibm.ba.flint.aop.spark.DecoratedTaskAspect.aroundTaskRun(DecoratedTaskAspect.java:111)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:497)
    at org.apache.spark.executor.Executor$TaskRunner$$Lambda$2190/0x00000000f5d08a70.apply(Unknown Source)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:500)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1160)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.lang.Thread.run(Thread.java:825)
    )


    ------------------------------
    Jagdish Kokate
    ------------------------------