Stage Id | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write
---|---|---|---|---|---|---|---|---|---
513057 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:24:16 | 12 ms | 1/1 | 155.0 B | | |
513056 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:24:16 | 38 ms | 2/2 | 133.0 B | 155.0 B | |
513055 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:24:16 | 17 ms | 1/1 | 624.0 B | 133.0 B | |

Stage 513057, call site (toStream at SparkDataStreamBuilder.scala:39):

    scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
    java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
    java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
    java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
    java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
    java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183)

Stage 513056, call site (toLocalIterator at SparkDataStreamBuilder.scala:39):

    org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
    java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
    java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
    java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
    java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
    java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183)

Stage 513056, physical plan (RDD shown in the stage description; the InMemoryRelation line is truncated in the source):

    *(3) Project [cap#94385214, turnover#94382804, (1.0 / cast(turnover#94382804 as double)) AS days_hold#94385248]
    +- *(3) Sort [cap_sort#94385125 ASC NULLS FIRST], true, 0
       +- Exchange rangepartitioning(cap_sort#94385125 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7535950]
          +- *(2) Project [turnover#94382804, cap_description#94385124 AS cap#94385214, cap_sort#94385125]
             +- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94382550))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#94160394 as float)))], Inner, BuildLeft, false
                :- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7535942]
                :  +- *(1) Filter isnotnull(cap#94382550)
                :     +- *(1) ColumnarToRow
                :        +- InMemoryTableScan [cap#94382550, turnover#94382804], [isnotnull(cap#94382550)]
                :              +- InMemoryRelation [cap#94382550, retIC#943...

Stage 513055, call site: identical to the stage 513056 stack above (toLocalIterator at Dataset.scala:3000, called from SparkDataStreamBuilder.scala:39 inside SparkAccessor.retrieveData).

Stage 513055, physical plan (RDD shown in the stage description):

    *(1) Project [CASE WHEN ((cap#94160377 = NA) OR (cap#94160377 = null)) THEN null ELSE cast(cap#94160377 as int) END AS cap#94160394, CASE WHEN (sort#94160378 = null) THEN null ELSE sort#94160378 END AS sort#94160395, CASE WHEN (description#94160379 = null) THEN null ELSE description#94160379 END AS description#94160396, CASE WHEN ((universe#94160380 = NA) OR (universe#94160380 = null)) THEN null ELSE cast(universe#94160380 as int) END AS universe#94160397]
    +- FileScan csv [cap#94160377,sort#94160378,description#94160379,universe#94160380] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/curate/curate_cap.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<cap:string,sort:string,description:string,universe:string>
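For context, a minimal Scala sketch of the call site these stack traces point at. Only `Dataset.toLocalIterator` (Dataset.scala:3000), the iterator-to-Stream conversion (Iterator.scala:1431), and the `Future.map` wrapping are visible in the traces; the `Profile.time` signature and every other plusamp-internal detail below are assumptions.

```scala
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import org.apache.spark.sql.{Dataset, Row}

// Stub standing in for plusamp.scala.util.Profile; the real signature is not shown above.
object Profile {
  def time[A](body: => A): A = {
    val t0 = System.nanoTime()
    try body
    finally println(f"took ${(System.nanoTime() - t0) / 1e6}%.1f ms")
  }
}

// SparkDataStreamBuilder.scala:39, per the stack frames: the constructor wraps the Dataset in a
// lazy Stream. The toLocalIterator() call and the toStream conversion that first pulls from it
// are what appear as the toLocalIterator/toStream stages in the table above.
class SparkDataStreamBuilder(ds: Dataset[Row]) {
  val stream: Stream[Row] = Profile.time {
    ds.toLocalIterator().asScala.toStream // Dataset.scala:3000 -> Iterator.scala:1431
  }
}

// SparkAccessor.scala:77, assumed shape: the builder is constructed inside Future.map, which
// is why the lower frames run on a ForkJoinPool worker thread.
class SparkAccessor(load: () => Future[Dataset[Row]])(implicit ec: ExecutionContext) {
  def retrieveData(): Future[Stream[Row]] =
    load().map(ds => new SparkDataStreamBuilder(ds).stream)
}
```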
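The stage 513056 plan can also be read back into DataFrame operations. The sketch below is a hedged reconstruction from the column names and expressions in that plan: the `capCurate` and `holdings` names, the `spark.table("holdings")` source, and the exact column renames are assumptions, not code taken from the application.

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.{broadcast, col, lit}

val spark = SparkSession.builder().getOrCreate()

// Stage 513055: the 624 B curate_cap.csv scan. The later plan refers to cap_sort and
// cap_description, so a rename of roughly this shape is implied.
val capCurate: DataFrame = spark.read
  .option("header", "true")
  .csv("/srv/plusamp/data/default/ea-market/curate/curate_cap.csv")
  .withColumnRenamed("sort", "cap_sort")
  .withColumnRenamed("description", "cap_description")

// Cached dataset holding at least cap and turnover; its origin is not visible in the plan
// (it appears only as InMemoryTableScan / InMemoryRelation), so this source is hypothetical.
val holdings: DataFrame = spark.table("holdings").select("cap", "turnover").cache()

// Stage 513056: BroadcastHashJoin with BuildLeft (the cached side is broadcast), then the
// range-partitioned sort on cap_sort (the exchange whose 155.0 B output stage 513057 consumes),
// then days_hold = 1.0 / turnover.
val result = broadcast(holdings)
  .join(capCurate, holdings("cap") === capCurate("cap").cast("float"))
  .select(col("turnover"), col("cap_description").as("cap"), col("cap_sort"))
  .orderBy(col("cap_sort").asc)
  .select(col("cap"), col("turnover"), (lit(1.0) / col("turnover")).as("days_hold"))
```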