Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
511643 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 16:00:00 | 9 ms | 1/1 | 1397.0 B | | | |
511642 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7515156]
+- *(2) Project [sector_id#94125513, numcos#94125528, numdates#94125529, sort#93880530, description#93880532, universe#94125754, coverage#94125694]
+- *(2) BroadcastHashJoin [sector_id#94125513], [sector_id#93880529], Inner, BuildRight, false
:- *(2) Project [sector_id#94125513, numcos#94125528, numdates#94125529, coverage#94125694, round((cast(numcos#94125528 as double) / cast(coverage#94125694 as double)), 0) AS universe#94125754]
: +- *(2) Filter isnotnull(sector_id#94125513)
: +- *(2) ColumnarToRow
: +- InMemoryTableScan [coverage#94125694, numcos#94125528, numdates#94125529, sector_id#94125513], [isnotnull(sector_id#94125513)]
: +- InMemoryRelation [sector_id#94125513, retIC#94125514, resretIC#94125...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 16:00:00 | 56 ms | 13/13 | 1397.0 B | 1397.0 B | | |
511641 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((sector_id#94125150 = NA) OR (sector_id#94125150 = null)) THEN null ELSE cast(sector_id#94125150 as int) END AS sector_id#94125513, CASE WHEN ((retIC#94125151 = NA) OR (retIC#94125151 = null)) THEN null ELSE cast(retIC#94125151 as float) END AS retIC#94125514, CASE WHEN ((resretIC#94125152 = NA) OR (resretIC#94125152 = null)) THEN null ELSE cast(resretIC#94125152 as float) END AS resretIC#94125515, CASE WHEN ((numcos#94125153 = NA) OR (numcos#94125153 = null)) THEN null ELSE cast(numcos#94125153 as float) END AS numcos#94125528, CASE WHEN ((numdates#94125154 = NA) OR (numdates#94125154 = null)) THEN null ELSE cast(numdates#94125154 as int) END AS numdates#94125529, CASE WHEN ((annual_bmret#94125155 = NA) OR (annual_bmret#94125155 = null)) THEN null ELSE cast(annual_bmret#94125155 as float) END AS annual_bmret#94125530, CASE WHEN ((annual_ret#94125156 = NA) OR (annual_ret#94125156 = null)) THEN null ELSE cast(annual_ret#94125156 as float) END AS annual_ret#94125531, CASE WHEN ((std_...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 16:00:00 | 10 ms | 1/1 | 3.5 KiB | 1397.0 B