Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
512963 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:22:15 | 12 ms | 1/1 | 1396.0 B | | | |
512962 | default | toLocalIterator at SparkDataStreamBuilder.scala:39<br>RDD: *(3) Sort [sort#94160419 ASC NULLS FIRST, description#94160423 ASC NULLS FIRST], true, 0<br>+- Exchange rangepartitioning(sort#94160419 ASC NULLS FIRST, description#94160423 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7534490]<br>+- *(2) Project [sector_id#94368013, numcos#94368019, numdates#94368101, sort#94160419, description#94160423, universe#94368286, coverage#94368142]<br>+- *(2) BroadcastHashJoin [sector_id#94368013], [sector_id#94160418], Inner, BuildRight, false<br>:- *(2) Project [sector_id#94368013, numcos#94368019, numdates#94368101, coverage#94368142, round((cast(numcos#94368019 as double) / cast(coverage#94368142 as double)), 0) AS universe#94368286]<br>: +- *(2) Filter isnotnull(sector_id#94368013)<br>: +- *(2) ColumnarToRow<br>: +- InMemoryTableScan [coverage#94368142, numcos#94368019, numdates#94368101, sector_id#94368013], [isnotnull(sector_id#94368013)]<br>: +- InMemoryRelation [sector_id#94368013, retIC#94368015, resretIC#94368...<br>org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:22:15 | 79 ms | 13/13 | 1396.0 B | 1396.0 B | | |
512961 | default | toLocalIterator at SparkDataStreamBuilder.scala:39<br>RDD: *(1) Project [CASE WHEN ((sector_id#94367759 = NA) OR (sector_id#94367759 = null)) THEN null ELSE cast(sector_id#94367759 as int) END AS sector_id#94368013, CASE WHEN ((retIC#94367760 = NA) OR (retIC#94367760 = null)) THEN null ELSE cast(retIC#94367760 as float) END AS retIC#94368015, CASE WHEN ((resretIC#94367761 = NA) OR (resretIC#94367761 = null)) THEN null ELSE cast(resretIC#94367761 as float) END AS resretIC#94368017, CASE WHEN ((numcos#94367762 = NA) OR (numcos#94367762 = null)) THEN null ELSE cast(numcos#94367762 as float) END AS numcos#94368019, CASE WHEN ((numdates#94367763 = NA) OR (numdates#94367763 = null)) THEN null ELSE cast(numdates#94367763 as int) END AS numdates#94368101, CASE WHEN ((annual_bmret#94367764 = NA) OR (annual_bmret#94367764 = null)) THEN null ELSE cast(annual_bmret#94367764 as float) END AS annual_bmret#94368107, CASE WHEN ((annual_ret#94367765 = NA) OR (annual_ret#94367765 = null)) THEN null ELSE cast(annual_ret#94367765 as float) END AS annual_ret#94368108, CASE WHEN ((std_...<br>org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:22:15 | 26 ms | 1/1 | 3.5 KiB | 1396.0 B | | |