Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
511773 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:44 | 19 ms | 1/1 | 47.6 KiB | |||
511772 | default | toLocalIterator at SparkDataStreamBuilder.scala:39<br>RDD: *(2) Sort [date#94006072 ASC NULLS FIRST], true, 0<br>+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516276]<br>&nbsp;&nbsp;&nbsp;+- *(1) Project [date#94006072, midret#94006104 AS daily_gross#94140035, midretnet#94006107 AS daily_net#94140036]<br>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;+- InMemoryTableScan [date#94006072, midret#94006104, midretnet#94006107]<br>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)<br>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#940053...<br>org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:44 | 0.7 s | 200/200 | 47.6 KiB | 47.6 KiB | ||
511771 | default | toLocalIterator at SparkDataStreamBuilder.scala:39<br>RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...<br>org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:42 | 49 ms | 1/1 | 155.6 KiB | 47.6 KiB |