| Stage Id | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
|---|---|---|---|---|---|---|---|---|---|
| 512646 | default | toStream at SparkDataStreamBuilder.scala:39 (stack trace below) | 2025/08/02 15:14:49 | 18 ms | 1/1 | 54.5 KiB | | | |
| 512645 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 (plan and stack trace below) | 2025/08/02 15:14:48 | 1 s | 200/200 | 54.5 KiB | 54.5 KiB | | |
| 512644 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 (plan and stack trace below) | 2025/08/02 15:14:48 | 59 ms | 1/1 | 266.1 KiB | 54.5 KiB | | |

Stage 512646 (toStream at SparkDataStreamBuilder.scala:39), call-site stack trace from the stage details:

```
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829)
```
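The stack traces all point at a single call site, SparkDataStreamBuilder.scala:39, which calls Dataset.toLocalIterator inside a Profile.time block and turns the resulting iterator into a Scala Stream (the AbstractIterator.toStream frame). A minimal sketch of that pattern is below; only the names that appear in the trace are taken from it, and the Profile.time signature and the class internals are assumptions.

```scala
import org.apache.spark.sql.{Dataset, Row}
import scala.collection.JavaConverters._

// Stand-in for plusamp.scala.util.Profile$.time; the real signature is not
// shown in this section and is assumed here.
object Profile {
  def time[T](label: String)(body: => T): T = {
    val start = System.nanoTime()
    try body
    finally println(s"$label took ${(System.nanoTime() - start) / 1e6} ms")
  }
}

class SparkDataStreamBuilder(ds: Dataset[Row]) {
  // Roughly what line 39 appears to do: pull rows to the driver lazily via
  // toLocalIterator and expose them as a memoizing Stream.
  val stream: Stream[Row] = Profile.time("stream") {
    ds.toLocalIterator().asScala.toStream
  }
}
```

Under this reading, the Stream is only forced when its first element is requested, which is presumably why the toStream call site shows up as its own single-task stage (512646) after the 200-task sort stage has finished.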
Stage 512645 (toLocalIterator at SparkDataStreamBuilder.scala:39), RDD lineage from the stage details:

```
*(2) Sort [date#94306892 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94306892 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7529639]
   +- *(1) Project [date#94306892, allret#94306893 AS daily_gross#94306959, allretnet#94306897 AS daily_net#94306960]
      +- InMemoryTableScan [allret#94306893, allretnet#94306897, date#94306892]
            +- InMemoryRelation [date#94306892, allret#94306893, allcum#94306894, drawdown_gross#94306895, drawdown_length_gross#94306896, allretnet#94306897, allcumnet#94306898, drawdown_net#94306899, drawdown_length_net#94306900, numcos#94306901, largecum#94306902, largecumnet#94306903, largeret#94306904, largeretnet#94306905, midcum#94306906, midcumnet#94306907, midret#94306908, midretnet#94306909, smallcum#94306910, smallcumnet#94306911, smallret#94306912, smallretnet#94306913], StorageLevel(disk, memory, deserialized, 1 replicas)
                  +- *(1) Project [CASE WHEN (date#94305708 = null) THEN null ELSE cast(date#94305708 as date) END AS date#943058...
```

Call-site stack trace:

```
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829)
```
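One thing the stage 512645 plan makes visible: the Exchange rangepartitioning(date ASC NULLS FIRST, 200) is the shuffle that a sort on date compiles to, and the 200 matches Spark's default spark.sql.shuffle.partitions, so roughly 55 KiB of data is being spread over 200 tasks. A hedged sketch of the query shape and the usual knob, assuming a cached view name and SparkSession setup that are not shown in this section:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

val spark = SparkSession.builder().getOrCreate()

// 200 partitions for ~55 KiB of sorted output is mostly task-scheduling
// overhead; lowering the shuffle-partition count (or relying on AQE
// partition coalescing) is the usual mitigation. The value 8 is illustrative.
spark.conf.set("spark.sql.shuffle.partitions", "8")

// Hypothetical stand-in for the cached DataFrame behind the InMemoryTableScan.
val cached = spark.table("cached_returns")

// This select/orderBy is the shape that produces the Project + Sort +
// Exchange rangepartitioning(date ASC NULLS FIRST, N) seen in the plan.
val daily = cached
  .select(col("date"), col("allret").as("daily_gross"), col("allretnet").as("daily_net"))
  .orderBy(col("date"))
```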
Stage 512644 (toLocalIterator at SparkDataStreamBuilder.scala:39), RDD lineage from the stage details (the projection list is truncated in the UI):

```
*(1) Project [CASE WHEN (date#94305708 = null) THEN null ELSE cast(date#94305708 as date) END AS date#94305864,
              CASE WHEN ((allret#94305709 = NA) OR (allret#94305709 = null)) THEN null ELSE cast(allret#94305709 as float) END AS allret#94305867,
              CASE WHEN ((allcum#94305710 = NA) OR (allcum#94305710 = null)) THEN null ELSE cast(allcum#94305710 as float) END AS allcum#94305869,
              CASE WHEN ((drawdown_gross#94305711 = NA) OR (drawdown_gross#94305711 = null)) THEN null ELSE cast(drawdown_gross#94305711 as float) END AS drawdown_gross#94305870,
              CASE WHEN ((drawdown_length_gross#94305712 = NA) OR (drawdown_length_gross#94305712 = null)) THEN null ELSE cast(drawdown_length_gross#94305712 as int) END AS drawdown_length_gross#94305872,
              CASE WHEN ((allretnet#94305713 = NA) OR (allretnet#94305713 = null)) THEN null ELSE cast(allretnet#94305713 as float) END AS allretnet#94305875,
              CASE WHEN ((allcumnet#94305714 = NA) OR (allcumnet#94305714 = null)) THEN null ELSE cast(allcumnet#94305714 as float) END AS allcumnet#9430587...
```

Call-site stack trace: identical to stage 512645's above, starting at org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000).
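The truncated Project in stage 512644 looks like the cache-population step: each column is nulled when it apparently holds the literal strings "NA" or "null" and is otherwise cast to its target type. A sketch of a helper that produces that CASE WHEN shape; the helper name and the usage are illustrative, and only the WHEN/cast pattern is taken from the plan above:

```scala
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions.{col, when}

// Builds the CASE WHEN ((c = NA) OR (c = null)) THEN null ELSE cast(c as type)
// expression seen in the stage 512644 plan. `cleanCast` is a hypothetical name;
// the date column in the plan only checks `= null`, but the same idea applies.
def cleanCast(name: String, dataType: String): Column =
  when(col(name) === "NA" || col(name) === "null", null)
    .otherwise(col(name).cast(dataType))
    .as(name)

// Illustrative usage against a raw, string-typed DataFrame.
def typed(raw: DataFrame): DataFrame =
  raw.select(
    cleanCast("date", "date"),
    cleanCast("allret", "float"),
    cleanCast("allcum", "float"),
    cleanCast("drawdown_length_gross", "int")
  )
```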