Pool Name | Minimum Share | Pool Weight | Active Stages | Running Tasks | SchedulingMode
---|---|---|---|---|---
default | 0 | 1 | 0 | 0 | FIFO
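The pool table shows the application running on the default scheduler: a single `default` pool in FIFO mode with no minimum share. For reference, a minimal sketch (not this application's code) of how the scheduling mode and per-thread pool assignment are configured; the app name is hypothetical:

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch, assuming defaults. With the settings below you get exactly
// the pool table above: one "default" pool, FIFO, min share 0, weight 1.
val spark = SparkSession.builder()
  .appName("pools-example")                 // hypothetical
  .config("spark.scheduler.mode", "FIFO")   // the default; "FAIR" enables real pools
  // .config("spark.scheduler.allocation.file", "fairscheduler.xml") // pool definitions for FAIR mode
  .getOrCreate()

// Concurrent jobs (such as the Future-driven submissions below) can be routed
// to a named pool per thread:
spark.sparkContext.setLocalProperty("spark.scheduler.pool", "default")
```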
Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write
---|---|---|---|---|---|---|---|---|---
511813 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:49 | 9 ms | 1/1 | | | 1237.0 B |
511812 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:49 | 44 ms | 10/10 | | | 1237.0 B | 1237.0 B
511811 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:49 | 10 ms | 1/1 | 3.0 KiB | | | 1237.0 B
511810 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:49 | 11 ms | 1/1 | 3.0 KiB | | |
511809 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:48 | 58 ms | 1/1 | 3.0 KiB | | |
511808 | default | broadcast exchange (runId 49b027cb-804f-4eff-88f2-ad3da33db1bd) | 2025/08/01 18:52:47 | 58 ms | 1/1 | 624.0 B | | |
511807 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 60 ms | 1/1 | 1160.0 B | | |
511806 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 13 ms | 1/1 | 4.4 KiB | | |
511805 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:49 | 68 ms | 1/1 | | | 42.6 KiB |
511804 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 0.5 s | 200/200 | | | 96.9 KiB | 42.6 KiB
511802 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:48 | 48 ms | 1/1 | | | 28.9 KiB |
511801 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 2 s | 200/200 | | | 66.3 KiB | 28.9 KiB
511799 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:48 | 32 ms | 1/1 | | | 47.5 KiB |
511798 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 0.8 s | 200/200 | | | 47.5 KiB | 47.5 KiB
511797 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 98 ms | 1/1 | 155.6 KiB | | | 47.5 KiB
511796 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:47 | 40 ms | 1/1 | | | 42.3 KiB |
511795 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 1 s | 200/200 | | | 42.3 KiB | 42.3 KiB
511794 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 90 ms | 1/1 | 155.6 KiB | | | 42.3 KiB
511793 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 22 ms | 1/1 | | | |
511792 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 16 ms | 1/1 | 262.7 KiB | | |
511791 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 18 ms | 1/1 | 155.6 KiB | | |
511790 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 13 ms | 1/1 | 155.6 KiB | | |
511789 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 21 ms | 1/1 | | | 1313.0 B |
511788 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 58 ms | 12/12 | | | 10.6 KiB | 1313.0 B
511786 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 11 ms | 1/1 | 262.7 KiB | | |
511785 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 11 ms | 1/1 | 424.0 B | | |
511784 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 14 ms | 1/1 | | | 318.0 B |
511783 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 18 ms | 3/3 | | | 2.7 KiB | 318.0 B
511781 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 32 ms | 1/1 | | | 154.0 B |
511780 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 16 ms | 2/2 | | | 656.0 B | 154.0 B
511778 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/01 18:52:46 | 7 ms | | | | |

Every stage above was submitted from the same application call site: line 39 of `SparkDataStreamBuilder.scala`, reached from `SparkAccessor.retrieveData` (SparkAccessor.scala:77) inside a `Future` callback. The `toStream` stages enter Spark via `scala.collection.AbstractIterator.toStream(Iterator.scala:1431)` and the `toLocalIterator` stages via `org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)`; the rest of the driver-side stack is identical across stages:

```
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829)
```

The exceptions: stages 511785, 511781, 511780, and 511778 ran their callbacks on a `ForkJoinPool` rather than the `ThreadPoolExecutor`, and stage 511808 was submitted internally by Spark through `SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:185)` on a `FutureTask`.
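The source of `SparkDataStreamBuilder.scala:39` is not part of this dump, but the paired call-site frames (`Dataset.toLocalIterator` plus `Iterator.toStream`, both at line 39) suggest a hypothetical reconstruction along these lines:

```scala
import scala.collection.JavaConverters._   // Scala 2.12, matching the stack traces
import org.apache.spark.sql.{Dataset, Row}

// Hypothetical sketch of what SparkDataStreamBuilder.scala:39 likely does; the
// real class is not shown in this dump. Dataset.toLocalIterator pulls result
// partitions back to the driver incrementally, and Iterator.toStream memoizes
// the rows as they arrive.
def stream(ds: Dataset[Row]): Stream[Row] =
  ds.toLocalIterator().asScala.toStream
```

This would also explain the stage pairing visible in the table: for each dataset, a `toLocalIterator` stage materializes the scan or sort, and a small 1-task `toStream` stage then fetches the final output, its Shuffle Read matching the preceding stage's Shuffle Write byte for byte.

The physical plans from each stage's details pane follow; stage 511813 recorded no plan.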
Stage 511812 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(3) Sort [fractile#94017829 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(fractile#94017829 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516528]
   +- *(2) Project [fractile#94017829, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
      +- *(2) BroadcastHashJoin [cap_description#94144890], [description#93880533], Inner, BuildRight, false
         :- *(2) Project [fractile#94017829, cap#94017830 AS cap_description#94144890, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
         :  +- *(2) Filter ((isnotnull(fractile#94017829) AND NOT (fractile#94017829 = -1)) AND isnotnull(cap#94017830))
         :     +- InMemoryTableScan [##94017838, cap#94017830, fractile#94017829, growth#94017833, leverage#94017834, max_date#94017840, min_date#94017839, mo...
```
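The `Exchange rangepartitioning(..., 200)` node here, and the 200/200-task stages further down, come straight from `spark.sql.shuffle.partitions` (default 200): a global sort range-partitions its input regardless of data size. A sketch of the mechanism and the obvious tuning knob, assuming nothing beyond stock Spark:

```scala
import org.apache.spark.sql.SparkSession

// Sketch, not the application's code: any global orderBy compiles to
// Exchange rangepartitioning(<sort keys>, spark.sql.shuffle.partitions).
val spark = SparkSession.builder().getOrCreate()
import spark.implicits._

val df = Seq((3, "c"), (1, "a"), (2, "b")).toDF("fractile", "label")

df.orderBy($"fractile".asc_nulls_first).explain()
// *(1) Sort [fractile#.. ASC NULLS FIRST], true, 0
// +- Exchange rangepartitioning(fractile#.. ASC NULLS FIRST, 200), ...

// For inputs this small (a few KiB per stage above), lowering the fan-out
// avoids scheduling hundreds of near-empty tasks:
spark.conf.set("spark.sql.shuffle.partitions", "8")
```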
Stage 511811 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(1) Project [CASE WHEN ((fractile#94017781 = NA) OR (fractile#94017781 = null)) THEN null ELSE cast(fractile#94017781 as int) END AS fractile#94017829, CASE WHEN (cap#94017782 = null) THEN null ELSE cap#94017782 END AS cap#94017830, CASE WHEN ((size#94017783 = NA) OR (size#94017783 = null)) THEN null ELSE cast(size#94017783 as float) END AS size#94017831, CASE WHEN ((value#94017784 = NA) OR (value#94017784 = null)) THEN null ELSE cast(value#94017784 as float) END AS value#94017832, CASE WHEN ((growth#94017785 = NA) OR (growth#94017785 = null)) THEN null ELSE cast(growth#94017785 as float) END AS growth#94017833, CASE WHEN ((leverage#94017786 = NA) OR (leverage#94017786 = null)) THEN null ELSE cast(leverage#94017786 as float) END AS leverage#94017834, CASE WHEN ((volatility#94017787 = NA) OR (volatility#94017787 = null)) THEN null ELSE cast(volatility#94017787 as float) END AS volatility#94017835, CASE WHEN ((momentum#94017788 = NA) OR (momentum#94017788 = null)) THEN null ELSE cast(momentum#94017788 as fl...
```
Stage 511810 (toLocalIterator at SparkDataStreamBuilder.scala:39): same plan as stage 511811.
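Stages 511811 and 511810, like most scans below, share the same projection shape: every string column read from CSV passes through `CASE WHEN ((c = NA) OR (c = null)) THEN null ELSE cast(c as ...) END`. The generating code is not in this dump; a plausible DataFrame-side equivalent is:

```scala
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions._

// Sketch only: one way to produce the CASE WHEN ... ELSE cast(...) END
// projections seen in these plans. CSV columns arrive as strings, and the
// "NA" sentinel is mapped to null before the cast.
def naCast(c: Column, to: String): Column =
  when(c === "NA" || c.isNull, lit(null)).otherwise(c.cast(to))

// Hypothetical usage over the fractile columns named in stage 511811's plan:
def cleanFractiles(raw: DataFrame): DataFrame =
  raw.select(
    naCast(col("fractile"), "int").as("fractile"),
    col("cap"),
    naCast(col("size"), "float").as("size"),
    naCast(col("value"), "float").as("value")
  )
```

Reading the files with `spark.read.option("nullValue", "NA")` and an explicit schema would push the same cleanup into the CSV scan itself.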
Stage 511809 (toStream at SparkDataStreamBuilder.scala:39); the details pane concatenates the cache-build plan (truncated by the UI) with the executed plan:

```
*(1) Project [CASE WHEN ((overall#94014066 = NA) OR (overall#94014066 = null)) THEN null ELSE cast(overall#94014066 as int) END AS overall#94014252, CASE WHEN ((cap#94014067 = NA) OR (cap#94014067 = null)) THEN null ELSE cast(cap#94014067 as float) END AS cap#94014254, CASE WHEN ((retIC#94014068 = NA) OR (retIC#94014068 = null)) THEN null ELSE cast(retIC#94014068 as float) END AS retIC#94014256, CASE WHEN ((resretIC#94014069 = NA) OR (resretIC#94014069 = null)) THEN null ELSE cast(resretIC#94014069 as float) END AS resretIC#94014258, CASE WHEN ((size#94014070 = NA) OR (size#94014070 = null)) THEN null ELSE cast(size#94014070 as float) END AS size#94014260, CASE WHEN ((value#94014071 = NA) OR (value#94014071 = null)) THEN null ELSE cast(value#94014071 as float) END AS value#94014263, CASE WHEN ((growth#94014072 = NA) OR (growth#94014072 = null)) THEN null ELSE cast(growth#94014072 as float) END AS growth#94014265, CASE WHEN ((leverage#94014073 = NA) OR (leverage#94014073 = null)) THEN null ELSE cast(leverag...

*(1) Project [yield#94014273, volatility#94014269, momentum#94014271, size#94014260, value#94014263, growth#94014265, leverage#94014267]
+- *(1) Filter (isnotnull(cap#94014254) AND (cap#94014254 = 3.0))
   +- InMemoryTableScan [cap#94014254, growth#94014265, leverage#94014267, momentum#94014271, size#94014260, value#94014263, volatility#94014269, yield#94014273], [isnotnull(cap#94014254), (cap#94014254 = 3.0)]
      +- InMemoryRelation [overall#94014252, cap#94014254, retIC#94014256, resretIC#94014258, size#94014260, value#94014263, growth#94014265, leverage#94014267, volatility#94014269, momentum#94014271, yield#94014273, numcos#94014276, numdates#94014278, annual_bmret#94014280, annual_ret#94014282, std_ret#94014284, Sharpe_ret#94014286, PctPos_ret#94014288, TR_ret#94014290, IR_ret#94014291, annual_resret#94014293, std_resret#94014294, Sharpe_resret#94014295, PctPos_resret#94014296, ... 9 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
         +- *(1) Project [CASE WHEN ((ove...
```
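The `InMemoryRelation ... StorageLevel(disk, memory, deserialized, 1 replicas)` nodes here and in the plans below indicate intermediate tables cached at the Dataset default storage level; later stages read them back through `InMemoryTableScan`. A sketch of the pattern, with path and names hypothetical:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

val spark = SparkSession.builder().getOrCreate()

// Sketch: Dataset.cache() / persist() is what produces
// "InMemoryRelation ... StorageLevel(disk, memory, deserialized, 1 replicas)".
val stats = spark.read.option("header", "true").csv("/path/to/stats.csv") // hypothetical path

stats.persist(StorageLevel.MEMORY_AND_DISK) // the Dataset default; same as stats.cache()
stats.count() // the first action materializes the cache; later queries hit InMemoryTableScan
```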
Stage 511808 (broadcast exchange, runId 49b027cb-804f-4eff-88f2-ad3da33db1bd):

```
*(1) Project [CASE WHEN ((cap#93880496 = NA) OR (cap#93880496 = null)) THEN null ELSE cast(cap#93880496 as int) END AS cap#93880528, CASE WHEN (sort#93880498 = null) THEN null ELSE sort#93880498 END AS sort#93880531, CASE WHEN (description#93880500 = null) THEN null ELSE description#93880500 END AS description#93880533, CASE WHEN ((universe#93880502 = NA) OR (universe#93880502 = null)) THEN null ELSE cast(universe#93880502 as int) END AS universe#93880535]
+- FileScan csv [cap#93880496,sort#93880498,description#93880500,universe#93880502] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/curate/curate_cap.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<cap:string,sort:string,description:string,universe:string>
```
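Stage 511808 is not an application call site at all: it is Spark materializing the broadcast side of the join in stage 511812, the 624 B `curate_cap.csv` dimension, on a separate thread (hence the `SQLExecution.withThreadLocalCaptured` frame). A sketch of the equivalent join, with the fact-table handle hypothetical:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.broadcast

val spark = SparkSession.builder().getOrCreate()

// The dimension side from this stage's FileScan:
val caps = spark.read
  .option("header", "true")
  .csv("file:/srv/plusamp/data/default/ea-market/curate/curate_cap.csv")

// "fractiles" stands in for the cached fact table of stage 511812 (hypothetical name).
val fractiles = spark.table("fractiles")

// broadcast() makes the BroadcastHashJoin explicit; Spark also picks it
// automatically for sides under spark.sql.autoBroadcastJoinThreshold (10 MB default).
val joined = fractiles.join(broadcast(caps),
  fractiles("cap_description") === caps("description"))
```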
Stage 511807 (toStream at SparkDataStreamBuilder.scala:39); cache-build plan (truncated) followed by the executed plan:

```
*(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = null)) THEN null ELSE cast(annual_ret#94005060 as float) END AS annual_ret#94005103, CASE WHEN ((annual_resret#94005061 = NA) OR (annual_resret#94005061 = null)) THEN null ELSE cast(annual_resret#94005061 as float) END AS annual_resret#94005104, CASE WHEN ((numcos#94005062 = NA) OR (numcos#94005062 = null)) THEN null ELSE cast(numcos#94005062 as float) END AS numcos#94005105]
+- FileScan csv [fractile#94005058,cap#94005059,annual_ret#94005060,annual_resret#94005061,numcos#94005062] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters:...

*(1) Project [fractile#94005101 AS fractile#94005106, annual_ret#94005103 AS annual_ret#94005109, annual_resret#94005104 AS annual_resret#94005115, numcos#94005105 AS numcos#94005116]
+- *(1) Filter (isnotnull(cap#94005102) AND (cast(cap#94005102 as string) = 3))
   +- *(1) ColumnarToRow
      +- InMemoryTableScan [annual_resret#94005104, annual_ret#94005103, cap#94005102, fractile#94005101, numcos#94005105], [isnotnull(cap#94005102), (cast(cap#94005102 as string) = 3)]
         +- InMemoryRelation [fractile#94005101, cap#94005102, annual_ret#94005103, annual_resret#94005104, numcos#94005105], StorageLevel(disk, memory, deserialized, 1 replicas)
            +- *(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = nul...
```
Stage 511806 (toStream at SparkDataStreamBuilder.scala:39); cache-build plan followed by the executed plan:

```
*(1) Project [CASE WHEN (group#93875754 = null) THEN null ELSE group#93875754 END AS group#93875766, CASE WHEN (Category#93875755 = null) THEN null ELSE Category#93875755 END AS Category#93875767, CASE WHEN (Label#93875756 = null) THEN null ELSE Label#93875756 END AS Label#93875768]
+- FileScan csv [group#93875754,Category#93875755,Label#93875756] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/common/map_stats.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<group:string,Category:string,Label:string>

*(2) Project [Category#93875767, Label#93875768, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325]
+- *(2) BroadcastHashJoin [group#94006308], [group#93875766], Inner, BuildLeft, false
   :- BroadcastExchange HashedRelationBroadcastMode(List(input[0, string, false]),false), [id=#7505834]
   :  +- *(1) Filter isnotnull(group#94006308)
   :     +- InMemoryTableScan [group#94006308, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325], [isnotnull(group#94006308)]
   :        +- InMemoryRelation [group#94006308, annual_bmret#94006309, annual_ret#94006310, std_ret#94006311, Sharpe_ret#94006312, PctPos_ret#94006313, TR_ret#94006314, IR_ret#94006315, annual_resret#94006316, std_resret#94006317, Sharpe_res...
```
Stage 511804 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(1) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505747]
   +- InMemoryTableScan [date#94006072, drawdown_gross#94006078, drawdown_net#94006085]
      +- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
         +- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END...
```
Stage 511801 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(2) Sort [date#94005402 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94005402 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505769]
   +- *(1) Project [date#94005402, (drawdown_length_gross#94005420 * -1) AS drawdown_length_gross#94005473]
      +- InMemoryTableScan [date#94005402, drawdown_length_gross#94005420]
         +- InMemoryRelation [date#94005402, allret#94005410, allcum#94005415, drawdown_gross#94005419, drawdown_length_gross#94005420, allretnet#94005421, allcumnet#94005422, drawdown_net#94005423, drawdown_length_net#94005424, numcos#94005425, largecum#94005427, largecumnet#94005429, largeret#94005431, largeretnet#94005433, midcum#94005435, midcumnet#94005439, midret#94005440, midretnet#94005443, smallcum#94005445, smallcumnet#94005446, smallret#94005449, smallretnet#94005450], StorageLevel(disk, memory, deserialized, 1 replicas)
            +- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN (...
```
Stage 511798 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516472]
   +- *(1) Project [date#94006072, smallret#94006114 AS daily_gross#94144989, smallretnet#94006116 AS daily_net#94144990]
      +- InMemoryTableScan [date#94006072, smallret#94006114, smallretnet#94006116]
         +- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
            +- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS dat...
```
Stage 511797 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
```
Stage 511795 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516417]
   +- *(1) Project [date#94006072, smallcum#94006109 AS cum_gross#94144955, smallcumnet#94006111 AS cum_net#94144956]
      +- InMemoryTableScan [date#94006072, smallcum#94006109, smallcumnet#94006111]
         +- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
            +- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94...
```
Stage 511794 (toLocalIterator at SparkDataStreamBuilder.scala:39): same plan as stage 511797.
Stage 511793 (toStream at SparkDataStreamBuilder.scala:39):

```
*(2) Sort [date#94010641 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94010641 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516445]
   +- *(1) Project [date#94010643 AS date#94010641, numcos#94010669 AS numcos#94010663]
      +- *(1) Filter (isnotnull(cap#94010646) AND (cast(cap#94010646 as string) = 3))
         +- InMemoryTableScan [cap#94010646, date#94010643, numcos#94010669], [isnotnull(cap#94010646), (cast(cap#94010646 as string) = 3)]
            +- InMemoryRelation [date#94010643, cap#94010646, ret#94010650, resret#94010656, retnet#94010660, turnover#94010664, numcos#94010669, coverage#94010673, benchmark#94010677, excess_ret#94010681, excess_resret#94010683, excess_retnet#94010698], StorageLevel(disk, memory, deserialized, 1 replicas)
               +- *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS ...
```
Stage 511792 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS cap#94010646, CASE WHEN ((ret#94010361 = NA) OR (ret#94010361 = null)) THEN null ELSE cast(ret#94010361 as float) END AS ret#94010650, CASE WHEN ((resret#94010362 = NA) OR (resret#94010362 = null)) THEN null ELSE cast(resret#94010362 as float) END AS resret#94010656, CASE WHEN ((retnet#94010363 = NA) OR (retnet#94010363 = null)) THEN null ELSE cast(retnet#94010363 as float) END AS retnet#94010660, CASE WHEN ((turnover#94010364 = NA) OR (turnover#94010364 = null)) THEN null ELSE cast(turnover#94010364 as float) END AS turnover#94010664, CASE WHEN ((numcos#94010365 = NA) OR (numcos#94010365 = null)) THEN null ELSE cast(numcos#94010365 as float) END AS numcos#94010669, CASE WHEN ((coverage#94010366 = NA) OR (coverage#94010366 = null)) THEN null ELSE cast(coverage#94010366 as float) END AS cover...
```
Stage 511791 (toLocalIterator at SparkDataStreamBuilder.scala:39): same plan as stage 511797.
Stage 511790 (toLocalIterator at SparkDataStreamBuilder.scala:39): same plan as stage 511797.
Stage 511788 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(3) Sort [sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506039]
   +- *(2) Project [sector_id#94010645, numcos#94010654, numdates#94010659, sort#93880530, description#93880532, universe#94011004, coverage#94010916]
      +- *(2) BroadcastHashJoin [sector_id#94010645], [sector_id#93880529], Inner, BuildLeft, false
         :- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [id=#7506032]
         :  +- *(1) Project [sector_id#94010645, numcos#94010654, numdates#94010659, coverage#94010916, round((cast(numcos#94010654 as double) / cast(coverage#94010916 as double)), 0) AS universe#94011004]
         :     +- *(1) Filter isnotnull(sector_id#94010645)
         :        +- InMemoryTableScan [coverage#94010916, numcos#94010654, numdates#94010659, sector_id#94010645], [isnotnull(sector_id#94010645)]
...
```
Stage 511786 (toLocalIterator at SparkDataStreamBuilder.scala:39): same plan as stage 511792.
Stage 511785 (toStream at SparkDataStreamBuilder.scala:39); cache-build plan (truncated) followed by the executed plan:

```
*(1) Project [CASE WHEN ((cap#94010439 = NA) OR (cap#94010439 = null)) THEN null ELSE cast(cap#94010439 as float) END AS cap#94010644, CASE WHEN ((retIC#94010440 = NA) OR (retIC#94010440 = null)) THEN null ELSE cast(retIC#94010440 as float) END AS retIC#94010648, CASE WHEN ((resretIC#94010441 = NA) OR (resretIC#94010441 = null)) THEN null ELSE cast(resretIC#94010441 as float) END AS resretIC#94010653, CASE WHEN ((numcos#94010442 = NA) OR (numcos#94010442 = null)) THEN null ELSE cast(numcos#94010442 as float) END AS numcos#94010657, CASE WHEN ((numdates#94010443 = NA) OR (numdates#94010443 = null)) THEN null ELSE cast(numdates#94010443 as int) END AS numdates#94010661, CASE WHEN (annual_bmret#94010444 = null) THEN null ELSE annual_bmret#94010444 END AS annual_bmret#94010666, CASE WHEN ((annual_ret#94010445 = NA) OR (annual_ret#94010445 = null)) THEN null ELSE cast(annual_ret#94010445 as float) END AS annual_ret#94010671, CASE WHEN ((std_ret#94010446 = NA) OR (std_ret#94010446 = null)) THEN null ELSE cast(st...

*(2) Project [ret_large#94013861, ret_small#94013891, (ret_large#94013861 - ret_small#94013891) AS differential#94013977]
+- *(2) BroadcastNestedLoopJoin BuildRight, Cross
   :- *(2) Project [annual_ret#94010671 AS ret_large#94013861]
   :  +- *(2) Filter (isnotnull(cap#94010644) AND (cap#94010644 = 1.0))
   :     +- InMemoryTableScan [annual_ret#94010671, cap#94010644], [isnotnull(cap#94010644), (cap#94010644 = 1.0)]
   :        +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, numdates#94010661, annual_bmret#94010666, annual_ret#94010671, std_ret#94010676, Sharpe_ret#94010680, PctPos_ret#94010684, TR_ret#94010696, IR_ret#94010697, annual_resret#94010699, std_resret#94010700, Sharpe_resret#94010701, PctPos_resret#94010702, TR_resret#94010703, IR_resret#94010704, annual_retnet#94010705, std_retnet#94010706, Sharpe_retnet#94010707, PctPos_retnet#94010720, TR_retnet#94010722, IR_retnet#94010723, ... 2 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
...
```
Stage 511783 (toLocalIterator at SparkDataStreamBuilder.scala:39):

```
*(3) Sort [sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506102]
   +- *(2) Project [cap#94010644, numcos#94010657, numdates#94010661, sort#93880531, description#93880533, universe#93880535, (cast(numcos#94010657 as double) / cast(universe#93880535 as double)) AS coverage#94010966]
      +- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
         :- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506095]
         :  +- *(1) Filter isnotnull(cap#94010644)
         :     +- InMemoryTableScan [cap#94010644, numcos#94010657, numdates#94010661], [isnotnull(cap#94010644)]
         :        +- InMemoryRelation [ca...
```
511781 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:46 | 32 ms |
1/1 | 154.0 B | |||
511780 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Project [cap#94013946, turnover#94010724, (1.0 / cast(turnover#94010724 as double)) AS days_hold#94013974]
+- *(3) Sort [cap_sort#94013857 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(cap_sort#94013857 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506325]
+- *(2) Project [turnover#94010724, cap_description#94013856 AS cap#94013946, cap_sort#94013857]
+- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506317]
: +- *(1) Filter isnotnull(cap#94010644)
: +- InMemoryTableScan [cap#94010644, turnover#94010724], [isnotnull(cap#94010644)]
: +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, num...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:46 | 16 ms |
2/2 | 656.0 B | 154.0 B | ||
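`(1.0 / cast(turnover#94010724 as double)) AS days_hold` in the plan above is ordinary `withColumn` arithmetic; Spark inserts the cast itself when a float column is divided into a double literal. A sketch with made-up rows:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit}

val spark = SparkSession.builder.master("local[*]").getOrCreate()
import spark.implicits._

val turnover = Seq(("Mid", 0.05f), ("Small", 0.10f)).toDF("cap", "turnover") // made-up rows

// days_hold = 1.0 / turnover, e.g. 5% daily turnover => a 20-day holding period
turnover.withColumn("days_hold", lit(1.0) / col("turnover")).show()
```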
511778 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE cast(turnover#94014098 as float) END AS turnover#94014305, CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE (1.0 / cast(cast(turnover#94014098 as float) as double)) END AS days_hold#94014368]
+- *(1) Filter ((isnotnull(cap#94014067) AND NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false)) AND (cast(cap#94014067 as float) = 0.0))
+- FileScan csv [cap#94014067,turnover#94014098] Batched: false, DataFilters: [isnotnull(cap#94014067), NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false), (c..., Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters: [IsNotNull(cap)], ReadSchema: struct<cap:string,turnover:string>
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:46 | 7 ms |
1/1 | 368.0 B | |||
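The `CASE WHEN ((x = NA) OR (x = null)) THEN null ELSE cast(x ...)` projections that dominate these plans are an NA-scrubbing pass over all-string CSV columns (note `ReadSchema: struct<cap:string,turnover:string>` above). A sketch of a helper that compiles to exactly this shape; the helper itself is an assumption, not the application's code:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit, when}

val spark = SparkSession.builder.master("local[*]").getOrCreate()
import spark.implicits._

// Hypothetical helper: map the literal tokens "NA"/"null" to real nulls, then cast.
def cleanFloat(c: String) =
  when(col(c) === "NA" || col(c) === "null", lit(null))
    .otherwise(col(c).cast("float"))
    .as(c)

val raw = Seq(("0.0", "NA"), ("0.0", "0.05")).toDF("cap", "turnover") // made-up rows
raw.select(cleanFloat("cap"), cleanFloat("turnover")).show() // NA -> null, rest -> float
```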
511777 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:46 | 33 ms |
1/1 | 647.0 B | |||
511776 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [year#94014248 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(year#94014248 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506265]
+- *(1) Project [year#94014248, turnover#94014292, (1.0 / cast(turnover#94014292 as double)) AS days_hold#94014331]
+- *(1) Filter isnotnull(turnover#94014292)
+- InMemoryTableScan [turnover#94014292, year#94014248], [isnotnull(turnover#94014292)]
+- InMemoryRelation [year#94014248, retIC#94014249, resretIC#94014250, numcos#94014251, numdates#94014253, annual_bmret#94014255, annual_ret#94014257, std_ret#94014259, Sharpe_ret#94014261, PctPos_ret#94014262, TR_ret#94014264, IR_ret#94014266, annual_resret#94014268, std_resret#94014270, Sharpe_resret#94014272, PctPos_resret#94014274, TR_resret#94014275, IR_resret#94014277, annual_retnet#94014279, std_retnet#94014281, Sharpe_retnet#94014283, PctPos_retnet#94014285, TR_retnet#94014287, IR_retnet#94014289, turnover#94014292], StorageLevel(disk, memory, deserialized, 1 replicas)
...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:46 | 33 ms |
9/9 | 4.1 KiB | 647.0 B | ||
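`InMemoryRelation [...] StorageLevel(disk, memory, deserialized, 1 replicas)` marks a cached DataFrame; that storage level is what `Dataset.cache()` / `persist(StorageLevel.MEMORY_AND_DISK)` produces, and later stages read it back through `InMemoryTableScan`. A sketch:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

val spark = SparkSession.builder.master("local[*]").getOrCreate()
import spark.implicits._

val yearly = Seq((2024, 0.05f), (2025, 0.04f)).toDF("year", "turnover") // made-up rows
yearly.persist(StorageLevel.MEMORY_AND_DISK) // = StorageLevel(disk, memory, deserialized, 1 replica)
yearly.count()                               // materializes the cache
yearly.filter($"turnover".isNotNull).explain() // now plans an InMemoryTableScan
```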
511774 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (group#93875754 = null) THEN null ELSE group#93875754 END AS group#93875766, CASE WHEN (Category#93875755 = null) THEN null ELSE Category#93875755 END AS Category#93875767, CASE WHEN (Label#93875756 = null) THEN null ELSE Label#93875756 END AS Label#93875768]
+- FileScan csv [group#93875754,Category#93875755,Label#93875756] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/common/map_stats.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<group:string,Category:string,Label:string>
*(2) Project [Category#93875767, Label#93875768, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325]
+- *(2) BroadcastHashJoin [group#94006308], [group#93875766], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(input[0, string, false]),false), [id=#7505834]
: +- *(1) Filter isnotnull(group#94006308)
: +- InMemoryTableScan [group#94006308, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325], [isnotnull(group#94006308)]
: +- InMemoryRelation [group#94006308, annual_bmret#94006309, annual_ret#94006310, std_ret#94006311, Sharpe_ret#94006312, PctPos_ret#94006313, TR_ret#94006314, IR_ret#94006315, annual_resret#94006316, std_resret#94006317, Sharpe_res...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:43 | 32 ms |
1/1 | 4.4 KiB | |||
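This stage joins cached group stats against the small `map_stats.csv` lookup through a `BroadcastHashJoin`. At this size Spark picks the broadcast automatically (under the default 10 MB `spark.sql.autoBroadcastJoinThreshold`); an explicit hint yields the same join, modulo build side. Sketch with invented rows:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.broadcast

val spark = SparkSession.builder.master("local[*]").getOrCreate()
import spark.implicits._

val stats  = Seq(("ret", 0.12), ("resret", 0.08)).toDF("group", "annual_ret")
val labels = Seq(("ret", "Returns", "Gross"), ("resret", "Returns", "Residual"))
  .toDF("group", "Category", "Label") // stand-in for map_stats.csv

// Plans a BroadcastHashJoin; the plan above shows BuildLeft because there
// the small side happened to be the left input.
stats.join(broadcast(labels), Seq("group"), "inner").explain()
```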
511773 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:44 | 19 ms |
1/1 | 47.6 KiB | |||
511772 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516276]
+- *(1) Project [date#94006072, midret#94006104 AS daily_gross#94140035, midretnet#94006107 AS daily_net#94140036]
+- InMemoryTableScan [date#94006072, midret#94006104, midretnet#94006107]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#940053...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:44 | 0.7 s |
200/200 | 47.6 KiB | 47.6 KiB | ||
511771 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:42 | 49 ms |
1/1 | 155.6 KiB | 47.6 KiB | ||
511770 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = null)) THEN null ELSE cast(annual_ret#94005060 as float) END AS annual_ret#94005103, CASE WHEN ((annual_resret#94005061 = NA) OR (annual_resret#94005061 = null)) THEN null ELSE cast(annual_resret#94005061 as float) END AS annual_resret#94005104, CASE WHEN ((numcos#94005062 = NA) OR (numcos#94005062 = null)) THEN null ELSE cast(numcos#94005062 as float) END AS numcos#94005105]
+- FileScan csv [fractile#94005058,cap#94005059,annual_ret#94005060,annual_resret#94005061,numcos#94005062] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters:...
*(1) Project [fractile#94005101 AS fractile#94005106, annual_ret#94005103 AS annual_ret#94005109, annual_resret#94005104 AS annual_resret#94005115, numcos#94005105 AS numcos#94005116]
+- *(1) Filter (isnotnull(cap#94005102) AND (cast(cap#94005102 as string) = 2))
+- *(1) ColumnarToRow
+- InMemoryTableScan [annual_resret#94005104, annual_ret#94005103, cap#94005102, fractile#94005101, numcos#94005105], [isnotnull(cap#94005102), (cast(cap#94005102 as string) = 2)]
+- InMemoryRelation [fractile#94005101, cap#94005102, annual_ret#94005103, annual_resret#94005104, numcos#94005105], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = nul...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:42 | 37 ms |
1/1 | 1160.0 B | |||
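The filter `(cast(cap#94005102 as string) = 2)` above casts a float column to string on every row; that shape falls out if the predicate is written against a string (for instance through a generic, string-typed filter layer). A sketch of both forms; the numeric comparison avoids the per-row cast:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder.master("local[*]").getOrCreate()
import spark.implicits._

val fractiles = Seq((1, 2.0f), (2, 3.0f)).toDF("fractile", "cap") // made-up rows

fractiles.filter($"cap".cast("string") === "2").explain() // reproduces cast(cap as string) = 2
fractiles.filter($"cap" === 2.0f).explain()               // numeric compare, no cast
```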
511769 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:44 | 44 ms |
1/1 | 28.9 KiB | |||
511768 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94005402 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94005402 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505769]
+- *(1) Project [date#94005402, (drawdown_length_gross#94005420 * -1) AS drawdown_length_gross#94005473]
+- InMemoryTableScan [date#94005402, drawdown_length_gross#94005420]
+- InMemoryRelation [date#94005402, allret#94005410, allcum#94005415, drawdown_gross#94005419, drawdown_length_gross#94005420, allretnet#94005421, allcumnet#94005422, drawdown_net#94005423, drawdown_length_net#94005424, numcos#94005425, largecum#94005427, largecumnet#94005429, largeret#94005431, largeretnet#94005433, midcum#94005435, midcumnet#94005439, midret#94005440, midretnet#94005443, smallcum#94005445, smallcumnet#94005446, smallret#94005449, smallretnet#94005450], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN (...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:42 | 0.6 s |
200/200 | 66.3 KiB | 28.9 KiB | ||
511766 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:43 | 27 ms |
1/1 | 1236.0 B | |||
511765 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [fractile#94017829 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(fractile#94017829 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516232]
+- *(2) Project [fractile#94017829, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
+- *(2) BroadcastHashJoin [cap_description#94140226], [description#93880533], Inner, BuildRight, false
:- *(2) Project [fractile#94017829, cap#94017830 AS cap_description#94140226, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
: +- *(2) Filter ((isnotnull(fractile#94017829) AND NOT (fractile#94017829 = -1)) AND isnotnull(cap#94017830))
: +- InMemoryTableScan [##94017838, cap#94017830, fractile#94017829, growth#94017833, leverage#94017834, max_date#94017840, min_date#94017839, mo...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:43 | 59 ms |
10/10 | 1236.0 B | 1236.0 B | ||
511764 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94017781 = NA) OR (fractile#94017781 = null)) THEN null ELSE cast(fractile#94017781 as int) END AS fractile#94017829, CASE WHEN (cap#94017782 = null) THEN null ELSE cap#94017782 END AS cap#94017830, CASE WHEN ((size#94017783 = NA) OR (size#94017783 = null)) THEN null ELSE cast(size#94017783 as float) END AS size#94017831, CASE WHEN ((value#94017784 = NA) OR (value#94017784 = null)) THEN null ELSE cast(value#94017784 as float) END AS value#94017832, CASE WHEN ((growth#94017785 = NA) OR (growth#94017785 = null)) THEN null ELSE cast(growth#94017785 as float) END AS growth#94017833, CASE WHEN ((leverage#94017786 = NA) OR (leverage#94017786 = null)) THEN null ELSE cast(leverage#94017786 as float) END AS leverage#94017834, CASE WHEN ((volatility#94017787 = NA) OR (volatility#94017787 = null)) THEN null ELSE cast(volatility#94017787 as float) END AS volatility#94017835, CASE WHEN ((momentum#94017788 = NA) OR (momentum#94017788 = null)) THEN null ELSE cast(momentum#94017788 as fl...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 14 ms |
1/1 | 3.0 KiB | 1236.0 B | ||
511763 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:43 | 49 ms |
1/1 | 42.6 KiB | |||
511762 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505747]
+- InMemoryTableScan [date#94006072, drawdown_gross#94006078, drawdown_net#94006085]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 2 s |
200/200 | 96.9 KiB | 42.6 KiB | ||
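Several `toLocalIterator` stages on this page (511772, 511768, 511762, 511759) run 200 tasks each to sort well under 100 KiB, so the 0.6 s–2 s durations are plausibly dominated by task scheduling rather than data. The 200 comes from the default `spark.sql.shuffle.partitions`, visible as `rangepartitioning(..., 200)` in the plans; lowering it is an untested but standard mitigation for small, driver-bound result sets:

```scala
import org.apache.spark.sql.SparkSession

// Sketch: fewer shuffle partitions for a workload whose sorted results are tiny.
// 8 is an illustrative value, not a tuned recommendation.
val spark = SparkSession.builder
  .master("local[*]")
  .config("spark.sql.shuffle.partitions", "8") // default is 200
  .getOrCreate()
```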
511760 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:43 | 52 ms |
1/1 | 42.2 KiB | |||
511759 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516120]
+- *(1) Project [date#94006072, midcum#94006098 AS cum_gross#94140030, midcumnet#94006101 AS cum_net#94140031]
+- InMemoryTableScan [date#94006072, midcum#94006098, midcumnet#94006101]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, ...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:42 | 1 s |
200/200 | 42.2 KiB | 42.2 KiB | ||
511758 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 68 ms |
1/1 | 155.6 KiB | 42.2 KiB | ||
511757 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 20 ms |
1/1 | 155.6 KiB | |||
511756 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94017781 = NA) OR (fractile#94017781 = null)) THEN null ELSE cast(fractile#94017781 as int) END AS fractile#94017829, CASE WHEN (cap#94017782 = null) THEN null ELSE cap#94017782 END AS cap#94017830, CASE WHEN ((size#94017783 = NA) OR (size#94017783 = null)) THEN null ELSE cast(size#94017783 as float) END AS size#94017831, CASE WHEN ((value#94017784 = NA) OR (value#94017784 = null)) THEN null ELSE cast(value#94017784 as float) END AS value#94017832, CASE WHEN ((growth#94017785 = NA) OR (growth#94017785 = null)) THEN null ELSE cast(growth#94017785 as float) END AS growth#94017833, CASE WHEN ((leverage#94017786 = NA) OR (leverage#94017786 = null)) THEN null ELSE cast(leverage#94017786 as float) END AS leverage#94017834, CASE WHEN ((volatility#94017787 = NA) OR (volatility#94017787 = null)) THEN null ELSE cast(volatility#94017787 as float) END AS volatility#94017835, CASE WHEN ((momentum#94017788 = NA) OR (momentum#94017788 = null)) THEN null ELSE cast(momentum#94017788 as fl...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 18 ms |
1/1 | 3.0 KiB | |||
511755 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 16 ms |
1/1 | 155.6 KiB | |||
511754 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((overall#94014066 = NA) OR (overall#94014066 = null)) THEN null ELSE cast(overall#94014066 as int) END AS overall#94014252, CASE WHEN ((cap#94014067 = NA) OR (cap#94014067 = null)) THEN null ELSE cast(cap#94014067 as float) END AS cap#94014254, CASE WHEN ((retIC#94014068 = NA) OR (retIC#94014068 = null)) THEN null ELSE cast(retIC#94014068 as float) END AS retIC#94014256, CASE WHEN ((resretIC#94014069 = NA) OR (resretIC#94014069 = null)) THEN null ELSE cast(resretIC#94014069 as float) END AS resretIC#94014258, CASE WHEN ((size#94014070 = NA) OR (size#94014070 = null)) THEN null ELSE cast(size#94014070 as float) END AS size#94014260, CASE WHEN ((value#94014071 = NA) OR (value#94014071 = null)) THEN null ELSE cast(value#94014071 as float) END AS value#94014263, CASE WHEN ((growth#94014072 = NA) OR (growth#94014072 = null)) THEN null ELSE cast(growth#94014072 as float) END AS growth#94014265, CASE WHEN ((leverage#94014073 = NA) OR (leverage#94014073 = null)) THEN null ELSE cast(leverag...
*(1) Project [yield#94014273, volatility#94014269, momentum#94014271, size#94014260, value#94014263, growth#94014265, leverage#94014267]
+- *(1) Filter (isnotnull(cap#94014254) AND (cap#94014254 = 2.0))
+- InMemoryTableScan [cap#94014254, growth#94014265, leverage#94014267, momentum#94014271, size#94014260, value#94014263, volatility#94014269, yield#94014273], [isnotnull(cap#94014254), (cap#94014254 = 2.0)]
+- InMemoryRelation [overall#94014252, cap#94014254, retIC#94014256, resretIC#94014258, size#94014260, value#94014263, growth#94014265, leverage#94014267, volatility#94014269, momentum#94014271, yield#94014273, numcos#94014276, numdates#94014278, annual_bmret#94014280, annual_ret#94014282, std_ret#94014284, Sharpe_ret#94014286, PctPos_ret#94014288, TR_ret#94014290, IR_ret#94014291, annual_resret#94014293, std_resret#94014294, Sharpe_resret#94014295, PctPos_resret#94014296, ... 9 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN ((ove...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 32 ms |
1/1 | 3.0 KiB | |||
511753 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 16 ms |
1/1 | 1313.0 B | |||
511752 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506039]
+- *(2) Project [sector_id#94010645, numcos#94010654, numdates#94010659, sort#93880530, description#93880532, universe#94011004, coverage#94010916]
+- *(2) BroadcastHashJoin [sector_id#94010645], [sector_id#93880529], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [id=#7506032]
: +- *(1) Project [sector_id#94010645, numcos#94010654, numdates#94010659, coverage#94010916, round((cast(numcos#94010654 as double) / cast(coverage#94010916 as double)), 0) AS universe#94011004]
: +- *(1) Filter isnotnull(sector_id#94010645)
: +- InMemoryTableScan [coverage#94010916, numcos#94010654, numdates#94010659, sector_id#94010645], [isnotnull(sector_id#94010645)]
...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 55 ms |
12/12 | 10.6 KiB | 1313.0 B | ||
511750 | default | broadcast exchange (runId 43879a86-edea-4b24-808b-7a27fff026e0) $anonfun$withThreadLocalCaptured$1 at FutureTask.java:264
RDD: *(1) Project [CASE WHEN ((cap#93880496 = NA) OR (cap#93880496 = null)) THEN null ELSE cast(cap#93880496 as int) END AS cap#93880528, CASE WHEN (sort#93880498 = null) THEN null ELSE sort#93880498 END AS sort#93880531, CASE WHEN (description#93880500 = null) THEN null ELSE description#93880500 END AS description#93880533, CASE WHEN ((universe#93880502 = NA) OR (universe#93880502 = null)) THEN null ELSE cast(universe#93880502 as int) END AS universe#93880535]
+- FileScan csv [cap#93880496,sort#93880498,description#93880500,universe#93880502] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/curate/curate_cap.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<cap:string,sort:string,description:string,universe:string>
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:185) java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 18 ms |
1/1 | 624.0 B | |||
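Stage 511750 is the one entry here not submitted from `SparkDataStreamBuilder`: its stack (`SQLExecution.withThreadLocalCaptured` on a `FutureTask`) is what an asynchronous broadcast-exchange job looks like; Spark materializes the small `curate_cap.csv` side of the join on a separate thread pool before the dependent stages run. A sketch of the scan it is building (all-string `ReadSchema`, as in the plan; the header option is an assumption):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder.master("local[*]").getOrCreate()

// cap, sort, description, universe arrive as strings and are cleaned/cast
// in the Project node above before being broadcast to the join.
val curateCap = spark.read
  .option("header", "true") // assumption: the CSV carries a header row
  .csv("/srv/plusamp/data/default/ea-market/curate/curate_cap.csv")
```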
511749 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94010641 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94010641 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7516160]
+- *(1) Project [date#94010643 AS date#94010641, numcos#94010669 AS numcos#94010663]
+- *(1) Filter (isnotnull(cap#94010646) AND (cast(cap#94010646 as string) = 2))
+- InMemoryTableScan [cap#94010646, date#94010643, numcos#94010669], [isnotnull(cap#94010646), (cast(cap#94010646 as string) = 2)]
+- InMemoryRelation [date#94010643, cap#94010646, ret#94010650, resret#94010656, retnet#94010660, turnover#94010664, numcos#94010669, coverage#94010673, benchmark#94010677, excess_ret#94010681, excess_resret#94010683, excess_retnet#94010698], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS ...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 26 ms |
1/1 | ||||
511748 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS cap#94010646, CASE WHEN ((ret#94010361 = NA) OR (ret#94010361 = null)) THEN null ELSE cast(ret#94010361 as float) END AS ret#94010650, CASE WHEN ((resret#94010362 = NA) OR (resret#94010362 = null)) THEN null ELSE cast(resret#94010362 as float) END AS resret#94010656, CASE WHEN ((retnet#94010363 = NA) OR (retnet#94010363 = null)) THEN null ELSE cast(retnet#94010363 as float) END AS retnet#94010660, CASE WHEN ((turnover#94010364 = NA) OR (turnover#94010364 = null)) THEN null ELSE cast(turnover#94010364 as float) END AS turnover#94010664, CASE WHEN ((numcos#94010365 = NA) OR (numcos#94010365 = null)) THEN null ELSE cast(numcos#94010365 as float) END AS numcos#94010669, CASE WHEN ((coverage#94010366 = NA) OR (coverage#94010366 = null)) THEN null ELSE cast(coverage#94010366 as float) END AS cover...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 26 ms |
1/1 | 262.7 KiB | |||
511747 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 42 ms |
1/1 | 318.0 B | |||
511746 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506102]
+- *(2) Project [cap#94010644, numcos#94010657, numdates#94010661, sort#93880531, description#93880533, universe#93880535, (cast(numcos#94010657 as double) / cast(universe#93880535 as double)) AS coverage#94010966]
+- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506095]
: +- *(1) Filter isnotnull(cap#94010644)
: +- InMemoryTableScan [cap#94010644, numcos#94010657, numdates#94010661], [isnotnull(cap#94010644)]
: +- InMemoryRelation [ca...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 25 ms |
3/3
| 2.7 KiB | 318.0 B | ||
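This plan is the recurring join-then-sort shape on this page: the small cached side is broadcast (BuildLeft), the join key is a float (hence the `knownfloatingpointnormalized(normalizenanandzero(...))` wrappers, which Spark inserts so NaN and -0.0 hash and compare consistently), and the final orderBy adds the `Exchange rangepartitioning`. A sketch of DataFrame code with this shape, where `capStats` and `capLabels` are assumed stand-ins for the cached aggregates and the label table:

```scala
import org.apache.spark.sql.functions.{broadcast, col}

// capStats: cached per-cap aggregates (float `cap` key); capLabels: small
// dimension table with sort/description/universe. Both names are assumptions.
val result = broadcast(capStats)
  .join(capLabels, capStats("cap") === capLabels("cap").cast("float")) // float key => normalizenanandzero in the plan
  .withColumn("coverage", col("numcos").cast("double") / col("universe").cast("double"))
  .orderBy("sort", "description") // => Exchange rangepartitioning(sort, description)
```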
511744 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 15 ms |
1/1
| 154.0 B | |||
511743 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Project [cap#94013946, turnover#94010724, (1.0 / cast(turnover#94010724 as double)) AS days_hold#94013974]
+- *(3) Sort [cap_sort#94013857 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(cap_sort#94013857 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506325]
+- *(2) Project [turnover#94010724, cap_description#94013856 AS cap#94013946, cap_sort#94013857]
+- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506317]
: +- *(1) Filter isnotnull(cap#94010644)
: +- InMemoryTableScan [cap#94010644, turnover#94010724], [isnotnull(cap#94010644)]
: +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, num...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 22 ms |
2/2
| 656.0 B | 154.0 B | ||
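The `days_hold` column in this plan is a plain derived expression, the reciprocal of turnover, with the float implicitly widened to double. Something like the following, with `byCap` standing in for the joined frame:

```scala
import org.apache.spark.sql.functions.{col, lit}

// (1.0 / cast(turnover as double)) AS days_hold, as in the Project above.
val withHold = byCap.withColumn("days_hold", lit(1.0) / col("turnover"))
```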
511741 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS cap#94010646, CASE WHEN ((ret#94010361 = NA) OR (ret#94010361 = null)) THEN null ELSE cast(ret#94010361 as float) END AS ret#94010650, CASE WHEN ((resret#94010362 = NA) OR (resret#94010362 = null)) THEN null ELSE cast(resret#94010362 as float) END AS resret#94010656, CASE WHEN ((retnet#94010363 = NA) OR (retnet#94010363 = null)) THEN null ELSE cast(retnet#94010363 as float) END AS retnet#94010660, CASE WHEN ((turnover#94010364 = NA) OR (turnover#94010364 = null)) THEN null ELSE cast(turnover#94010364 as float) END AS turnover#94010664, CASE WHEN ((numcos#94010365 = NA) OR (numcos#94010365 = null)) THEN null ELSE cast(numcos#94010365 as float) END AS numcos#94010669, CASE WHEN ((coverage#94010366 = NA) OR (coverage#94010366 = null)) THEN null ELSE cast(coverage#94010366 as float) END AS cover...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:41 | 16 ms |
1/1
| 262.7 KiB | |||
511740 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 37 ms |
1/1
| 647.0 B | |||
511739 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [year#94014248 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(year#94014248 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506265]
+- *(1) Project [year#94014248, turnover#94014292, (1.0 / cast(turnover#94014292 as double)) AS days_hold#94014331]
+- *(1) Filter isnotnull(turnover#94014292)
+- InMemoryTableScan [turnover#94014292, year#94014248], [isnotnull(turnover#94014292)]
+- InMemoryRelation [year#94014248, retIC#94014249, resretIC#94014250, numcos#94014251, numdates#94014253, annual_bmret#94014255, annual_ret#94014257, std_ret#94014259, Sharpe_ret#94014261, PctPos_ret#94014262, TR_ret#94014264, IR_ret#94014266, annual_resret#94014268, std_resret#94014270, Sharpe_resret#94014272, PctPos_resret#94014274, TR_resret#94014275, IR_resret#94014277, annual_retnet#94014279, std_retnet#94014281, Sharpe_retnet#94014283, PctPos_retnet#94014285, TR_retnet#94014287, IR_retnet#94014289, turnover#94014292], StorageLevel(disk, memory, deserialized, 1 replicas)
...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 37 ms |
9/9
| 4.1 KiB | 647.0 B | ||
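`StorageLevel(disk, memory, deserialized, 1 replicas)` in the InMemoryRelation is how `StorageLevel.MEMORY_AND_DISK` prints, so the yearly summary table is persisted once and every later query against it appears as an `InMemoryTableScan` with its filters evaluated against the cached batches. A sketch, with `yearlyStats` as an assumed name:

```scala
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.storage.StorageLevel

// MEMORY_AND_DISK is (disk, memory, deserialized, 1 replicas) -- the exact
// storage level the plan reports for the cached relation.
val yearly = yearlyStats.persist(StorageLevel.MEMORY_AND_DISK)

// Reproduces the shape of stage 511739: filter + project + global sort,
// all reading from the InMemoryTableScan rather than the source files.
val holdByYear = yearly
  .filter(col("turnover").isNotNull)
  .select(col("year"), col("turnover"), (lit(1.0) / col("turnover")).as("days_hold"))
  .orderBy("year")
```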
511737 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((cap#94010439 = NA) OR (cap#94010439 = null)) THEN null ELSE cast(cap#94010439 as float) END AS cap#94010644, CASE WHEN ((retIC#94010440 = NA) OR (retIC#94010440 = null)) THEN null ELSE cast(retIC#94010440 as float) END AS retIC#94010648, CASE WHEN ((resretIC#94010441 = NA) OR (resretIC#94010441 = null)) THEN null ELSE cast(resretIC#94010441 as float) END AS resretIC#94010653, CASE WHEN ((numcos#94010442 = NA) OR (numcos#94010442 = null)) THEN null ELSE cast(numcos#94010442 as float) END AS numcos#94010657, CASE WHEN ((numdates#94010443 = NA) OR (numdates#94010443 = null)) THEN null ELSE cast(numdates#94010443 as int) END AS numdates#94010661, CASE WHEN (annual_bmret#94010444 = null) THEN null ELSE annual_bmret#94010444 END AS annual_bmret#94010666, CASE WHEN ((annual_ret#94010445 = NA) OR (annual_ret#94010445 = null)) THEN null ELSE cast(annual_ret#94010445 as float) END AS annual_ret#94010671, CASE WHEN ((std_ret#94010446 = NA) OR (std_ret#94010446 = null)) THEN null ELSE cast(st...
*(2) Project [ret_large#94013861, ret_small#94013891, (ret_large#94013861 - ret_small#94013891) AS differential#94013977]
+- *(2) BroadcastNestedLoopJoin BuildRight, Cross
:- *(2) Project [annual_ret#94010671 AS ret_large#94013861]
: +- *(2) Filter (isnotnull(cap#94010644) AND (cap#94010644 = 1.0))
: +- InMemoryTableScan [annual_ret#94010671, cap#94010644], [isnotnull(cap#94010644), (cap#94010644 = 1.0)]
: +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, numdates#94010661, annual_bmret#94010666, annual_ret#94010671, std_ret#94010676, Sharpe_ret#94010680, PctPos_ret#94010684, TR_ret#94010696, IR_ret#94010697, annual_resret#94010699, std_resret#94010700, Sharpe_resret#94010701, PctPos_resret#94010702, TR_resret#94010703, IR_resret#94010704, annual_retnet#94010705, std_retnet#94010706, Sharpe_retnet#94010707, PctPos_retnet#94010720, TR_retnet#94010722, IR_retnet#94010723, ... 2 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 22 ms |
1/1
| 424.0 B | |||
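The `BroadcastNestedLoopJoin BuildRight, Cross` in stage 511737 is what a crossJoin of two single-row frames compiles to: one row carrying the large-cap annual return, one the small-cap return, crossed so the differential can be computed as a column expression. The small-cap predicate is truncated out of the plan above, so the filter value below is illustrative; `capSummary` is an assumed name for the cached per-cap table:

```scala
import org.apache.spark.sql.functions.col

val large = capSummary.filter(col("cap") === 1.0f) // cap = 1.0, as in the plan
  .select(col("annual_ret").as("ret_large"))
val small = capSummary.filter(col("cap") === 3.0f) // illustrative: the small-cap branch is truncated above
  .select(col("annual_ret").as("ret_small"))

// Single-row x single-row cross join => BroadcastNestedLoopJoin ... Cross.
val diff = large.crossJoin(small)
  .withColumn("differential", col("ret_large") - col("ret_small"))
```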
511736 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE cast(turnover#94014098 as float) END AS turnover#94014305, CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE (1.0 / cast(cast(turnover#94014098 as float) as double)) END AS days_hold#94014368]
+- *(1) Filter ((isnotnull(cap#94014067) AND NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false)) AND (cast(cap#94014067 as float) = 0.0))
+- FileScan csv [cap#94014067,turnover#94014098] Batched: false, DataFilters: [isnotnull(cap#94014067), NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false), (c..., Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters: [IsNotNull(cap)], ReadSchema: struct<cap:string,turnover:string>
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426) java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290) java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020) java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656) java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594) java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:41 | 8 ms |
1/1
| 368.0 B | |||
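The `FileScan csv` node in stage 511736 is a useful read of what the CSV source can and cannot do: the whole predicate appears under DataFilters, but only `IsNotNull(cap)` makes it into PushedFilters, because the string-typed CSV reader cannot push the NA/null normalization or the numeric comparison into the scan. A hedged reconstruction of the query (path and header option are assumptions):

```scala
import org.apache.spark.sql.functions.{col, lit}

val raw = spark.read.option("header", "true").csv("path/to/rank_history.csv") // illustrative path

// Everything below shows up as DataFilters; only IsNotNull(cap) is pushed
// into the scan itself (PushedFilters: [IsNotNull(cap)]).
val smallCapTurnover = raw
  .filter(col("cap").isNotNull &&
          !(col("cap") === "NA" || col("cap") === "null") &&
          col("cap").cast("float") === 0.0f)
  .select(col("turnover").cast("float").as("turnover"),
          (lit(1.0) / col("turnover").cast("float")).as("days_hold"))
```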
511735 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (group#93875754 = null) THEN null ELSE group#93875754 END AS group#93875766, CASE WHEN (Category#93875755 = null) THEN null ELSE Category#93875755 END AS Category#93875767, CASE WHEN (Label#93875756 = null) THEN null ELSE Label#93875756 END AS Label#93875768]
+- FileScan csv [group#93875754,Category#93875755,Label#93875756] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/common/map_stats.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<group:string,Category:string,Label:string>
*(2) Project [Category#93875767, Label#93875768, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325]
+- *(2) BroadcastHashJoin [group#94006308], [group#93875766], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(input[0, string, false]),false), [id=#7505834]
: +- *(1) Filter isnotnull(group#94006308)
: +- InMemoryTableScan [group#94006308, annual_ret#94006310, Sharpe_ret#94006312, PctPos_ret#94006313, annual_resret#94006316, Sharpe_resret#94006318, PctPos_resret#94006319, annual_retnet#94006322, Sharpe_retnet#94006324, PctPos_retnet#94006325], [isnotnull(group#94006308)]
: +- InMemoryRelation [group#94006308, annual_bmret#94006309, annual_ret#94006310, std_ret#94006311, Sharpe_ret#94006312, PctPos_ret#94006313, TR_ret#94006314, IR_ret#94006315, annual_resret#94006316, std_resret#94006317, Sharpe_res...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 28 ms |
1/1
| 4.4 KiB | |||
511734 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = null)) THEN null ELSE cast(annual_ret#94005060 as float) END AS annual_ret#94005103, CASE WHEN ((annual_resret#94005061 = NA) OR (annual_resret#94005061 = null)) THEN null ELSE cast(annual_resret#94005061 as float) END AS annual_resret#94005104, CASE WHEN ((numcos#94005062 = NA) OR (numcos#94005062 = null)) THEN null ELSE cast(numcos#94005062 as float) END AS numcos#94005105]
+- FileScan csv [fractile#94005058,cap#94005059,annual_ret#94005060,annual_resret#94005061,numcos#94005062] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters:...
*(1) Project [fractile#94005101 AS fractile#94005106, annual_ret#94005103 AS annual_ret#94005109, annual_resret#94005104 AS annual_resret#94005115, numcos#94005105 AS numcos#94005116]
+- *(1) Filter (isnotnull(cap#94005102) AND (cast(cap#94005102 as string) = 1))
+- *(1) ColumnarToRow
+- InMemoryTableScan [annual_resret#94005104, annual_ret#94005103, cap#94005102, fractile#94005101, numcos#94005105], [isnotnull(cap#94005102), (cast(cap#94005102 as string) = 1)]
+- InMemoryRelation [fractile#94005101, cap#94005102, annual_ret#94005103, annual_resret#94005104, numcos#94005105], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN ((fractile#94005058 = NA) OR (fractile#94005058 = null)) THEN null ELSE cast(fractile#94005058 as float) END AS fractile#94005101, CASE WHEN ((cap#94005059 = NA) OR (cap#94005059 = null)) THEN null ELSE cast(cap#94005059 as float) END AS cap#94005102, CASE WHEN ((annual_ret#94005060 = NA) OR (annual_ret#94005060 = nul...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 34 ms |
1/1
| 1160.0 B | |||
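One detail worth flagging in this plan: the filter on the cached table is `cast(cap#94005102 as string) = 1`, i.e. the float `cap` column is stringified on every row before the comparison, which also blocks any numeric pruning on the cached batches. That shape typically comes from a filter value that arrives as a string; parsing it once keeps the comparison numeric (`capParam` is a hypothetical parameter name):

```scala
import org.apache.spark.sql.functions.col

val capParam = "1" // hypothetical request parameter

cached.filter(col("cap").cast("string") === capParam) // plan: cast(cap as string) = 1
cached.filter(col("cap") === capParam.toFloat)        // plan: cap = 1.0
```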
511733 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:40 | 35 ms |
1/1
| 42.6 KiB | |||
511732 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505747]
+- InMemoryTableScan [date#94006072, drawdown_gross#94006078, drawdown_net#94006085]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 0.6 s |
200/200
| 96.9 KiB | 42.6 KiB | ||
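Stage 511732 ran 200 tasks (the 200/200 row) to shuffle 42.6 KiB, and the similar 200/200 stages below behave the same way, accounting for the only multi-second durations on the page. That 200 is the `spark.sql.shuffle.partitions` default, applied to every `Exchange rangepartitioning` here. Two standard remedies, assuming Spark 3.x:

```scala
// Fixed, small partition count for these tiny sorted outputs...
spark.conf.set("spark.sql.shuffle.partitions", 8)

// ...or let adaptive execution coalesce post-shuffle partitions automatically.
spark.conf.set("spark.sql.adaptive.enabled", true)
spark.conf.set("spark.sql.adaptive.coalescePartitions.enabled", true)
```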
511730 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:40 | 27 ms |
1/1
| 28.9 KiB | |||
511729 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94005402 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94005402 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7505769]
+- *(1) Project [date#94005402, (drawdown_length_gross#94005420 * -1) AS drawdown_length_gross#94005473]
+- InMemoryTableScan [date#94005402, drawdown_length_gross#94005420]
+- InMemoryRelation [date#94005402, allret#94005410, allcum#94005415, drawdown_gross#94005419, drawdown_length_gross#94005420, allretnet#94005421, allcumnet#94005422, drawdown_net#94005423, drawdown_length_net#94005424, numcos#94005425, largecum#94005427, largecumnet#94005429, largeret#94005431, largeretnet#94005433, midcum#94005435, midcumnet#94005439, midret#94005440, midretnet#94005443, smallcum#94005445, smallcumnet#94005446, smallret#94005449, smallretnet#94005450], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN (...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 2 s |
200/200
| 66.3 KiB | 28.9 KiB | ||
511727 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:39 | 26 ms |
1/1
| 47.5 KiB | |||
511726 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7515990]
+- *(1) Project [date#94006072, largeret#94006095 AS daily_gross#94135448, largeretnet#94006096 AS daily_net#94135449]
+- InMemoryTableScan [date#94006072, largeret#94006095, largeretnet#94006096]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS dat...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 0.8 s |
200/200
| 47.5 KiB | 47.5 KiB | ||
511725 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 56 ms |
1/1
| 155.6 KiB | 47.5 KiB | ||
511724 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 27 ms |
1/1
| 43.5 KiB | |||
511723 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94006072 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94006072 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7515843]
+- *(1) Project [date#94006072, largecum#94006091 AS cum_gross#94135477, largecumnet#94006093 AS cum_net#94135478]
+- InMemoryTableScan [date#94006072, largecum#94006091, largecumnet#94006093]
+- InMemoryRelation [date#94006072, allret#94006074, allcum#94006076, drawdown_gross#94006078, drawdown_length_gross#94006080, allretnet#94006081, allcumnet#94006083, drawdown_net#94006085, drawdown_length_net#94006087, numcos#94006089, largecum#94006091, largecumnet#94006093, largeret#94006095, largeretnet#94006096, midcum#94006098, midcumnet#94006101, midret#94006104, midretnet#94006107, smallcum#94006109, smallcumnet#94006111, smallret#94006114, smallretnet#94006116], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 0.8 s |
200/200
| 43.5 KiB | 43.5 KiB | ||
511722 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 0.1 s |
1/1
| 155.6 KiB | 43.5 KiB | ||
511721 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 22 ms |
1/1
| 1249.0 B | |||
511720 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [fractile#94017829 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(fractile#94017829 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7515879]
+- *(2) Project [fractile#94017829, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
+- *(2) BroadcastHashJoin [cap_description#94135062], [description#93880533], Inner, BuildRight, false
:- *(2) Project [fractile#94017829, cap#94017830 AS cap_description#94135062, size#94017831, value#94017832, growth#94017833, leverage#94017834, volatility#94017835, momentum#94017836, yield#94017837, ##94017838, min_date#94017839, max_date#94017840]
: +- *(2) Filter ((isnotnull(fractile#94017829) AND NOT (fractile#94017829 = -1)) AND isnotnull(cap#94017830))
: +- InMemoryTableScan [##94017838, cap#94017830, fractile#94017829, growth#94017833, leverage#94017834, max_date#94017840, min_date#94017839, mo...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 85 ms |
10/10
| 1249.0 B | 1249.0 B | ||
511719 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94017781 = NA) OR (fractile#94017781 = null)) THEN null ELSE cast(fractile#94017781 as int) END AS fractile#94017829, CASE WHEN (cap#94017782 = null) THEN null ELSE cap#94017782 END AS cap#94017830, CASE WHEN ((size#94017783 = NA) OR (size#94017783 = null)) THEN null ELSE cast(size#94017783 as float) END AS size#94017831, CASE WHEN ((value#94017784 = NA) OR (value#94017784 = null)) THEN null ELSE cast(value#94017784 as float) END AS value#94017832, CASE WHEN ((growth#94017785 = NA) OR (growth#94017785 = null)) THEN null ELSE cast(growth#94017785 as float) END AS growth#94017833, CASE WHEN ((leverage#94017786 = NA) OR (leverage#94017786 = null)) THEN null ELSE cast(leverage#94017786 as float) END AS leverage#94017834, CASE WHEN ((volatility#94017787 = NA) OR (volatility#94017787 = null)) THEN null ELSE cast(volatility#94017787 as float) END AS volatility#94017835, CASE WHEN ((momentum#94017788 = NA) OR (momentum#94017788 = null)) THEN null ELSE cast(momentum#94017788 as fl...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 11 ms |
1/1
| 3.0 KiB | 1249.0 B | ||
511718 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 15 ms |
1/1
| 155.6 KiB | |||
511717 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94010641 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94010641 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7515934]
+- *(1) Project [date#94010643 AS date#94010641, numcos#94010669 AS numcos#94010663]
+- *(1) Filter (isnotnull(cap#94010646) AND (cast(cap#94010646 as string) = 1))
+- InMemoryTableScan [cap#94010646, date#94010643, numcos#94010669], [isnotnull(cap#94010646), (cast(cap#94010646 as string) = 1)]
+- InMemoryRelation [date#94010643, cap#94010646, ret#94010650, resret#94010656, retnet#94010660, turnover#94010664, numcos#94010669, coverage#94010673, benchmark#94010677, excess_ret#94010681, excess_resret#94010683, excess_retnet#94010698], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS ...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 32 ms |
1/1
| ||||
511716 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS cap#94010646, CASE WHEN ((ret#94010361 = NA) OR (ret#94010361 = null)) THEN null ELSE cast(ret#94010361 as float) END AS ret#94010650, CASE WHEN ((resret#94010362 = NA) OR (resret#94010362 = null)) THEN null ELSE cast(resret#94010362 as float) END AS resret#94010656, CASE WHEN ((retnet#94010363 = NA) OR (retnet#94010363 = null)) THEN null ELSE cast(retnet#94010363 as float) END AS retnet#94010660, CASE WHEN ((turnover#94010364 = NA) OR (turnover#94010364 = null)) THEN null ELSE cast(turnover#94010364 as float) END AS turnover#94010664, CASE WHEN ((numcos#94010365 = NA) OR (numcos#94010365 = null)) THEN null ELSE cast(numcos#94010365 as float) END AS numcos#94010669, CASE WHEN ((coverage#94010366 = NA) OR (coverage#94010366 = null)) THEN null ELSE cast(coverage#94010366 as float) END AS cover...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 20 ms |
1/1
| 262.7 KiB | |||
511715 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((fractile#94017781 = NA) OR (fractile#94017781 = null)) THEN null ELSE cast(fractile#94017781 as int) END AS fractile#94017829, CASE WHEN (cap#94017782 = null) THEN null ELSE cap#94017782 END AS cap#94017830, CASE WHEN ((size#94017783 = NA) OR (size#94017783 = null)) THEN null ELSE cast(size#94017783 as float) END AS size#94017831, CASE WHEN ((value#94017784 = NA) OR (value#94017784 = null)) THEN null ELSE cast(value#94017784 as float) END AS value#94017832, CASE WHEN ((growth#94017785 = NA) OR (growth#94017785 = null)) THEN null ELSE cast(growth#94017785 as float) END AS growth#94017833, CASE WHEN ((leverage#94017786 = NA) OR (leverage#94017786 = null)) THEN null ELSE cast(leverage#94017786 as float) END AS leverage#94017834, CASE WHEN ((volatility#94017787 = NA) OR (volatility#94017787 = null)) THEN null ELSE cast(volatility#94017787 as float) END AS volatility#94017835, CASE WHEN ((momentum#94017788 = NA) OR (momentum#94017788 = null)) THEN null ELSE cast(momentum#94017788 as fl...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 17 ms |
1/1
| 3.0 KiB | |||
511714 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94005174 = null) THEN null ELSE cast(date#94005174 as date) END AS date#94005335, CASE WHEN ((allret#94005175 = NA) OR (allret#94005175 = null)) THEN null ELSE cast(allret#94005175 as float) END AS allret#94005336, CASE WHEN ((allcum#94005176 = NA) OR (allcum#94005176 = null)) THEN null ELSE cast(allcum#94005176 as float) END AS allcum#94005360, CASE WHEN ((drawdown_gross#94005177 = NA) OR (drawdown_gross#94005177 = null)) THEN null ELSE cast(drawdown_gross#94005177 as float) END AS drawdown_gross#94005361, CASE WHEN ((drawdown_length_gross#94005178 = NA) OR (drawdown_length_gross#94005178 = null)) THEN null ELSE cast(drawdown_length_gross#94005178 as int) END AS drawdown_length_gross#94005385, CASE WHEN ((allretnet#94005179 = NA) OR (allretnet#94005179 = null)) THEN null ELSE cast(allretnet#94005179 as float) END AS allretnet#94005389, CASE WHEN ((allcumnet#94005180 = NA) OR (allcumnet#94005180 = null)) THEN null ELSE cast(allcumnet#94005180 as float) END AS allcumnet#9400539...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 36 ms |
1/1
| 155.6 KiB | |||
511713 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:38 | 23 ms |
1/1
| 1313.0 B | |||
511712 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880530 ASC NULLS FIRST, description#93880532 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506039]
+- *(2) Project [sector_id#94010645, numcos#94010654, numdates#94010659, sort#93880530, description#93880532, universe#94011004, coverage#94010916]
+- *(2) BroadcastHashJoin [sector_id#94010645], [sector_id#93880529], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [id=#7506032]
: +- *(1) Project [sector_id#94010645, numcos#94010654, numdates#94010659, coverage#94010916, round((cast(numcos#94010654 as double) / cast(coverage#94010916 as double)), 0) AS universe#94011004]
: +- *(1) Filter isnotnull(sector_id#94010645)
: +- InMemoryTableScan [coverage#94010916, numcos#94010654, numdates#94010659, sector_id#94010645], [isnotnull(sector_id#94010645)]
...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 77 ms | 12/12 | 10.6 KiB | 1313.0 B ||
511710 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((overall#94014066 = NA) OR (overall#94014066 = null)) THEN null ELSE cast(overall#94014066 as int) END AS overall#94014252, CASE WHEN ((cap#94014067 = NA) OR (cap#94014067 = null)) THEN null ELSE cast(cap#94014067 as float) END AS cap#94014254, CASE WHEN ((retIC#94014068 = NA) OR (retIC#94014068 = null)) THEN null ELSE cast(retIC#94014068 as float) END AS retIC#94014256, CASE WHEN ((resretIC#94014069 = NA) OR (resretIC#94014069 = null)) THEN null ELSE cast(resretIC#94014069 as float) END AS resretIC#94014258, CASE WHEN ((size#94014070 = NA) OR (size#94014070 = null)) THEN null ELSE cast(size#94014070 as float) END AS size#94014260, CASE WHEN ((value#94014071 = NA) OR (value#94014071 = null)) THEN null ELSE cast(value#94014071 as float) END AS value#94014263, CASE WHEN ((growth#94014072 = NA) OR (growth#94014072 = null)) THEN null ELSE cast(growth#94014072 as float) END AS growth#94014265, CASE WHEN ((leverage#94014073 = NA) OR (leverage#94014073 = null)) THEN null ELSE cast(leverag...
*(1) Project [yield#94014273, volatility#94014269, momentum#94014271, size#94014260, value#94014263, growth#94014265, leverage#94014267]
+- *(1) Filter (isnotnull(cap#94014254) AND (cap#94014254 = 1.0))
+- InMemoryTableScan [cap#94014254, growth#94014265, leverage#94014267, momentum#94014271, size#94014260, value#94014263, volatility#94014269, yield#94014273], [isnotnull(cap#94014254), (cap#94014254 = 1.0)]
+- InMemoryRelation [overall#94014252, cap#94014254, retIC#94014256, resretIC#94014258, size#94014260, value#94014263, growth#94014265, leverage#94014267, volatility#94014269, momentum#94014271, yield#94014273, numcos#94014276, numdates#94014278, annual_bmret#94014280, annual_ret#94014282, std_ret#94014284, Sharpe_ret#94014286, PctPos_ret#94014288, TR_ret#94014290, IR_ret#94014291, annual_resret#94014293, std_resret#94014294, Sharpe_resret#94014295, PctPos_resret#94014296, ... 9 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN ((ove...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 0.1 s | 1/1 | 3.0 KiB |||
511709 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94010359 = null) THEN null ELSE cast(date#94010359 as date) END AS date#94010643, CASE WHEN ((cap#94010360 = NA) OR (cap#94010360 = null)) THEN null ELSE cast(cap#94010360 as float) END AS cap#94010646, CASE WHEN ((ret#94010361 = NA) OR (ret#94010361 = null)) THEN null ELSE cast(ret#94010361 as float) END AS ret#94010650, CASE WHEN ((resret#94010362 = NA) OR (resret#94010362 = null)) THEN null ELSE cast(resret#94010362 as float) END AS resret#94010656, CASE WHEN ((retnet#94010363 = NA) OR (retnet#94010363 = null)) THEN null ELSE cast(retnet#94010363 as float) END AS retnet#94010660, CASE WHEN ((turnover#94010364 = NA) OR (turnover#94010364 = null)) THEN null ELSE cast(turnover#94010364 as float) END AS turnover#94010664, CASE WHEN ((numcos#94010365 = NA) OR (numcos#94010365 = null)) THEN null ELSE cast(numcos#94010365 as float) END AS numcos#94010669, CASE WHEN ((coverage#94010366 = NA) OR (coverage#94010366 = null)) THEN null ELSE cast(coverage#94010366 as float) END AS cover...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 41 ms | 1/1 | 262.7 KiB |||
511708 | default | toStream at SparkDataStreamBuilder.scala:39
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 22 ms | 1/1 | 318.0 B |||
511707 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Sort [sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(sort#93880531 ASC NULLS FIRST, description#93880533 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506102]
+- *(2) Project [cap#94010644, numcos#94010657, numdates#94010661, sort#93880531, description#93880533, universe#93880535, (cast(numcos#94010657 as double) / cast(universe#93880535 as double)) AS coverage#94010966]
+- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506095]
: +- *(1) Filter isnotnull(cap#94010644)
: +- InMemoryTableScan [cap#94010644, numcos#94010657, numdates#94010661], [isnotnull(cap#94010644)]
: +- InMemoryRelation [ca...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 66 ms | 3/3 | 2.7 KiB | 318.0 B ||
511705 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((cap#94010439 = NA) OR (cap#94010439 = null)) THEN null ELSE cast(cap#94010439 as float) END AS cap#94010644, CASE WHEN ((retIC#94010440 = NA) OR (retIC#94010440 = null)) THEN null ELSE cast(retIC#94010440 as float) END AS retIC#94010648, CASE WHEN ((resretIC#94010441 = NA) OR (resretIC#94010441 = null)) THEN null ELSE cast(resretIC#94010441 as float) END AS resretIC#94010653, CASE WHEN ((numcos#94010442 = NA) OR (numcos#94010442 = null)) THEN null ELSE cast(numcos#94010442 as float) END AS numcos#94010657, CASE WHEN ((numdates#94010443 = NA) OR (numdates#94010443 = null)) THEN null ELSE cast(numdates#94010443 as int) END AS numdates#94010661, CASE WHEN (annual_bmret#94010444 = null) THEN null ELSE annual_bmret#94010444 END AS annual_bmret#94010666, CASE WHEN ((annual_ret#94010445 = NA) OR (annual_ret#94010445 = null)) THEN null ELSE cast(annual_ret#94010445 as float) END AS annual_ret#94010671, CASE WHEN ((std_ret#94010446 = NA) OR (std_ret#94010446 = null)) THEN null ELSE cast(st...
*(2) Project [ret_large#94013861, ret_small#94013891, (ret_large#94013861 - ret_small#94013891) AS differential#94013977]
+- *(2) BroadcastNestedLoopJoin BuildRight, Cross
:- *(2) Project [annual_ret#94010671 AS ret_large#94013861]
: +- *(2) Filter (isnotnull(cap#94010644) AND (cap#94010644 = 1.0))
: +- InMemoryTableScan [annual_ret#94010671, cap#94010644], [isnotnull(cap#94010644), (cap#94010644 = 1.0)]
: +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, numdates#94010661, annual_bmret#94010666, annual_ret#94010671, std_ret#94010676, Sharpe_ret#94010680, PctPos_ret#94010684, TR_ret#94010696, IR_ret#94010697, annual_resret#94010699, std_resret#94010700, Sharpe_resret#94010701, PctPos_resret#94010702, TR_resret#94010703, IR_resret#94010704, annual_retnet#94010705, std_retnet#94010706, Sharpe_retnet#94010707, PctPos_retnet#94010720, TR_retnet#94010722, IR_retnet#94010723, ... 2 more fields], StorageLevel(disk, memory, deserialized, 1 replicas)
...
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 11 ms | 1/1 | 424.0 B |||
511704 | default | broadcast exchange (runId 690bcd1f-a4d3-4ecd-9a21-5c03d300cec4) $anonfun$withThreadLocalCaptured$1 at FutureTask.java:264
RDD: *(1) Project [CASE WHEN ((cap#93880496 = NA) OR (cap#93880496 = null)) THEN null ELSE cast(cap#93880496 as int) END AS cap#93880528, CASE WHEN (sort#93880498 = null) THEN null ELSE sort#93880498 END AS sort#93880531, CASE WHEN (description#93880500 = null) THEN null ELSE description#93880500 END AS description#93880533, CASE WHEN ((universe#93880502 = NA) OR (universe#93880502 = null)) THEN null ELSE cast(universe#93880502 as int) END AS universe#93880535]
+- FileScan csv [cap#93880496,sort#93880498,description#93880500,universe#93880502] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/curate/curate_cap.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<cap:string,sort:string,description:string,universe:string>
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:185)
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:52:37 | 17 ms | 1/1 | 624.0 B |||
511703 | default | toStream at SparkDataStreamBuilder.scala:39
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 36 ms | 1/1 | 154.0 B |||
511702 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(3) Project [cap#94013946, turnover#94010724, (1.0 / cast(turnover#94010724 as double)) AS days_hold#94013974]
+- *(3) Sort [cap_sort#94013857 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(cap_sort#94013857 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506325]
+- *(2) Project [turnover#94010724, cap_description#94013856 AS cap#94013946, cap_sort#94013857]
+- *(2) BroadcastHashJoin [knownfloatingpointnormalized(normalizenanandzero(cap#94010644))], [knownfloatingpointnormalized(normalizenanandzero(cast(cap#93880528 as float)))], Inner, BuildLeft, false
:- BroadcastExchange HashedRelationBroadcastMode(List(knownfloatingpointnormalized(normalizenanandzero(input[0, float, false]))),false), [id=#7506317]
: +- *(1) Filter isnotnull(cap#94010644)
: +- InMemoryTableScan [cap#94010644, turnover#94010724], [isnotnull(cap#94010644)]
: +- InMemoryRelation [cap#94010644, retIC#94010648, resretIC#94010653, numcos#94010657, num...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 13 ms | 2/2 | 656.0 B | 154.0 B ||
511700 | default | toStream at SparkDataStreamBuilder.scala:39
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 47 ms | 1/1 | 647.0 B |||
511699 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [year#94014248 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(year#94014248 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7506265]
+- *(1) Project [year#94014248, turnover#94014292, (1.0 / cast(turnover#94014292 as double)) AS days_hold#94014331]
+- *(1) Filter isnotnull(turnover#94014292)
+- InMemoryTableScan [turnover#94014292, year#94014248], [isnotnull(turnover#94014292)]
+- InMemoryRelation [year#94014248, retIC#94014249, resretIC#94014250, numcos#94014251, numdates#94014253, annual_bmret#94014255, annual_ret#94014257, std_ret#94014259, Sharpe_ret#94014261, PctPos_ret#94014262, TR_ret#94014264, IR_ret#94014266, annual_resret#94014268, std_resret#94014270, Sharpe_resret#94014272, PctPos_resret#94014274, TR_resret#94014275, IR_resret#94014277, annual_retnet#94014279, std_retnet#94014281, Sharpe_retnet#94014283, PctPos_retnet#94014285, TR_retnet#94014287, IR_retnet#94014289, turnover#94014292], StorageLevel(disk, memory, deserialized, 1 replicas)
...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 34 ms | 9/9 | 4.1 KiB | 647.0 B ||
511697 | default | toStream at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE cast(turnover#94014098 as float) END AS turnover#94014305, CASE WHEN ((turnover#94014098 = NA) OR (turnover#94014098 = null)) THEN null ELSE (1.0 / cast(cast(turnover#94014098 as float) as double)) END AS days_hold#94014368]
+- *(1) Filter ((isnotnull(cap#94014067) AND NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false)) AND (cast(cap#94014067 as float) = 0.0))
+- FileScan csv [cap#94014067,turnover#94014098] Batched: false, DataFilters: [isnotnull(cap#94014067), NOT coalesce(((cap#94014067 = NA) OR (cap#94014067 = null)), false), (c..., Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/srv/plusamp/data/default/ea-market/output/rankviews_history/rank..., PartitionFilters: [], PushedFilters: [IsNotNull(cap)], ReadSchema: struct<cap:string,turnover:string>
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1426)
java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183) | 2025/08/01 18:52:37 | 14 ms | 1/1 | 368.0 B |||
511696 | default | toStream at SparkDataStreamBuilder.scala:39
scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
plusamp.scala.util.Profile$.time(Profile.scala:22)
plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/01 18:35:38 | 18 ms | 1/1 | 28.9 KiB |
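
Every stage in this table traces back to the same call site, SparkDataStreamBuilder.scala:39, where a toLocalIterator stage is paired with a toStream stage under a plusamp.scala.util.Profile$.time wrapper. The following is a minimal sketch of what that line plausibly looks like, reconstructed only from the stack frames above; the Profile signature, the class shape, and everything not named in the traces are assumptions, not the actual source:

```scala
import scala.collection.JavaConverters._
import org.apache.spark.sql.{Dataset, Row}

// Hypothetical timing helper matching the plusamp.scala.util.Profile$.time
// frame in the traces; the real signature is not visible in this dump.
object Profile {
  def time[A](label: String)(body: => A): A = {
    val start = System.nanoTime()
    try body
    finally println(f"$label: ${(System.nanoTime() - start) / 1e6}%.1f ms")
  }
}

// Sketch of the builder whose constructor appears in every trace.
// toLocalIterator() schedules one small driver-side job per partition as
// rows are consumed, and toStream memoizes the rows for re-reading.
class SparkDataStreamBuilder(dataset: Dataset[Row]) {
  val stream: Stream[Row] = Profile.time("stream") {
    dataset.toLocalIterator().asScala.toStream // SparkDataStreamBuilder.scala:39
  }
}
```

This shape would also explain the paired stages: the toLocalIterator call launches the partition-fetch jobs, while forcing the head of the memoized Stream surfaces as the separate toStream stage at the same line.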
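A second recurring shape in these plans: every leaf Project rewrites string CSV columns as CASE WHEN ((c = NA) OR (c = null)) THEN null ELSE cast(c as ...) END, and the FileScan in stage 511704 confirms an all-string ReadSchema. A per-column cleaning step along the following lines would produce exactly those projections; the helper name, chosen columns, and target types here are illustrative guesses, with only the NA/null sentinels and the cast targets taken from the plans:

```scala
import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.sql.functions.{col, lit, when}

// Illustrative helper: map the "NA"/"null" string sentinels to real nulls,
// then cast. Catalyst prints this as the CASE WHEN ... ELSE cast(...) END
// projections seen in the stage descriptions above.
def cleanAs(c: Column, dataType: String): Column =
  when(c === "NA" || c === "null", lit(null)).otherwise(c.cast(dataType))

// Usage against the one file path visible in the dump (stage 511704);
// the int casts mirror that stage's plan for cap and universe.
val spark = SparkSession.builder.appName("sketch").getOrCreate()
val raw = spark.read
  .option("header", "true") // assumption: column names come from the file
  .csv("/srv/plusamp/data/default/ea-market/curate/curate_cap.csv")

val cleaned = raw
  .withColumn("cap", cleanAs(col("cap"), "int"))
  .withColumn("universe", cleanAs(col("universe"), "int"))
```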