| Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
|---|---|---|---|---|---|---|---|---|---|
| 103 | tenants-pool-286 | Delta: replenishmentRunId = 10000000091 tenantId = 6944346648028224943 activityType = GetDemands activityId = 031b46dd-01c1-31bf-90b8-69a35d36279b workflowType = GetDemandsWorkflow workflowId = 7b368660-4ead-3646-9d37-2bc95eacade9 attempt = 4 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query — $anonfun$recordDeltaOperationInternal$1 at DatabricksLogging.scala:128 | 2026/01/27 05:03:20 | 82 ms | 50/50 | 3.4 KiB | | | |
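
The pool name and the tenant-context text in the Description column above are typically set through thread-local Spark properties before the action that triggers the stage, not by the query itself. Below is a minimal Scala sketch of how such labelling could be done; the session setup, description string, and table path are illustrative assumptions, while `setLocalProperty` and `setJobDescription` are the standard `SparkContext` APIs involved.

```scala
// Minimal sketch: route work from this thread to a named fair-scheduler pool
// and label it so the Spark UI Description column carries tenant context.
// Pool name, description text, and table path are illustrative.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("per-tenant-scheduling")
  .config("spark.scheduler.mode", "FAIR") // fair scheduling between pools
  .getOrCreate()
val sc = spark.sparkContext

// Jobs submitted from this thread go to a dedicated pool.
sc.setLocalProperty("spark.scheduler.pool", "tenants-pool-286")

// Job description shown in the Spark UI for stages submitted from this thread.
sc.setJobDescription(
  "replenishmentRunId = 10000000091 tenantId = 6944346648028224943 " +
    "activityType = GetDemands workflowType = GetDemandsWorkflow attempt = 4"
)

// Any action triggered from this thread now appears under tenants-pool-286;
// Delta wraps the description as "Delta: <description>: Filtering files for query"
// for its file-skipping stage, as in the row above.
spark.read.format("delta")
  .load("/path/to/table") // hypothetical path
  .filter("marketUnit = 'Daily_grocery'")
  .count()
```
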
| Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
|---|---|---|---|---|---|---|---|---|---|
| 102 | default | $anonfun$recordDeltaOperationInternal$1 at DatabricksLogging.scala:128 | Unknown | Unknown | 0/10 | | | | |
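
By contrast, stage 102 was submitted without these thread-local properties: when no `spark.scheduler.pool` local property is set, the fair scheduler places the job in the default pool, and with no job description the Description column shows only the call site of the submitting code.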