Draft

Bootstrap: create the catalog, the bronze and silver schemas, and two CDF-enabled source tables (one simulating a stream of transactions, one static history).

CREATE CATALOG IF NOT EXISTS ee_dlt_union MANAGED LOCATION 's3://synaptiq-databricks-default-storage-location/';

CREATE SCHEMA IF NOT EXISTS ee_dlt_union.bronze;
CREATE SCHEMA IF NOT EXISTS ee_dlt_union.silver;
USE CATALOG ee_dlt_union;
USE SCHEMA bronze;

CREATE OR REPLACE TABLE bronze.simulate_stream (
  transaction_id STRING,
  transaction_date DATE,
  product_name STRING,
  quantity INT,
  amount DECIMAL(10, 2)
)
TBLPROPERTIES (delta.enableChangeDataFeed = true);
CREATE OR REPLACE TABLE bronze.simulate_static (
  transaction_id STRING,
  transaction_date DATE,
  product_name STRING,
  quantity INT,
  amount DECIMAL(10, 2)
)
TBLPROPERTIES (delta.enableChangeDataFeed = true);
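
Both tables enable the Delta change data feed (CDF) at creation time, so every subsequent insert, update, and delete is recorded with row-level detail. For reference, CDF can also be switched on after the fact; it only captures changes from the version at which it is enabled onward:

ALTER TABLE bronze.simulate_stream
  SET TBLPROPERTIES (delta.enableChangeDataFeed = true);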
INSERT INTO bronze.simulate_static (
    transaction_id, transaction_date, product_name, quantity, amount
  )
  VALUES
    ('1001', '2022-01-15', 'Old Laptop', 1, 750.00),
    ('1002', '2022-03-20', 'Wired Mouse', 2, 15.00),
    ('1003', '2022-06-10', 'Basic Keyboard', 1, 25.00),
    ('1004', '2022-11-05', 'Old Monitor', 1, 150.00),
    ('1005', '2023-02-12', 'Docking Station', 1, 120.00);
num_affected_rows  num_inserted_rows
5                  5
INSERT INTO bronze.simulate_stream (
    transaction_id, transaction_date, product_name, quantity, amount
  )
  VALUES
    ('1', '2025-06-01', 'Product A', 10, 99.99),
    ('2', '2025-06-02', 'Product B', 5, 49.50),
    ('3', '2025-06-03', 'Product C', 20, 199.99);
num_affected_rows  num_inserted_rows
3                  3
INSERT INTO bronze.simulate_stream (
    transaction_id, transaction_date, product_name, quantity, amount
  )
  VALUES ('4', '2025-06-04', 'Product D', 15, 149.75);
num_affected_rows  num_inserted_rows
1                  1
INSERT INTO bronze.simulate_stream (
    transaction_id, transaction_date, product_name, quantity, amount
  )
  VALUES
    ('5', '2025-06-05', 'Product E', 8, 79.80),
    ('6', '2025-06-06', 'Product F', 12, 119.40),
    ('7', '2025-06-07', 'Product G', 7, 69.30);
num_affected_rows  num_inserted_rows
3                  3
SELECT * FROM bronze.simulate_stream;

transaction_id  transaction_date  product_name  quantity  amount
1               2025-06-01        Product A     10        99.99
1               2025-06-01        Product A     10        99.99
1               2025-06-01        Product A     10        99.99
2               2025-06-02        Product B     30        49.50
2               2025-06-02        Product B     30        49.50
2               2025-06-02        Product B     30        49.50

(The triplicated rows and the quantity of 30 suggest this snapshot was captured after the first insert cell had been run three times and after the update further below; the later inserts are not reflected here.)
DELETE FROM bronze.simulate_stream WHERE transaction_id = '3';

num_affected_rows
1
UPDATE bronze.simulate_stream
SET quantity = 30
WHERE transaction_id = '2';

num_affected_rows
3

(Three rows are affected because transaction_id '2' appears three times in the table.)
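
Because CDF is enabled, the delete and update above are captured as delete, update_preimage, and update_postimage rows. A quick way to confirm this is a batch read of the change feed with the built-in table_changes function:

-- inspect the change feed from version 0 onward
SELECT transaction_id, quantity, _change_type, _commit_version
FROM table_changes('bronze.simulate_stream', 0)
ORDER BY _commit_version;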
SELECT * FROM bronze.appended_table;

transaction_id  transaction_date  product_name     quantity  amount
1001            2022-01-15        Old Laptop       1         750.00
1002            2022-03-20        Wired Mouse      2         15.00
1003            2022-06-10        Basic Keyboard   1         25.00
1004            2022-11-05        Old Monitor      1         150.00
1005            2023-02-12        Docking Station  1         120.00
1               2025-06-01        Product A        10        99.99
2               2025-06-02        Product B        5         49.50
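
bronze.appended_table is not defined in this notebook: it comes from the DLT pipeline that unions the static and streaming sources. A minimal sketch of that definition in DLT SQL, assuming the pipeline's default catalog is ee_dlt_union (the real pipeline may differ; in particular, streaming reads from a table that has received updates or deletes need skipChangeCommits, which is set through the Python reader option rather than in SQL):

CREATE OR REFRESH STREAMING TABLE appended_table AS
  -- both sources are read incrementally, so the union stays append-only
  SELECT * FROM STREAM(bronze.simulate_static)
  UNION ALL
  SELECT * FROM STREAM(bronze.simulate_stream);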
SELECT * FROM bronze.appended_table_cdc;

transaction_id  transaction_date  product_name     quantity  amount  _change_type      _commit_version  _commit_timestamp
2               2025-06-02        Product B        5         49.50   update_preimage   24               2025-07-14T19:58:43Z
2               2025-06-02        Product B        30        49.50   update_postimage  24               2025-07-14T19:58:43Z
2               2025-06-02        Product B        5         49.50   update_preimage   24               2025-07-14T19:58:43Z
2               2025-06-02        Product B        30        49.50   update_postimage  24               2025-07-14T19:58:43Z
2               2025-06-02        Product B        5         49.50   update_preimage   24               2025-07-14T19:58:43Z
2               2025-06-02        Product B        30        49.50   update_postimage  24               2025-07-14T19:58:43Z
1               2025-06-01        Product A        10        99.99   insert            18               2025-07-14T19:04:10Z
2               2025-06-02        Product B        5         49.50   insert            18               2025-07-14T19:04:10Z
1               2025-06-01        Product A        10        99.99   insert            18               2025-07-14T19:04:10Z
2               2025-06-02        Product B        5         49.50   insert            18               2025-07-14T19:04:10Z
3               2025-06-03        Product C        20        199.99  insert            18               2025-07-14T19:04:10Z
1001            2022-01-15        Old Laptop       1         750.00  insert            5                2025-07-14T17:04:05Z
1002            2022-03-20        Wired Mouse      2         15.00   insert            5                2025-07-14T17:04:05Z
1003            2022-06-10        Basic Keyboard   1         25.00   insert            5                2025-07-14T17:04:05Z
1004            2022-11-05        Old Monitor      1         150.00  insert            5                2025-07-14T17:04:05Z
1005            2023-02-12        Docking Station  1         120.00  insert            5                2025-07-14T17:04:05Z
5               2025-06-05        Product E        8         79.80   insert            26               2025-07-15T16:20:15Z
6               2025-06-06        Product F        12        119.40  insert            26               2025-07-15T16:20:15Z
7               2025-06-07        Product G        7         69.30   insert            26               2025-07-15T16:20:15Z
1               2025-06-01        Product A        10        99.99   insert            21               2025-07-14T19:54:20Z
2               2025-06-02        Product B        5         49.50   insert            21               2025-07-14T19:54:20Z
3               2025-06-03        Product C        20        199.99  insert            21               2025-07-14T19:54:20Z
3               2025-06-03        Product C        20        199.99  delete            22               2025-07-14T19:55:24Z
3               2025-06-03        Product C        20        199.99  delete            19               2025-07-14T19:45:52Z
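
bronze.appended_table_cdc carries the CDF metadata columns (_change_type, _commit_version, _commit_timestamp), so it was built from the change feeds rather than from the table contents. A plain-SQL union that reproduces its shape is sketched below; the actual pipeline more likely consumes the feeds as a streaming readChangeFeed read, and the duplicated insert rows above suggest the feed was consumed more than once:

-- batch equivalent of the CDC union (versions from 0 onward)
SELECT * FROM table_changes('bronze.simulate_static', 0)
UNION ALL
SELECT * FROM table_changes('bronze.simulate_stream', 0);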
SELECT * FROM silver.target_table;

transaction_id  transaction_date  product_name     quantity  amount  _change_type      _commit_version  _commit_timestamp
1001            2022-01-15        Old Laptop       1         750.00  insert            5                2025-07-14T17:04:05Z
1002            2022-03-20        Wired Mouse      2         15.00   insert            5                2025-07-14T17:04:05Z
1003            2022-06-10        Basic Keyboard   1         25.00   insert            5                2025-07-14T17:04:05Z
1004            2022-11-05        Old Monitor      1         150.00  insert            5                2025-07-14T17:04:05Z
1005            2023-02-12        Docking Station  1         120.00  insert            5                2025-07-14T17:04:05Z
1               2025-06-01        Product A        10        99.99   insert            21               2025-07-14T19:54:20Z
2               2025-06-02        Product B        30        49.50   update_postimage  24               2025-07-14T19:58:43Z
5               2025-06-05        Product E        8         79.80   insert            26               2025-07-15T16:20:15Z
6               2025-06-06        Product F        12        119.40  insert            26               2025-07-15T16:20:15Z
7               2025-06-07        Product G        7         69.30   insert            26               2025-07-15T16:20:15Z
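
silver.target_table holds exactly one current row per transaction_id: the deleted key 3 is gone and key 2 shows the post-update quantity. That is the behavior of APPLY CHANGES with SCD Type 1. A minimal sketch of the DLT definition, assuming the CDC union feeds it; note that this draft's target evidently keeps the CDF metadata columns rather than dropping them with COLUMNS * EXCEPT:

CREATE OR REFRESH STREAMING TABLE target_table;

APPLY CHANGES INTO target_table
FROM STREAM(appended_table_cdc)
KEYS (transaction_id)
APPLY AS DELETE WHEN _change_type = 'delete'
SEQUENCE BY _commit_version
STORED AS SCD TYPE 1;
-- note: update_preimage rows should be filtered out of the feed upstream,
-- since pre- and post-images share the same _commit_version sequence value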
SELECT * FROM silver.target_table_scd_type_2;

org.apache.spark.sql.catalyst.ExtendedAnalysisException: [TABLE_OR_VIEW_NOT_FOUND]
The table or view `silver`.`target_table_scd_type_2` cannot be found. SQLSTATE: 42P01
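
The SCD Type 2 variant has not been created yet, hence the TABLE_OR_VIEW_NOT_FOUND above. The natural next step is the same APPLY CHANGES flow with STORED AS SCD TYPE 2, which keeps history rows with __START_AT and __END_AT columns instead of overwriting in place (a sketch under the same assumptions as the Type 1 definition):

CREATE OR REFRESH STREAMING TABLE target_table_scd_type_2;

APPLY CHANGES INTO target_table_scd_type_2
FROM STREAM(appended_table_cdc)
KEYS (transaction_id)
APPLY AS DELETE WHEN _change_type = 'delete'
SEQUENCE BY _commit_version
STORED AS SCD TYPE 2;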