{
"message": "Spark code generated successfully",
"code": "from pyspark.sql import SparkSession, functions as F\n\nspark = (SparkSession.builder\n .appName(\"m2s_demo_job\")\n .config(\"spark.sql.extensions\", \"org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions\")\n .config(\"spark.sql.catalog.hive_prod\", \"org.apache.iceberg.spark.SparkCatalog\")\n .config(\"spark.sql.catalog.hive_prod.type\", \"hive\")\n .config(\"spark.sql.catalog.hive_prod.uri\", \"thrift://hms-host:9083\")\n .getOrCreate())\n\nn_84b3b8d0_ac7d_49eb_8968_915b0af63cc9 = spark.table(\"dm.txn\").alias(\"txn\")\nn_d124fa4f_39d8_457b_ae22_f80d556725ac = spark.read.format(\"parquet\").load(\"/data/raw/customers\").alias(\"customers\")\nn_1609806c_2d29_4e02_9cc8_ca6accfb97c8 = n_84b3b8d0_ac7d_49eb_8968_915b0af63cc9.filter(F.expr(\"amount > 0\"))\nn_6d9fb665_0682_4df4_8eae_39f0e0e616e3 = n_1609806c_2d29_4e02_9cc8_ca6accfb97c8.select(F.col(\"customer_id\")).distinct()\nn_bfa4cdf8_6f44_47cc_9ad6_b200f54e4869 = n_6d9fb665_0682_4df4_8eae_39f0e0e616e3.alias(\"l\").join(n_d124fa4f_39d8_457b_ae22_f80d556725ac.alias(\"r\"), F.expr(\"l.customer_id = r.id\"), \"left\")\nn_930a1ae6_e28d_474c_b384_0a1dc11c6655 = n_bfa4cdf8_6f44_47cc_9ad6_b200f54e4869\nn_930a1ae6_e28d_474c_b384_0a1dc11c6655.createOrReplaceTempView(\"stg\")\nspark.sql(\"INSERT INTO hive_prod.db.output SELECT * FROM stg\")\n\nprint(\"Job completed.\")\n"
}