== Physical Plan ==
Execute CreateViewCommand (1)
+- CreateViewCommand (2)
   +- SerializeFromObject (10)
      +- MapElements (9)
         +- DeserializeToObject (8)
            +- Project (7)
               +- Project (6)
                  +- Project (5)
                     +- Project (4)
                        +- LogicalRelation (3)


(1) Execute CreateViewCommand
Output: []

(2) CreateViewCommand
Arguments: `b33102c2f3334fb3bfd52664bcd90319_1745020804598`, false, true, LocalTempView

(3) LogicalRelation
Arguments: JDBCRelation(( SELECT DISTINCT t1.F0000080, case when t4.ye<>'' THEN t4.ye ELSE DATE_FORMAT(NOW(),'%Y') end ye , '01-02-03-04-05-06-07-08-09-10-11-12' dataTime from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1 LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1' LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1' LEFT JOIN( SELECT DISTINCT F0000031, CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12 THEN date_format(F0000030, '%Y')+1 ELSE date_format(F0000030, '%Y') END ye FROM `日报填报` where Status='1' )t4 on t4.F0000031=t3.ObjectId ) as T) [numPartitions=1], [F0000080#27277, ye#27278, dataTime#27279], false

(4) Project
Arguments: [F0000080#27277, ye#27278, dataTime#27279]

(5) Project
Arguments: [F0000080#27277 AS F0000080#27286, ye#27278, dataTime#27279]

(6) Project
Arguments: [F0000080#27286, ye#27278 AS ye#27290, dataTime#27279]

(7) Project
Arguments: [F0000080#27286, ye#27290, dataTime#27279 AS dataTime#27294]

(8) DeserializeToObject
Arguments: createexternalrow(F0000080#27286.toString, ye#27290.toString, dataTime#27294.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#27395: org.apache.spark.sql.Row

(9) MapElements
Arguments: smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@52af77bb, interface org.apache.spark.sql.Row, [StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)], obj#27396: org.apache.spark.sql.Row

(10) SerializeFromObject
Arguments: [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#27397, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#27398, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#27399, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#27400, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#27401, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#27402, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#27403, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#27404, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#27405, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#27406, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), StringType), true, false) AS dataTime_7#27407, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#27408, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#27409, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#27410, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#27411]
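For orientation, the shape of this plan can be approximated with a minimal Scala sketch: a JDBC read whose dbtable is a pushed-down subquery (LogicalRelation), per-column renames (the Project chain), and a Row-level map with an explicit Row encoder (DeserializeToObject / MapElements / SerializeFromObject), registered as a local temp view (CreateViewCommand). The JDBC URL and credentials are placeholders, the subquery is shortened, the identity map merely stands in for the DivideNode transformation, and the encoder construction is version-dependent; this is an illustrative sketch, not the original engine code.

import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.catalyst.encoders.RowEncoder

object TempViewPlanSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("temp-view-plan-sketch")
      .master("local[*]")
      .getOrCreate()

    // (3) LogicalRelation: JDBC read over a pushed-down subquery
    // (shortened here; url/user/password are placeholders).
    val src = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://host:3306/db")
      .option("dbtable",
        "(SELECT DISTINCT F0000080, DATE_FORMAT(NOW(),'%Y') ye, " +
          "'01-02-03-04-05-06-07-08-09-10-11-12' dataTime " +
          "FROM `立项审批` WHERE `Status`='1') AS T")
      .option("user", "user")
      .option("password", "password")
      .load()

    // (4)-(7) Project: each rename (here to the same name, as in the plan,
    // which only allocates a fresh expression id) adds one Project node.
    val projected = src
      .withColumnRenamed("F0000080", "F0000080")
      .withColumnRenamed("ye", "ye")
      .withColumnRenamed("dataTime", "dataTime")

    // (8)-(10) DeserializeToObject / MapElements / SerializeFromObject:
    // a Row-level map with an explicit Row encoder. RowEncoder(schema) works on
    // Spark 3.0-3.4; on 3.5+ use Encoders.row(schema) instead. The identity
    // function stands in for the DivideNode transformation that splits dataTime.
    val mapped: Dataset[Row] = projected.map(row => row)(RowEncoder(projected.schema))

    // (1)-(2) Execute CreateViewCommand / CreateViewCommand: registering the
    // result as a local temporary view runs a CreateViewCommand like the one above
    // (its plan is what the engine's SQL listener/UI records).
    mapped.createOrReplaceTempView("b33102c2f3334fb3bfd52664bcd90319_1745020804598")

    // Print the mapped dataset's own plan in the same "formatted" style.
    mapped.explain("formatted")

    spark.stop()
  }
}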