== Physical Plan ==
Execute CreateViewCommand (1)
+- CreateViewCommand (2)
   +- SerializeFromObject (16)
      +- MapElements (15)
         +- DeserializeToObject (14)
            +- Project (13)
               +- Generate (12)
                  +- SubqueryAlias (11)
                     +- SerializeFromObject (10)
                        +- MapElements (9)
                           +- DeserializeToObject (8)
                              +- Project (7)
                                 +- Project (6)
                                    +- Project (5)
                                       +- Project (4)
                                          +- LogicalRelation (3)


(1) Execute CreateViewCommand
Output: []

(2) CreateViewCommand
Arguments: `1e7e1d7642034bada48b4690a5974596_1744848007254`, false, true, LocalTempView

(3) LogicalRelation
Arguments: JDBCRelation((
    SELECT DISTINCT
        t1.F0000080,
        CASE WHEN t4.ye <> '' THEN t4.ye ELSE DATE_FORMAT(NOW(), '%Y') END ye,
        '01-02-03-04-05-06-07-08-09-10-11-12' dataTime
    FROM (SELECT F0000080 FROM `立项审批` WHERE `Status` = '1') t1
    LEFT JOIN `井位信息` t2 ON t2.ObjectId = t1.F0000080 AND t2.`Status` = '1'
    LEFT JOIN `井位运行情况表` t3 ON t2.ObjectId = t3.F0000026 AND t3.`Status` = '1'
    LEFT JOIN (
        SELECT DISTINCT
            F0000031,
            CASE WHEN DATE_FORMAT(F0000030, '%d') > 25 AND DATE_FORMAT(F0000030, '%c') = 12
                 THEN DATE_FORMAT(F0000030, '%Y') + 1
                 ELSE DATE_FORMAT(F0000030, '%Y')
            END ye
        FROM `日报填报` WHERE Status = '1'
    ) t4 ON t4.F0000031 = t3.ObjectId
) as T) [numPartitions=1], [F0000080#24077, ye#24078, dataTime#24079], false

(4) Project
Arguments: [F0000080#24077, ye#24078, dataTime#24079]

(5) Project
Arguments: [F0000080#24077 AS F0000080#24086, ye#24078, dataTime#24079]

(6) Project
Arguments: [F0000080#24086, ye#24078 AS ye#24090, dataTime#24079]

(7) Project
Arguments: [F0000080#24086, ye#24090, dataTime#24079 AS dataTime#24094]

(8) DeserializeToObject
Arguments: createexternalrow(F0000080#24086.toString, ye#24090.toString, dataTime#24094.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#24195: org.apache.spark.sql.Row

(9) MapElements
Arguments: smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@78aed76f, interface org.apache.spark.sql.Row, [StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)], obj#24196: org.apache.spark.sql.Row

(10) SerializeFromObject
Arguments: [
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#24197,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#24198,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#24199,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#24200,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#24201,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#24202,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#24203,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#24204,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#24205,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#24206,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), StringType), true, false) AS dataTime_7#24207,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#24208,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#24209,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#24210,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#24211]

(11) SubqueryAlias
Arguments: da620ba17ce14519a543f976c6b5e544_1744848004583

(12) Generate
Arguments: stack(12, dataTime_0, dataTime_0#24200, dataTime_1, dataTime_1#24201, dataTime_2, dataTime_2#24202, dataTime_3, dataTime_3#24203, dataTime_4, dataTime_4#24204, dataTime_5, dataTime_5#24205, dataTime_6, dataTime_6#24206, dataTime_7, dataTime_7#24207, dataTime_8, dataTime_8#24208, dataTime_9, dataTime_9#24209, dataTime_10, dataTime_10#24210, dataTime_11, dataTime_11#24211), false, [monname#24248, mon#24249]

(13) Project
Arguments: [F0000080#24197, ye#24198, dataTime#24199, monname#24248, mon#24249]

(14) DeserializeToObject
Arguments: createexternalrow(F0000080#24197.toString, ye#24198.toString, dataTime#24199.toString, monname#24248.toString, mon#24249.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)), obj#24276: org.apache.spark.sql.Row

(15) MapElements
Arguments: smartbix.datamining.engine.execute.node.preprocess.ValueReplaceNode$2@67c32daf, interface org.apache.spark.sql.Row, [StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)], obj#24277: org.apache.spark.sql.Row

(16) SerializeFromObject
Arguments: [
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#24278,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#24279,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#24280,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, monname), StringType), true, false) AS monname#24281,
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, mon), StringType), true, false) AS mon#24282]
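For reference, nodes (1)-(3) are the footprint of reading a MySQL source through Spark's JDBC data source with the whole SELECT pushed down as a derived table, then registering the result as a local temp view. The Scala below is a minimal sketch, not the original job: the connection URL, the credentials and the shortened query text are assumptions; only the "(...) as T" wrapping, numPartitions=1 and the view name come from the plan.

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().appName("jdbc-view-sketch").getOrCreate()

    // Stand-in for the full SELECT shown in node (3); the real query also derives `ye`
    // and joins three more tables.
    val pushedQuery =
      """SELECT DISTINCT t1.F0000080,
        |       '01-02-03-04-05-06-07-08-09-10-11-12' dataTime
        |FROM (SELECT F0000080 FROM `立项审批` WHERE `Status` = '1') t1""".stripMargin

    val df = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://host:3306/db")          // placeholder connection info
      .option("user", "user").option("password", "pass")   // placeholders
      .option("dbtable", s"( $pushedQuery ) as T")         // shows up as JDBCRelation((...) as T)
      .option("numPartitions", "1")
      .load()

    // CreateViewCommand `...`, false, true, LocalTempView -- nodes (1)-(2)
    df.createOrReplaceTempView("1e7e1d7642034bada48b4690a5974596_1744848007254")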
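The DeserializeToObject / MapElements / SerializeFromObject triples (nodes (8)-(10) and (14)-(16)) are what a typed, row-by-row Dataset.map with a Row encoder compiles to; the plan only records the anonymous function objects (DivideNode$2, ValueReplaceNode$2), not their logic. A minimal sketch of the pattern, reusing df from the sketch above, with a placeholder transformation and the three-column schema listed in nodes (8)-(9):

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.catalyst.encoders.RowEncoder
    import org.apache.spark.sql.types.{StringType, StructField, StructType}

    // Schema matching the StructFields recorded in the plan.
    val schema = StructType(Seq(
      StructField("F0000080", StringType, nullable = true),
      StructField("ye", StringType, nullable = true),
      StructField("dataTime", StringType, nullable = true)))

    // Placeholder per-row transformation; the real DivideNode/ValueReplaceNode logic
    // is not visible in the plan. RowEncoder(schema) is the Spark 2.x/3.x (up to 3.4)
    // way to get an Encoder[Row]; newer releases obtain it differently.
    val mapped = df.map { row: Row =>
      Row(row.getString(0), row.getString(1), row.getString(2))
    }(RowEncoder(schema))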
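Generate (12) with stack(12, ...) is the unpivot step: the twelve dataTime_0 ... dataTime_11 columns produced by node (10) become (monname, mon) rows, and Project (13) keeps F0000080, ye and dataTime alongside the generated pair. A minimal sketch, assuming the intermediate result is reachable under the alias recorded in SubqueryAlias (11):

    // Wide intermediate with dataTime_0 ... dataTime_11, as produced by node (10).
    val wide = spark.table("da620ba17ce14519a543f976c6b5e544_1744848004583")

    // Builds "stack(12, 'dataTime_0', dataTime_0, ..., 'dataTime_11', dataTime_11) as (monname, mon)".
    val stackExpr = (0 until 12)
      .map(i => s"'dataTime_$i', dataTime_$i")
      .mkString("stack(12, ", ", ", ") as (monname, mon)")

    val unpivoted = wide.selectExpr("F0000080", "ye", "dataTime", stackExpr)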