== Physical Plan ==
Execute CreateViewCommand (1)
+- CreateViewCommand (2)
   +- SerializeFromObject (16)
      +- MapElements (15)
         +- DeserializeToObject (14)
            +- Project (13)
               +- Generate (12)
                  +- SubqueryAlias (11)
                     +- SerializeFromObject (10)
                        +- MapElements (9)
                           +- DeserializeToObject (8)
                              +- Project (7)
                                 +- Project (6)
                                    +- Project (5)
                                       +- Project (4)
                                          +- LogicalRelation (3)


(1) Execute CreateViewCommand
Output: []

(2) CreateViewCommand
Arguments: `f21d3aa24af5447c9d1f520f27739a15_1744070407468`, false, true, LocalTempView

(3) LogicalRelation
Arguments: JDBCRelation(( SELECT DISTINCT t1.F0000080, case when t4.ye<>'' THEN t4.ye ELSE DATE_FORMAT(NOW(),'%Y') end ye , '01-02-03-04-05-06-07-08-09-10-11-12' dataTime from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1 LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1' LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1' LEFT JOIN( SELECT DISTINCT F0000031, CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12 THEN date_format(F0000030, '%Y')+1 ELSE date_format(F0000030, '%Y') END ye FROM `日报填报` where Status='1' )t4 on t4.F0000031=t3.ObjectId ) as T) [numPartitions=1], [F0000080#9677, ye#9678, dataTime#9679], false

(4) Project
Arguments: [F0000080#9677, ye#9678, dataTime#9679]

(5) Project
Arguments: [F0000080#9677 AS F0000080#9686, ye#9678, dataTime#9679]

(6) Project
Arguments: [F0000080#9686, ye#9678 AS ye#9690, dataTime#9679]

(7) Project
Arguments: [F0000080#9686, ye#9690, dataTime#9679 AS dataTime#9694]

(8) DeserializeToObject
Arguments: createexternalrow(F0000080#9686.toString, ye#9690.toString, dataTime#9694.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#9795: org.apache.spark.sql.Row

(9) MapElements
Arguments: smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@251f04bb, interface org.apache.spark.sql.Row, [StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)], obj#9796: org.apache.spark.sql.Row

(10) SerializeFromObject
Arguments: [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#9797, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#9798, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#9799, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#9800, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#9801, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#9802, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#9803, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#9804, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#9805, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#9806, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), StringType), true, false) AS dataTime_7#9807, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#9808, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#9809, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#9810, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#9811]

(11) SubqueryAlias
Arguments: 47275fb8f72f40f4893bc6e3d48615f4_1744070404766

(12) Generate
Arguments: stack(12, dataTime_0, dataTime_0#9800, dataTime_1, dataTime_1#9801, dataTime_2, dataTime_2#9802, dataTime_3, dataTime_3#9803, dataTime_4, dataTime_4#9804, dataTime_5, dataTime_5#9805, dataTime_6, dataTime_6#9806, dataTime_7, dataTime_7#9807, dataTime_8, dataTime_8#9808, dataTime_9, dataTime_9#9809, dataTime_10, dataTime_10#9810, dataTime_11, dataTime_11#9811), false, [monname#9848, mon#9849]

(13) Project
Arguments: [F0000080#9797, ye#9798, dataTime#9799, monname#9848, mon#9849]

(14) DeserializeToObject
Arguments: createexternalrow(F0000080#9797.toString, ye#9798.toString, dataTime#9799.toString, monname#9848.toString, mon#9849.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)), obj#9876: org.apache.spark.sql.Row

(15) MapElements
Arguments: smartbix.datamining.engine.execute.node.preprocess.ValueReplaceNode$2@40f3bba4, interface org.apache.spark.sql.Row, [StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)], obj#9877: org.apache.spark.sql.Row

(16) SerializeFromObject
Arguments: [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#9878, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#9879, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#9880, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, monname), StringType), true, false) AS monname#9881, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, mon), StringType), true, false) AS mon#9882]
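Reading the plan bottom-up: the JDBC source (3) delivers three string columns, the DivideNode MapElements (9) splits dataTime into twelve dataTime_0..dataTime_11 columns, and the Generate node (12) then unpivots those twelve columns into (monname, mon) rows via the stack generator. The following is a minimal, self-contained Scala sketch of that stack() unpivot only, not the original smartbix pipeline; the one-row DataFrame df and its sample values are invented purely for illustration, while the column names and the stack(12, ...) shape are taken from node (12) above.

import org.apache.spark.sql.SparkSession

object StackUnpivotSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("stack-unpivot-sketch")
      .getOrCreate()
    import spark.implicits._

    // Stand-in for the wide output of SerializeFromObject (10): one hypothetical row
    // with the split month columns dataTime_0..dataTime_11 (values are made up).
    val df = Seq(
      ("well-1", "2025", "01-02-03-04-05-06-07-08-09-10-11-12",
       "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12")
    ).toDF("F0000080", "ye", "dataTime",
      "dataTime_0", "dataTime_1", "dataTime_2", "dataTime_3", "dataTime_4", "dataTime_5",
      "dataTime_6", "dataTime_7", "dataTime_8", "dataTime_9", "dataTime_10", "dataTime_11")

    // Same unpivot shape as Generate (12): stack(12, 'label', value, ...) AS (monname, mon),
    // turning the 12 wide columns into 12 (monname, mon) rows per input row.
    val unpivoted = df.selectExpr(
      "F0000080", "ye", "dataTime",
      """stack(12,
        'dataTime_0', dataTime_0, 'dataTime_1', dataTime_1, 'dataTime_2', dataTime_2,
        'dataTime_3', dataTime_3, 'dataTime_4', dataTime_4, 'dataTime_5', dataTime_5,
        'dataTime_6', dataTime_6, 'dataTime_7', dataTime_7, 'dataTime_8', dataTime_8,
        'dataTime_9', dataTime_9, 'dataTime_10', dataTime_10, 'dataTime_11', dataTime_11
      ) AS (monname, mon)"""
    )

    unpivoted.show(false)
    // Produces a Generate stack(12, ...) node analogous to (12) in the plan above.
    unpivoted.explain("formatted")

    spark.stop()
  }
}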