digraph G {
subgraph cluster0 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: 0 ms";
1 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build: 0 ms<br>number of output rows: 1"];
}
2 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 1<br>shuffle write time: 0 ms<br>records read: 1<br>local bytes read: 59.0 B<br>fetch wait time: 0 ms<br>remote bytes read: 0.0 B<br>local blocks read: 1<br>remote blocks read: 0<br>data size: 16.0 B<br>remote bytes read to disk: 0.0 B<br>shuffle bytes written: 59.0 B"];
subgraph cluster3 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: 0 ms";
4 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build: 0 ms<br>number of output rows: 1"];
}
5 [labelType="html" label="<b>InMemoryTableScan</b><br><br>number of output rows: 6,840"];
subgraph cluster6 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: 85 ms";
7 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
8 [labelType="html" label="<br><b>MapElements</b><br><br>"];
9 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
}
10 [labelType="html" label="<b>Generate</b><br><br>number of output rows: 6,840"];
subgraph cluster11 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: 43 ms";
12 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
13 [labelType="html" label="<br><b>MapElements</b><br><br>"];
14 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
15 [labelType="html" label="<b>Scan JDBCRelation(( SELECT \r<br>\tDISTINCT t1.F0000080,\r<br>\tcase when t4.ye<>''\r<br>\tTHEN t4.ye\r<br>\tELSE DATE_FORMAT(NOW(),'%Y') end ye\r<br>\t,\r<br>\t'01-02-03-04-05-06-07-08-09-10-11-12' dataTime\r<br>\tfrom (SELECT F0000080 from `\u7ACB\u9879\u5BA1\u6279` WHERE `Status`='1' ) t1\r<br>\tLEFT JOIN `\u4E95\u4F4D\u4FE1\u606F` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1'\r<br>\tLEFT JOIN `\u4E95\u4F4D\u8FD0\u884C\u60C5\u51B5\u8868` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1'\r<br>\tLEFT JOIN(\t\r<br>\tSELECT DISTINCT F0000031,\r<br>\tCASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12\r<br>\t\tTHEN date_format(F0000030, '%Y')+1\r<br>\t\tELSE date_format(F0000030, '%Y')\r<br>\t\tEND ye\r<br>\t FROM \t`\u65E5\u62A5\u586B\u62A5` \r<br>\t where Status='1' \r<br>\t )t4 on t4.F0000031=t3.ObjectId <br> ) as T) [numPartitions=1] </b><br><br>number of output rows: 570"];
}
2->1;
4->2;
5->4;
7->5;
8->7;
9->8;
10->9;
12->10;
13->12;
14->13;
15->14;
}
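The block above is the Graphviz DOT source the Spark UI renders for this query's operator DAG. For offline viewing it can be rendered directly; a minimal sketch, assuming the DOT text is saved as plan.dot and both the graphviz Python package and the Graphviz binaries are installed:

from graphviz import Source

# Parse the DOT source dumped above and write plan.png alongside it.
src = Source.from_file("plan.dot")
src.render("plan", format="png", cleanup=True)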
HashAggregate(keys=[], functions=[count(1)])
WholeStageCodegen (2)
Exchange SinglePartition, ENSURE_REQUIREMENTS, [id=#11764]
HashAggregate(keys=[], functions=[partial_count(1)])
WholeStageCodegen (1)
InMemoryTableScan
SerializeFromObject [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38678, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38679, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38680, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, monname), StringType), true, false) AS monname#38681, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, mon), StringType), true, false) AS mon#38682]
MapElements smartbix.datamining.engine.execute.node.preprocess.ValueReplaceNode$2@aaa7cc2, obj#38677: org.apache.spark.sql.Row
DeserializeToObject createexternalrow(F0000080#38597.toString, ye#38598.toString, dataTime#38599.toString, monname#38648.toString, mon#38649.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)), obj#38676: org.apache.spark.sql.Row
WholeStageCodegen (2)
Generate stack(12, dataTime_0, dataTime_0#38600, dataTime_1, dataTime_1#38601, dataTime_2, dataTime_2#38602, dataTime_3, dataTime_3#38603, dataTime_4, dataTime_4#38604, dataTime_5, dataTime_5#38605, dataTime_6, dataTime_6#38606, dataTime_7, dataTime_7#38607, dataTime_8, dataTime_8#38608, dataTime_9, dataTime_9#38609, dataTime_10, dataTime_10#38610, dataTime_11, dataTime_11#38611), [F0000080#38597, ye#38598, dataTime#38599], false, [monname#38648, mon#38649]
SerializeFromObject [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38597, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38598, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38599, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#38600, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#38601, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#38602, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#38603, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#38604, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#38605, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#38606, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), StringType), 
true, false) AS dataTime_7#38607, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#38608, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#38609, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#38610, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#38611]
MapElements smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@76684062, obj#38596: org.apache.spark.sql.Row
DeserializeToObject createexternalrow(F0000080#38477.toString, ye#38478.toString, dataTime#38479.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#38595: org.apache.spark.sql.Row
Scan JDBCRelation(( SELECT
DISTINCT t1.F0000080,
case when t4.ye<>''
THEN t4.ye
ELSE DATE_FORMAT(NOW(),'%Y') end ye
,
'01-02-03-04-05-06-07-08-09-10-11-12' dataTime
from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1
LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1'
LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1'
LEFT JOIN(
SELECT DISTINCT F0000031,
CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12
THEN date_format(F0000030, '%Y')+1
ELSE date_format(F0000030, '%Y')
END ye
FROM `日报填报`
where Status='1'
)t4 on t4.F0000031=t3.ObjectId
) as T) [numPartitions=1] [F0000080#38477,ye#38478,dataTime#38479] PushedFilters: [], ReadSchema: struct<F0000080:string,ye:string,dataTime:string>
WholeStageCodegen (1)
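Read bottom-up, the operator list above shows: a single-partition JDBC subquery scan feeds a typed row-by-row stage (DeserializeToObject -> MapElements -> SerializeFromObject), a Generate node unpivots twelve dataTime_i columns via stack(12, ...), a second typed stage rewrites values, the result is cached (InMemoryTableScan), and a two-phase count runs on top. A hedged PySpark sketch of a pipeline with that shape; the JDBC URL, the split() stand-in for the first map stage, and the names wide/long_df/pairs are illustrative assumptions, not taken from the source:

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

# Placeholder JDBC source; the real subquery is the one shown in the Scan node.
df = (spark.read.format("jdbc")
      .option("url", "jdbc:mysql://host:3306/db")      # placeholder URL
      .option("dbtable", "( SELECT ... ) as T")        # subquery pushed to the database
      .option("numPartitions", 1)
      .load())

# The DeserializeToObject/MapElements/SerializeFromObject triples come from typed
# row-level transforms (DivideNode and ValueReplaceNode in the plan); splitting
# dataTime into twelve columns stands in for the first of them here.
wide = df.selectExpr(
    "F0000080", "ye", "dataTime",
    *[f"split(dataTime, '-')[{i}] AS dataTime_{i}" for i in range(12)]
)

# Generate = stack(12, ...): unpivot the twelve columns into (monname, mon).
pairs = ", ".join(f"'dataTime_{i}', dataTime_{i}" for i in range(12))
long_df = wide.selectExpr("F0000080", "ye", "dataTime",
                          f"stack(12, {pairs}) AS (monname, mon)")

long_df.cache()          # materializes the InMemoryRelation
print(long_df.count())   # drives the partial/final HashAggregate pair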
== Physical Plan ==
* HashAggregate (4)
+- Exchange (3)
   +- * HashAggregate (2)
      +- InMemoryTableScan (1)
            +- InMemoryRelation (2)
                  +- * SerializeFromObject (10)
                     +- * MapElements (9)
                        +- * DeserializeToObject (8)
                           +- Generate (7)
                              +- * SerializeFromObject (6)
                                 +- * MapElements (5)
                                    +- * DeserializeToObject (4)
                                       +- * Scan JDBCRelation(( SELECT
DISTINCT t1.F0000080,
case when t4.ye<>''
THEN t4.ye
ELSE DATE_FORMAT(NOW(),'%Y') end ye
,
'01-02-03-04-05-06-07-08-09-10-11-12' dataTime
from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1
LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1'
LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1'
LEFT JOIN(
SELECT DISTINCT F0000031,
CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12
THEN date_format(F0000030, '%Y')+1
ELSE date_format(F0000030, '%Y')
END ye
FROM `日报填报`
where Status='1'
)t4 on t4.F0000031=t3.ObjectId
) as T) [numPartitions=1] (3)
(1) InMemoryTableScan
Output: []
(2) InMemoryRelation
Arguments: [F0000080#38678, ye#38679, dataTime#38680, monname#38681, mon#38682], CachedRDDBuilder(org.apache.spark.sql.execution.columnar.DefaultCachedBatchSerializer@6520a94a,StorageLevel(disk, memory, deserialized, 1 replicas),*(2) SerializeFromObject [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38678, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38679, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38680, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, monname), StringType), true, false) AS monname#38681, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, mon), StringType), true, false) AS mon#38682]
+- *(2) MapElements smartbix.datamining.engine.execute.node.preprocess.ValueReplaceNode$2@aaa7cc2, obj#38677: org.apache.spark.sql.Row
+- *(2) DeserializeToObject createexternalrow(F0000080#38597.toString, ye#38598.toString, dataTime#38599.toString, monname#38648.toString, mon#38649.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)), obj#38676: org.apache.spark.sql.Row
+- Generate stack(12, dataTime_0, dataTime_0#38600, dataTime_1, dataTime_1#38601, dataTime_2, dataTime_2#38602, dataTime_3, dataTime_3#38603, dataTime_4, dataTime_4#38604, dataTime_5, dataTime_5#38605, dataTime_6, dataTime_6#38606, dataTime_7, dataTime_7#38607, dataTime_8, dataTime_8#38608, dataTime_9, dataTime_9#38609, dataTime_10, dataTime_10#38610, dataTime_11, dataTime_11#38611), [F0000080#38597, ye#38598, dataTime#38599], false, [monname#38648, mon#38649]
+- *(1) SerializeFromObject [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38597, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38598, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38599, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#38600, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#38601, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#38602, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#38603, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#38604, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#38605, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#38606, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), 
StringType), true, false) AS dataTime_7#38607, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#38608, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#38609, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#38610, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#38611]
+- *(1) MapElements smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@76684062, obj#38596: org.apache.spark.sql.Row
+- *(1) DeserializeToObject createexternalrow(F0000080#38477.toString, ye#38478.toString, dataTime#38479.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#38595: org.apache.spark.sql.Row
+- *(1) Scan JDBCRelation(( SELECT
DISTINCT t1.F0000080,
case when t4.ye<>''
THEN t4.ye
ELSE DATE_FORMAT(NOW(),'%Y') end ye
,
'01-02-03-04-05-06-07-08-09-10-11-12' dataTime
from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1
LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1'
LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1'
LEFT JOIN(
SELECT DISTINCT F0000031,
CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12
THEN date_format(F0000030, '%Y')+1
ELSE date_format(F0000030, '%Y')
END ye
FROM `日报填报`
where Status='1'
)t4 on t4.F0000031=t3.ObjectId
) as T) [numPartitions=1] [F0000080#38477,ye#38478,dataTime#38479] PushedFilters: [], ReadSchema: struct<F0000080:string,ye:string,dataTime:string>
,None)
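The CachedRDDBuilder arguments record how the relation was cached: StorageLevel(disk, memory, deserialized, 1 replicas) is how StorageLevel.MEMORY_AND_DISK prints, the default level for a plain DataFrame cache(). A minimal sketch, assuming long_df is the DataFrame from the earlier sketch:

# cache() on a DataFrame defaults to MEMORY_AND_DISK (deserialized on the JVM),
# matching the StorageLevel recorded in the CachedRDDBuilder above.
long_df.cache()
long_df.count()  # the first action populates the cache; later reads hit InMemoryTableScan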
(3) Scan JDBCRelation(( SELECT
DISTINCT t1.F0000080,
case when t4.ye<>''
THEN t4.ye
ELSE DATE_FORMAT(NOW(),'%Y') end ye
,
'01-02-03-04-05-06-07-08-09-10-11-12' dataTime
from (SELECT F0000080 from `立项审批` WHERE `Status`='1' ) t1
LEFT JOIN `井位信息` t2 on t2.ObjectId=t1.F0000080 and t2.`Status`='1'
LEFT JOIN `井位运行情况表` t3 on t2.ObjectId=t3.F0000026 and t3.`Status`='1'
LEFT JOIN(
SELECT DISTINCT F0000031,
CASE WHEN date_format(F0000030, '%d')>25 and date_format(F0000030, '%c')=12
THEN date_format(F0000030, '%Y')+1
ELSE date_format(F0000030, '%Y')
END ye
FROM `日报填报`
where Status='1'
)t4 on t4.F0000031=t3.ObjectId
) as T) [numPartitions=1] [codegen id : 1]
Output [3]: [F0000080#38477, ye#38478, dataTime#38479]
ReadSchema: struct<F0000080:string,ye:string,dataTime:string>
(4) DeserializeToObject [codegen id : 1]
Input [3]: [F0000080#38477, ye#38478, dataTime#38479]
Arguments: createexternalrow(F0000080#38477.toString, ye#38478.toString, dataTime#38479.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true)), obj#38595: org.apache.spark.sql.Row
(5) MapElements [codegen id : 1]
Input [1]: [obj#38595]
Arguments: smartbix.datamining.engine.execute.node.preprocess.DivideNode$2@76684062, obj#38596: org.apache.spark.sql.Row
(6) SerializeFromObject [codegen id : 1]
Input [1]: [obj#38596]
Arguments: [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38597, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38598, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38599, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, dataTime_0), StringType), true, false) AS dataTime_0#38600, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, dataTime_1), StringType), true, false) AS dataTime_1#38601, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 5, dataTime_2), StringType), true, false) AS dataTime_2#38602, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 6, dataTime_3), StringType), true, false) AS dataTime_3#38603, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 7, dataTime_4), StringType), true, false) AS dataTime_4#38604, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 8, dataTime_5), StringType), true, false) AS dataTime_5#38605, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 9, dataTime_6), StringType), true, false) AS dataTime_6#38606, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 10, dataTime_7), StringType), true, false) 
AS dataTime_7#38607, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 11, dataTime_8), StringType), true, false) AS dataTime_8#38608, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 12, dataTime_9), StringType), true, false) AS dataTime_9#38609, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 13, dataTime_10), StringType), true, false) AS dataTime_10#38610, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 14, dataTime_11), StringType), true, false) AS dataTime_11#38611]
(7) Generate
Input [15]: [F0000080#38597, ye#38598, dataTime#38599, dataTime_0#38600, dataTime_1#38601, dataTime_2#38602, dataTime_3#38603, dataTime_4#38604, dataTime_5#38605, dataTime_6#38606, dataTime_7#38607, dataTime_8#38608, dataTime_9#38609, dataTime_10#38610, dataTime_11#38611]
Arguments: stack(12, dataTime_0, dataTime_0#38600, dataTime_1, dataTime_1#38601, dataTime_2, dataTime_2#38602, dataTime_3, dataTime_3#38603, dataTime_4, dataTime_4#38604, dataTime_5, dataTime_5#38605, dataTime_6, dataTime_6#38606, dataTime_7, dataTime_7#38607, dataTime_8, dataTime_8#38608, dataTime_9, dataTime_9#38609, dataTime_10, dataTime_10#38610, dataTime_11, dataTime_11#38611), [F0000080#38597, ye#38598, dataTime#38599], false, [monname#38648, mon#38649]
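stack(n, k1, v1, k2, v2, ...) emits n rows per input row, so this Generate unpivots the twelve dataTime_i columns into (monname, mon) pairs. A tiny self-contained illustration (the column names here are invented):

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
spark.createDataFrame([(1, 10, 20)], "id int, a int, b int") \
     .selectExpr("id", "stack(2, 'a', a, 'b', b) AS (name, val)") \
     .show()
# +---+----+---+
# | id|name|val|
# +---+----+---+
# |  1|   a| 10|
# |  1|   b| 20|
# +---+----+---+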
(8) DeserializeToObject [codegen id : 2]
Input [5]: [F0000080#38597, ye#38598, dataTime#38599, monname#38648, mon#38649]
Arguments: createexternalrow(F0000080#38597.toString, ye#38598.toString, dataTime#38599.toString, monname#38648.toString, mon#38649.toString, StructField(F0000080,StringType,true), StructField(ye,StringType,true), StructField(dataTime,StringType,true), StructField(monname,StringType,true), StructField(mon,StringType,true)), obj#38676: org.apache.spark.sql.Row
(9) MapElements [codegen id : 2]
Input [1]: [obj#38676]
Arguments: smartbix.datamining.engine.execute.node.preprocess.ValueReplaceNode$2@aaa7cc2, obj#38677: org.apache.spark.sql.Row
(10) SerializeFromObject [codegen id : 2]
Input [1]: [obj#38677]
Arguments: [if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, F0000080), StringType), true, false) AS F0000080#38678, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, ye), StringType), true, false) AS ye#38679, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 2, dataTime), StringType), true, false) AS dataTime#38680, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 3, monname), StringType), true, false) AS monname#38681, if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 4, mon), StringType), true, false) AS mon#38682]
(2) HashAggregate [codegen id : 1]
Input: []
Keys: []
Functions [1]: [partial_count(1)]
Aggregate Attributes [1]: [count#38791L]
Results [1]: [count#38792L]
(3) Exchange
Input [1]: [count#38792L]
Arguments: SinglePartition, ENSURE_REQUIREMENTS, [id=#11764]
(4) HashAggregate [codegen id : 2]
Input [1]: [count#38792L]
Keys: []
Functions [1]: [count(1)]
Aggregate Attributes [1]: [count(1)#38713L]
Results [1]: [count(1)#38713L AS count#38714L]
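Sections (2) through (4) form the standard two-phase count: every task computes partial_count(1), Exchange SinglePartition gathers the one-row partials onto a single partition, and the final HashAggregate folds them into count(1). The same numbered-section layout can be reproduced for any DataFrame; a minimal sketch, assuming an active SparkSession named spark:

# count() compiles to per-partition partial_count + a single-partition exchange
# + a final count; explain("formatted") prints the layout used above.
spark.range(100).groupBy().count().explain("formatted")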