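Whole-stage codegen output from the Kotlin Spark API (Arity1 comes from org.jetbrains.spark.api). The class below is stage 1 (codegenStageId=1) as Spark prints it, /* NNN */ line markers included: it fuses LocalTableScan -> DeserializeToObject -> MapElements -> SerializeFromObject into a single BufferedRowIterator.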
/* 001 */ public Object generate(Object[] references) {
/* 002 */   return new GeneratedIteratorForCodegenStage1(references);
/* 003 */ }
/* 004 */
/* 005 */ // codegenStageId=1
/* 006 */ final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
/* 007 */   private Object[] references;
/* 008 */   private scala.collection.Iterator[] inputs;
/* 009 */   private scala.collection.Iterator localtablescan_input_0;
/* 010 */   private boolean deserializetoobject_resultIsNull_0;
/* 011 */   private int deserializetoobject_argValue_0;
/* 012 */   private boolean mapelements_resultIsNull_0;
/* 013 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] deserializetoobject_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[5];
/* 014 */   private java.lang.Integer[] mapelements_mutableStateArray_0 = new java.lang.Integer[1];
/* 015 */
/* 016 */   public GeneratedIteratorForCodegenStage1(Object[] references) {
/* 017 */     this.references = references;
/* 018 */   }
/* 019 */
/* 020 */   public void init(int index, scala.collection.Iterator[] inputs) {
/* 021 */     partitionIndex = index;
/* 022 */     this.inputs = inputs;
/* 023 */     localtablescan_input_0 = inputs[0];
/* 024 */
/* 025 */     deserializetoobject_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 026 */     deserializetoobject_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 027 */     deserializetoobject_mutableStateArray_0[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 028 */     deserializetoobject_mutableStateArray_0[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 029 */     deserializetoobject_mutableStateArray_0[4] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
/* 030 */
/* 031 */   }
/* 032 */
/* 033 */   private void mapelements_doConsume_0(java.lang.Integer mapelements_expr_0_0, boolean mapelements_exprIsNull_0_0) throws java.io.IOException {
/* 034 */     boolean mapelements_isNull_1 = true;
/* 035 */     org.jetbrains.spark.api.Arity1 mapelements_value_1 = null;
/* 036 */     if (!false) {
/* 037 */       mapelements_resultIsNull_0 = false;
/* 038 */
/* 039 */       if (!mapelements_resultIsNull_0) {
/* 040 */         mapelements_resultIsNull_0 = mapelements_exprIsNull_0_0;
/* 041 */         mapelements_mutableStateArray_0[0] = mapelements_expr_0_0;
/* 042 */       }
/* 043 */
/* 044 */       mapelements_isNull_1 = mapelements_resultIsNull_0;
/* 045 */       if (!mapelements_isNull_1) {
/* 046 */         Object mapelements_funcResult_0 = null;
/* 047 */
/* 048 */         try {
/* 049 */           mapelements_funcResult_0 = ((org.apache.spark.api.java.function.MapFunction) references[1] /* literal */).call(mapelements_mutableStateArray_0[0]);
/* 050 */         } catch (Exception e) {
/* 051 */           org.apache.spark.unsafe.Platform.throwException(e);
/* 052 */         }
/* 053 */
/* 054 */         if (mapelements_funcResult_0 != null) {
/* 055 */           mapelements_value_1 = (org.jetbrains.spark.api.Arity1) mapelements_funcResult_0;
/* 056 */         } else {
/* 057 */           mapelements_isNull_1 = true;
/* 058 */         }
/* 059 */
/* 060 */       }
/* 061 */     }
/* 062 */
/* 063 */     serializefromobject_doConsume_0(mapelements_value_1, mapelements_isNull_1);
/* 064 */
/* 065 */   }
/* 066 */
/* 067 */   private void deserializetoobject_doConsume_0(InternalRow localtablescan_row_0, int deserializetoobject_expr_0_0, boolean deserializetoobject_exprIsNull_0_0) throws java.io.IOException {
/* 068 */     deserializetoobject_resultIsNull_0 = false;
/* 069 */
/* 070 */     if (!deserializetoobject_resultIsNull_0) {
/* 071 */       deserializetoobject_resultIsNull_0 = deserializetoobject_exprIsNull_0_0;
/* 072 */       deserializetoobject_argValue_0 = deserializetoobject_expr_0_0;
/* 073 */     }
/* 074 */
/* 075 */     boolean deserializetoobject_isNull_0 = deserializetoobject_resultIsNull_0;
/* 076 */     java.lang.Integer deserializetoobject_value_0 = null;
/* 077 */     if (!deserializetoobject_resultIsNull_0) {
/* 078 */       deserializetoobject_value_0 = java.lang.Integer.valueOf(deserializetoobject_argValue_0);
/* 079 */     }
/* 080 */
/* 081 */     mapelements_doConsume_0(deserializetoobject_value_0, deserializetoobject_isNull_0);
/* 082 */
/* 083 */   }
/* 084 */
/* 085 */   private void serializefromobject_doConsume_0(org.jetbrains.spark.api.Arity1 serializefromobject_expr_0_0, boolean serializefromobject_exprIsNull_0_0) throws java.io.IOException {
/* 086 */     if (serializefromobject_exprIsNull_0_0) {
/* 087 */       throw new NullPointerException(((java.lang.String) references[2] /* errMsg */));
/* 088 */     }
/* 089 */     boolean serializefromobject_isNull_2 = true;
/* 090 */     java.lang.Integer serializefromobject_value_2 = null;
/* 091 */     if (!false) {
/* 092 */       serializefromobject_isNull_2 = false;
/* 093 */       if (!serializefromobject_isNull_2) {
/* 094 */         Object serializefromobject_funcResult_0 = null;
/* 095 */         serializefromobject_funcResult_0 = serializefromobject_expr_0_0.getA();
/* 096 */
/* 097 */         if (serializefromobject_funcResult_0 != null) {
/* 098 */           serializefromobject_value_2 = (java.lang.Integer) serializefromobject_funcResult_0;
/* 099 */         } else {
/* 100 */           serializefromobject_isNull_2 = true;
/* 101 */         }
/* 102 */
/* 103 */       }
/* 104 */     }
/* 105 */     boolean serializefromobject_isNull_1 = true;
/* 106 */     int serializefromobject_value_1 = -1;
/* 107 */     if (!serializefromobject_isNull_2) {
/* 108 */       serializefromobject_isNull_1 = false;
/* 109 */       if (!serializefromobject_isNull_1) {
/* 110 */         serializefromobject_value_1 = serializefromobject_value_2.intValue();
/* 111 */       }
/* 112 */     }
/* 113 */     deserializetoobject_mutableStateArray_0[4].reset();
/* 114 */
/* 115 */     deserializetoobject_mutableStateArray_0[4].zeroOutNullBytes();
/* 116 */
/* 117 */     if (serializefromobject_isNull_1) {
/* 118 */       deserializetoobject_mutableStateArray_0[4].setNullAt(0);
/* 119 */     } else {
/* 120 */       deserializetoobject_mutableStateArray_0[4].write(0, serializefromobject_value_1);
/* 121 */     }
/* 122 */     append((deserializetoobject_mutableStateArray_0[4].getRow()));
/* 123 */
/* 124 */   }
/* 125 */
/* 126 */   protected void processNext() throws java.io.IOException {
/* 127 */     while ( localtablescan_input_0.hasNext()) {
/* 128 */       InternalRow localtablescan_row_0 = (InternalRow) localtablescan_input_0.next();
/* 129 */       ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
/* 130 */       boolean localtablescan_isNull_0 = localtablescan_row_0.isNullAt(0);
/* 131 */       int localtablescan_value_0 = localtablescan_isNull_0 ?
/* 132 */       -1 : (localtablescan_row_0.getInt(0));
/* 133 */
/* 134 */       deserializetoobject_doConsume_0(localtablescan_row_0, localtablescan_value_0, localtablescan_isNull_0);
/* 135 */       if (shouldStop()) return;
/* 136 */     }
/* 137 */   }
/* 138 */
/* 139 */ }
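The same class, cleaned up by hand for readability: Spark's /* NNN */ markers are stripped, the constant-foldable branches (if (!false), flags assigned and immediately tested) are collapsed, the unused inputs field is dropped, fields are made final, and the throws java.io.IOException clauses are removed. Functionally it is the same iterator.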
import org.apache.spark.sql.catalyst.InternalRow;

/** @noinspection rawtypes, unused */
final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
  private final Object[] references;
  private scala.collection.Iterator localtablescan_input_0;
  // Five writers are allocated to mirror the generated code, but only index 4 is used below.
  private final org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] deserializetoobject_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[5];
  private final java.lang.Integer[] mapelements_mutableStateArray_0 = new java.lang.Integer[1];

  public GeneratedIteratorForCodegenStage1(Object[] references) {
    this.references = references;
  }

  public void init(int index, scala.collection.Iterator[] inputs) {
    partitionIndex = index;
    localtablescan_input_0 = inputs[0];
    deserializetoobject_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
    deserializetoobject_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
    deserializetoobject_mutableStateArray_0[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
    deserializetoobject_mutableStateArray_0[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
    deserializetoobject_mutableStateArray_0[4] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
  }

  // MapElements: feeds the boxed value to the user's MapFunction (references[1])
  // and forwards the resulting Arity1 to the serializer.
  /** @noinspection unchecked */
  private void mapelements_doConsume_0(java.lang.Integer mapelements_expr_0_0, boolean mapelements_exprIsNull_0_0) {
    boolean mapelements_isNull_1;
    org.jetbrains.spark.api.Arity1 mapelements_value_1 = null;
    boolean mapelements_resultIsNull_0;
    mapelements_resultIsNull_0 = mapelements_exprIsNull_0_0;
    mapelements_mutableStateArray_0[0] = mapelements_expr_0_0;
    mapelements_isNull_1 = mapelements_resultIsNull_0;
    if (!mapelements_isNull_1) {
      Object mapelements_funcResult_0 = null;
      try {
        mapelements_funcResult_0 = ((org.apache.spark.api.java.function.MapFunction) references[1] /* literal */).call(mapelements_mutableStateArray_0[0]);
      } catch (Exception e) {
        org.apache.spark.unsafe.Platform.throwException(e);
      }
      if (mapelements_funcResult_0 != null) {
        mapelements_value_1 = (org.jetbrains.spark.api.Arity1) mapelements_funcResult_0;
      } else {
        mapelements_isNull_1 = true;
      }
    }
    serializefromobject_doConsume_0(mapelements_value_1, mapelements_isNull_1);
  }

  // DeserializeToObject: boxes the primitive int column into java.lang.Integer
  // so the user function can be called on it.
  private void deserializetoobject_doConsume_0(InternalRow localtablescan_row_0, int deserializetoobject_expr_0_0, boolean deserializetoobject_exprIsNull_0_0) {
    boolean deserializetoobject_resultIsNull_0;
    deserializetoobject_resultIsNull_0 = deserializetoobject_exprIsNull_0_0;
    boolean deserializetoobject_isNull_0 = deserializetoobject_resultIsNull_0;
    java.lang.Integer deserializetoobject_value_0 = null;
    if (!deserializetoobject_resultIsNull_0) {
      deserializetoobject_value_0 = deserializetoobject_expr_0_0; // autoboxing
    }
    mapelements_doConsume_0(deserializetoobject_value_0, deserializetoobject_isNull_0);
  }

  // SerializeFromObject: reads getA() back out of the Arity1 and writes it
  // into an UnsafeRow, which is appended to the iterator's output buffer.
  private void serializefromobject_doConsume_0(org.jetbrains.spark.api.Arity1 serializefromobject_expr_0_0, boolean serializefromobject_exprIsNull_0_0) {
    if (serializefromobject_exprIsNull_0_0) {
      // the top-level object must not be null; references[2] holds the error message
      throw new NullPointerException(((java.lang.String) references[2] /* errMsg */));
    }
    boolean serializefromobject_isNull_2;
    java.lang.Integer serializefromobject_value_2 = null;
    serializefromobject_isNull_2 = false;
    Object serializefromobject_funcResult_0;
    serializefromobject_funcResult_0 = serializefromobject_expr_0_0.getA();
    if (serializefromobject_funcResult_0 != null) {
      serializefromobject_value_2 = (Integer) serializefromobject_funcResult_0;
    } else {
      serializefromobject_isNull_2 = true;
    }
    boolean serializefromobject_isNull_1 = true;
    int serializefromobject_value_1 = -1;
    if (!serializefromobject_isNull_2) {
      serializefromobject_isNull_1 = false;
      serializefromobject_value_1 = serializefromobject_value_2; // auto-unboxing
    }
    deserializetoobject_mutableStateArray_0[4].reset();
    deserializetoobject_mutableStateArray_0[4].zeroOutNullBytes();
    if (serializefromobject_isNull_1) {
      deserializetoobject_mutableStateArray_0[4].setNullAt(0);
    } else {
      deserializetoobject_mutableStateArray_0[4].write(0, serializefromobject_value_1);
    }
    append((deserializetoobject_mutableStateArray_0[4].getRow()));
  }

  // Driver loop over the LocalTableScan input; bumps numOutputRows (references[0]) per row.
  protected void processNext() {
    while (localtablescan_input_0.hasNext()) {
      InternalRow localtablescan_row_0 = (InternalRow) localtablescan_input_0.next();
      ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
      boolean localtablescan_isNull_0 = localtablescan_row_0.isNullAt(0);
      int localtablescan_value_0 = localtablescan_isNull_0 ?
          -1 : (localtablescan_row_0.getInt(0));
      deserializetoobject_doConsume_0(localtablescan_row_0, localtablescan_value_0, localtablescan_isNull_0);
      if (shouldStop()) return;
    }
  }
}
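For context, a minimal sketch (not part of the gist) of the kind of pipeline that produces this plan, assuming the early kotlin-spark-api surface where withSpark, dsOf, and the c() tuple helper (which wraps a value into the Arity1 seen above) are the entry points:

// Hypothetical user code; the plan-operator names match the prefixes
// in the generated class (localtablescan, deserializetoobject, ...).
import org.jetbrains.spark.api.*

fun main() = withSpark {
    dsOf(1, 2, 3)        // becomes the LocalTableScan over ints
        .map { c(it) }   // DeserializeToObject -> MapElements -> SerializeFromObject
        .show()
}

Listings like the first one can be dumped from spark-shell with import org.apache.spark.sql.execution.debug._ followed by ds.debugCodegen().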