Created March 19, 2020 14:18
/**
 * Returns an expression that can be used to deserialize a Spark SQL representation to an object
 * of java bean `T` with a compatible schema. The Spark SQL representation is located at ordinal
 * 0 of a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed
 * using `UnresolvedExtractValue`.
 */
def deserializerForDataType(beanClass: Class[_], dt: KDataTypeWrapper): Expression = {
  val typeToken = TypeToken.of(beanClass)
  val walkedTypePath = new WalkedTypePath().recordRoot(beanClass.getCanonicalName)
  val (dataType, nullable) = inferDataType(typeToken)
  // Assumes we are deserializing the first column of a row.
  deserializerForWithNullSafetyAndUpcast(
    GetColumnByOrdinal(0, dataType),
    dataType,
    nullable = nullable,
    walkedTypePath,
    (casted, walkedTypePath) => {
      deserializerFor(typeToken, casted, walkedTypePath, Some(dt))
    }
  )
}

// snip

case _ if predefinedDt.isInstanceOf[Some[KDataTypeWrapper]] =>
  val wrapper = predefinedDt.asInstanceOf[Some[KDataTypeWrapper]].get
  val structType = wrapper.dt
  val cls = wrapper.cls
  val args = getJavaBeanReadableProperties(cls)
    .toSeq
    .map { field =>
      val fieldName = field.getName
      val fieldDT = structType.fields.find(_.name == fieldName).get.dataType.asInstanceOf[DataTypeWithClass]
      val fieldCls = fieldDT.cls
      val (dataType, nullable) = inferDataType(fieldCls)
      val fieldType = TypeToken.of(fieldCls)
      val newTypePath = walkedTypePath.recordField(fieldType.getType.getTypeName, fieldName)
      val newPath = deserializerFor(
        fieldType,
        addToPath(path, fieldName, dataType, newTypePath),
        newTypePath,
        Some(fieldDT)
      )
      expressionWithNullSafety(
        newPath,
        nullable = nullable,
        newTypePath
      )
    }
  val newInstance = NewInstance(cls, args, wrapper.dt.asInstanceOf[StructType], false)
  org.apache.spark.sql.catalyst.expressions.If(
    IsNull(path),
    org.apache.spark.sql.catalyst.expressions.Literal.create(null, wrapper.dt),
    newInstance
  )
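One note on the guard in this case: because generic type arguments are erased on the JVM, `isInstanceOf[Some[KDataTypeWrapper]]` only verifies that the value is a `Some`; the element type is not checked at runtime. That may be fine if callers always pass a `KDataTypeWrapper`, but the check is weaker than it looks. A minimal, self-contained sketch of the behavior (hypothetical names, not from the gist):

// Sketch only: demonstrates erasure of the element type in isInstanceOf checks.
object ErasureDemo {
  // Stand-in for the real wrapper; the actual KDataTypeWrapper is not shown in the gist.
  final class KDataTypeWrapper

  def main(args: Array[String]): Unit = {
    val opt: Option[Any] = Some("definitely not a KDataTypeWrapper")
    // Prints true: only the Some is checked at runtime, the element type is erased.
    println(opt.isInstanceOf[Some[KDataTypeWrapper]])
    // An element-type-aware alternative prints false.
    println(opt.exists(_.isInstanceOf[KDataTypeWrapper]))
  }
}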
def serializerForDataType(cls: java.lang.Class[_], dt: KDataTypeWrapper) = {
  val nullSafeInput = AssertNotNull(BoundReference(0, ObjectType(cls), nullable = true), Seq("top level input bean"))
  serializerFor(nullSafeInput, TypeToken.of(cls), Some(dt))
}

// snip

case _ if predefinedDt.isInstanceOf[Some[StructType]] =>
  val dtWithClass = predefinedDt.get
  val properties = getJavaBeanReadableProperties(dtWithClass.cls)
  val fields = dtWithClass.dt.asInstanceOf[StructType]
    .fields
    .toSeq
    .map { field =>
      val fieldName = field.name
      val fieldDt = field.dataType.asInstanceOf[DataTypeWithClass]
      val fieldType = fieldDt.cls
      val fieldValue = Invoke(
        inputObject,
        properties.find(_.getName == fieldName).map(_.getReadMethod.getName).get,
        inferExternalType(fieldType)
      )
      (fieldName, serializerFor(fieldValue, TypeToken.of(fieldType), Some(fieldDt)))
    }
  createSerializerForObject(inputObject, fields)
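For context, the two entry points above would typically be combined into an encoder roughly as follows. This is a minimal sketch, assuming Spark 3.0's three-argument `ExpressionEncoder` constructor (objSerializer, objDeserializer, clsTag) and that `serializerForDataType`, `deserializerForDataType`, and `KDataTypeWrapper` from the snippets are in scope; the gist's actual wiring may differ.

import scala.reflect.ClassTag

import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder

// Sketch: build an encoder for a bean class from the serializer and deserializer
// expressions produced above (assumed wiring, not taken from the gist).
def kotlinBeanEncoder[T](beanClass: Class[T], dt: KDataTypeWrapper): ExpressionEncoder[T] =
  ExpressionEncoder[T](
    serializerForDataType(beanClass, dt),   // object -> Catalyst serializer expression
    deserializerForDataType(beanClass, dt), // Catalyst -> object deserializer expression
    ClassTag(beanClass)
  )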
/* 001 */ public Object generate(Object[] references) {
/* 002 */ return new GeneratedIteratorForCodegenStage1(references);
/* 003 */ }
/* 004 */
/* 005 */ // codegenStageId=1
/* 006 */ final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
/* 007 */ private Object[] references;
/* 008 */ private scala.collection.Iterator[] inputs;
/* 009 */ private scala.collection.Iterator localtablescan_input_0;
/* 010 */ private int deserializetoobject_argValue_0;
/* 011 */ private int deserializetoobject_argValue_1;
/* 012 */ private boolean mapelements_resultIsNull_0;
/* 013 */ private boolean mapelements_resultIsNull_1;
/* 014 */ private boolean mapelements_resultIsNull_2;
/* 015 */ private InternalRow[] deserializetoobject_mutableStateArray_0 = new InternalRow[4];
/* 016 */ private java.lang.String[] deserializetoobject_mutableStateArray_1 = new java.lang.String[1];
/* 017 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] debug_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[36];
/* 018 */
/* 019 */ public GeneratedIteratorForCodegenStage1(Object[] references) {
/* 020 */ this.references = references;
/* 021 */ }
/* 022 */
/* 023 */ public void init(int index, scala.collection.Iterator[] inputs) {
/* 024 */ partitionIndex = index;
/* 025 */ this.inputs = inputs;
/* 026 */ wholestagecodegen_init_0_0();
/* 027 */ wholestagecodegen_init_0_1();
/* 028 */ wholestagecodegen_init_0_2();
/* 029 */ wholestagecodegen_init_0_3();
/* 030 */
/* 031 */ }
/* 032 */
/* 033 */ private void wholestagecodegen_init_0_3() {
/* 034 */ debug_mutableStateArray_0[27] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[26], 2);
/* 035 */ debug_mutableStateArray_0[28] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 036 */ debug_mutableStateArray_0[29] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[28], 3);
/* 037 */ debug_mutableStateArray_0[30] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 038 */ debug_mutableStateArray_0[31] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[30], 3);
/* 039 */ debug_mutableStateArray_0[32] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 040 */ debug_mutableStateArray_0[33] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[32], 3);
/* 041 */ debug_mutableStateArray_0[34] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 0);
/* 042 */ debug_mutableStateArray_0[35] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 0);
/* 043 */
/* 044 */ }
/* 045 */
/* 046 */ private void mapelements_doConsume_0(InternalRow mapelements_expr_0_0, boolean mapelements_exprIsNull_0_0) throws java.io.IOException {
/* 047 */ boolean mapelements_isNull_1 = true;
/* 048 */ InternalRow mapelements_value_1 = null;
/* 049 */ if (!false) {
/* 050 */ mapelements_resultIsNull_0 = false;
/* 051 */
/* 052 */ if (!mapelements_resultIsNull_0) {
/* 053 */ mapelements_resultIsNull_0 = mapelements_exprIsNull_0_0;
/* 054 */ deserializetoobject_mutableStateArray_0[1] = mapelements_expr_0_0;
/* 055 */ }
/* 056 */
/* 057 */ mapelements_isNull_1 = mapelements_resultIsNull_0;
/* 058 */ if (!mapelements_isNull_1) {
/* 059 */ Object mapelements_funcResult_0 = null;
/* 060 */
/* 061 */ try {
/* 062 */ mapelements_funcResult_0 = ((org.apache.spark.api.java.function.MapFunction) references[3] /* literal */).call(deserializetoobject_mutableStateArray_0[1]);
/* 063 */ } catch (Exception e) {
/* 064 */ org.apache.spark.unsafe.Platform.throwException(e);
/* 065 */ }
/* 066 */
/* 067 */ if (mapelements_funcResult_0 != null) {
/* 068 */ mapelements_value_1 = (InternalRow) mapelements_funcResult_0;
/* 069 */ } else {
/* 070 */ mapelements_isNull_1 = true;
/* 071 */ }
/* 072 */
/* 073 */ }
/* 074 */ }
/* 075 */
/* 076 */ mapelements_doConsume_1(mapelements_value_1, mapelements_isNull_1);
/* 077 */
/* 078 */ }
/* 079 */
/* 080 */ private void mapelements_doConsume_2(InternalRow mapelements_expr_0_2, boolean mapelements_exprIsNull_0_2) throws java.io.IOException {
/* 081 */ boolean mapelements_isNull_11 = true;
/* 082 */ InternalRow mapelements_value_11 = null;
/* 083 */ if (!false) {
/* 084 */ mapelements_resultIsNull_2 = false;
/* 085 */
/* 086 */ if (!mapelements_resultIsNull_2) {
/* 087 */ mapelements_resultIsNull_2 = mapelements_exprIsNull_0_2;
/* 088 */ deserializetoobject_mutableStateArray_0[3] = mapelements_expr_0_2;
/* 089 */ }
/* 090 */
/* 091 */ mapelements_isNull_11 = mapelements_resultIsNull_2;
/* 092 */ if (!mapelements_isNull_11) {
/* 093 */ Object mapelements_funcResult_2 = null;
/* 094 */
/* 095 */ try {
/* 096 */ mapelements_funcResult_2 = ((org.apache.spark.api.java.function.MapFunction) references[5] /* literal */).call(deserializetoobject_mutableStateArray_0[3]);
/* 097 */ } catch (Exception e) {
/* 098 */ org.apache.spark.unsafe.Platform.throwException(e);
/* 099 */ }
/* 100 */
/* 101 */ if (mapelements_funcResult_2 != null) {
/* 102 */ mapelements_value_11 = (InternalRow) mapelements_funcResult_2;
/* 103 */ } else {
/* 104 */ mapelements_isNull_11 = true;
/* 105 */ }
/* 106 */
/* 107 */ }
/* 108 */ }
/* 109 */
/* 110 */ serializefromobject_doConsume_0(mapelements_value_11, mapelements_isNull_11);
/* 111 */
/* 112 */ }
/* 113 */
/* 114 */ private void wholestagecodegen_init_0_2() {
/* 115 */ debug_mutableStateArray_0[18] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[17], 2);
/* 116 */ debug_mutableStateArray_0[19] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 117 */ debug_mutableStateArray_0[20] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[19], 2);
/* 118 */ debug_mutableStateArray_0[21] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[20], 2);
/* 119 */ debug_mutableStateArray_0[22] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 120 */ debug_mutableStateArray_0[23] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[22], 2);
/* 121 */ debug_mutableStateArray_0[24] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[23], 2);
/* 122 */ debug_mutableStateArray_0[25] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 123 */ debug_mutableStateArray_0[26] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[25], 2);
/* 124 */
/* 125 */ }
/* 126 */
/* 127 */ private void wholestagecodegen_init_0_1() {
/* 128 */ debug_mutableStateArray_0[9] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[8], 2);
/* 129 */ debug_mutableStateArray_0[10] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 130 */ debug_mutableStateArray_0[11] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[10], 2);
/* 131 */ debug_mutableStateArray_0[12] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[11], 2);
/* 132 */ debug_mutableStateArray_0[13] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 133 */ debug_mutableStateArray_0[14] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[13], 2);
/* 134 */ debug_mutableStateArray_0[15] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 135 */ debug_mutableStateArray_0[16] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[15], 2);
/* 136 */ debug_mutableStateArray_0[17] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 137 */
/* 138 */ }
/* 139 */
/* 140 */ private void deserializetoobject_doConsume_0(int deserializetoobject_expr_0_0, boolean deserializetoobject_exprIsNull_0_0, InternalRow deserializetoobject_expr_1_0, boolean deserializetoobject_exprIsNull_1_0) throws java.io.IOException {
/* 141 */ if (deserializetoobject_exprIsNull_0_0) {
/* 142 */ throw new NullPointerException(((java.lang.String) references[1] /* errMsg */));
/* 143 */ }
/* 144 */ deserializetoobject_argValue_0 = deserializetoobject_expr_0_0;
/* 145 */
/* 146 */ boolean deserializetoobject_isNull_5 = false;
/* 147 */ InternalRow deserializetoobject_value_5 = null;
/* 148 */ if (!false && deserializetoobject_exprIsNull_1_0) {
/* 149 */ deserializetoobject_isNull_5 = true;
/* 150 */ deserializetoobject_value_5 = ((InternalRow)null);
/* 151 */ } else {
/* 152 */ boolean deserializetoobject_isNull_11 = deserializetoobject_exprIsNull_1_0;
/* 153 */ int deserializetoobject_value_11 = -1;
/* 154 */
/* 155 */ if (!deserializetoobject_exprIsNull_1_0) {
/* 156 */ if (deserializetoobject_expr_1_0.isNullAt(0)) {
/* 157 */ deserializetoobject_isNull_11 = true;
/* 158 */ } else {
/* 159 */ deserializetoobject_value_11 = deserializetoobject_expr_1_0.getInt(0);
/* 160 */ }
/* 161 */
/* 162 */ }
/* 163 */ if (deserializetoobject_isNull_11) {
/* 164 */ throw new NullPointerException(((java.lang.String) references[2] /* errMsg */));
/* 165 */ }
/* 166 */ deserializetoobject_argValue_1 = deserializetoobject_value_11;
/* 167 */
/* 168 */ boolean deserializetoobject_isNull_14 = deserializetoobject_exprIsNull_1_0;
/* 169 */ UTF8String deserializetoobject_value_14 = null;
/* 170 */
/* 171 */ if (!deserializetoobject_exprIsNull_1_0) {
/* 172 */ if (deserializetoobject_expr_1_0.isNullAt(1)) {
/* 173 */ deserializetoobject_isNull_14 = true;
/* 174 */ } else {
/* 175 */ deserializetoobject_value_14 = deserializetoobject_expr_1_0.getUTF8String(1);
/* 176 */ }
/* 177 */
/* 178 */ }
/* 179 */ boolean deserializetoobject_isNull_13 = true;
/* 180 */ java.lang.String deserializetoobject_value_13 = null;
/* 181 */ if (!deserializetoobject_isNull_14) {
/* 182 */ deserializetoobject_isNull_13 = false;
/* 183 */ if (!deserializetoobject_isNull_13) {
/* 184 */ Object deserializetoobject_funcResult_0 = null;
/* 185 */ deserializetoobject_funcResult_0 = deserializetoobject_value_14.toString();
/* 186 */
/* 187 */ if (deserializetoobject_funcResult_0 != null) {
/* 188 */ deserializetoobject_value_13 = (java.lang.String) deserializetoobject_funcResult_0;
/* 189 */ } else {
/* 190 */ deserializetoobject_isNull_13 = true;
/* 191 */ }
/* 192 */
/* 193 */ }
/* 194 */ }
/* 195 */ deserializetoobject_mutableStateArray_1[0] = deserializetoobject_value_13;
/* 196 */
/* 197 */ final InternalRow deserializetoobject_value_9 = false ?
/* 198 */ null : new kotlin.Pair(deserializetoobject_argValue_1, deserializetoobject_mutableStateArray_1[0]);
/* 199 */ deserializetoobject_isNull_5 = false;
/* 200 */ deserializetoobject_value_5 = deserializetoobject_value_9;
/* 201 */ }
/* 202 */ deserializetoobject_mutableStateArray_0[0] = deserializetoobject_value_5;
/* 203 */
/* 204 */ final InternalRow deserializetoobject_value_2 = false ?
/* 205 */ null : new org.jetbrains.spark.api.Q(deserializetoobject_argValue_0, deserializetoobject_mutableStateArray_0[0]);
/* 206 */
/* 207 */ mapelements_doConsume_0(deserializetoobject_value_2, false);
/* 208 */
/* 209 */ }
/* 210 */
/* 211 */ private void mapelements_doConsume_1(InternalRow mapelements_expr_0_1, boolean mapelements_exprIsNull_0_1) throws java.io.IOException {
/* 212 */ boolean mapelements_isNull_6 = true;
/* 213 */ InternalRow mapelements_value_6 = null;
/* 214 */ if (!false) {
/* 215 */ mapelements_resultIsNull_1 = false;
/* 216 */
/* 217 */ if (!mapelements_resultIsNull_1) {
/* 218 */ mapelements_resultIsNull_1 = mapelements_exprIsNull_0_1;
/* 219 */ deserializetoobject_mutableStateArray_0[2] = mapelements_expr_0_1;
/* 220 */ }
/* 221 */
/* 222 */ mapelements_isNull_6 = mapelements_resultIsNull_1;
/* 223 */ if (!mapelements_isNull_6) {
/* 224 */ Object mapelements_funcResult_1 = null;
/* 225 */
/* 226 */ try {
/* 227 */ mapelements_funcResult_1 = ((org.apache.spark.api.java.function.MapFunction) references[4] /* literal */).call(deserializetoobject_mutableStateArray_0[2]);
/* 228 */ } catch (Exception e) {
/* 229 */ org.apache.spark.unsafe.Platform.throwException(e);
/* 230 */ }
/* 231 */
/* 232 */ if (mapelements_funcResult_1 != null) {
/* 233 */ mapelements_value_6 = (InternalRow) mapelements_funcResult_1;
/* 234 */ } else {
/* 235 */ mapelements_isNull_6 = true;
/* 236 */ }
/* 237 */
/* 238 */ }
/* 239 */ }
/* 240 */
/* 241 */ mapelements_doConsume_2(mapelements_value_6, mapelements_isNull_6);
/* 242 */
/* 243 */ }
/* 244 */
/* 245 */ private void serializefromobject_doConsume_0(InternalRow serializefromobject_expr_0_0, boolean serializefromobject_exprIsNull_0_0) throws java.io.IOException {
/* 246 */ if (serializefromobject_exprIsNull_0_0) {
/* 247 */ throw new NullPointerException(((java.lang.String) references[6] /* errMsg */));
/* 248 */ }
/* 249 */ boolean serializefromobject_isNull_1 = true;
/* 250 */ int serializefromobject_value_1 = -1;
/* 251 */ if (!false) {
/* 252 */ serializefromobject_isNull_1 = false;
/* 253 */ if (!serializefromobject_isNull_1) {
/* 254 */ Object serializefromobject_funcResult_0 = null;
/* 255 */ serializefromobject_funcResult_0 = serializefromobject_expr_0_0.getFirst();
/* 256 */
/* 257 */ if (serializefromobject_funcResult_0 != null) {
/* 258 */ serializefromobject_value_1 = (Integer) serializefromobject_funcResult_0;
/* 259 */ } else {
/* 260 */ serializefromobject_isNull_1 = true;
/* 261 */ }
/* 262 */
/* 263 */ }
/* 264 */ }
/* 265 */ if (serializefromobject_exprIsNull_0_0) {
/* 266 */ throw new NullPointerException(((java.lang.String) references[7] /* errMsg */));
/* 267 */ }
/* 268 */ boolean serializefromobject_isNull_4 = true;
/* 269 */ int serializefromobject_value_4 = -1;
/* 270 */ if (!false) {
/* 271 */ serializefromobject_isNull_4 = false;
/* 272 */ if (!serializefromobject_isNull_4) {
/* 273 */ Object serializefromobject_funcResult_1 = null;
/* 274 */ serializefromobject_funcResult_1 = serializefromobject_expr_0_0.getSecond();
/* 275 */
/* 276 */ if (serializefromobject_funcResult_1 != null) {
/* 277 */ serializefromobject_value_4 = (Integer) serializefromobject_funcResult_1;
/* 278 */ } else {
/* 279 */ serializefromobject_isNull_4 = true;
/* 280 */ }
/* 281 */
/* 282 */ }
/* 283 */ }
/* 284 */ if (serializefromobject_exprIsNull_0_0) {
/* 285 */ throw new NullPointerException(((java.lang.String) references[8] /* errMsg */));
/* 286 */ }
/* 287 */ boolean serializefromobject_isNull_7 = true;
/* 288 */ int serializefromobject_value_7 = -1;
/* 289 */ if (!false) {
/* 290 */ serializefromobject_isNull_7 = false;
/* 291 */ if (!serializefromobject_isNull_7) {
/* 292 */ Object serializefromobject_funcResult_2 = null;
/* 293 */ serializefromobject_funcResult_2 = serializefromobject_expr_0_0.getThird();
/* 294 */
/* 295 */ if (serializefromobject_funcResult_2 != null) {
/* 296 */ serializefromobject_value_7 = (Integer) serializefromobject_funcResult_2;
/* 297 */ } else {
/* 298 */ serializefromobject_isNull_7 = true;
/* 299 */ }
/* 300 */
/* 301 */ }
/* 302 */ }
/* 303 */ debug_mutableStateArray_0[35].reset();
/* 304 */
/* 305 */ debug_mutableStateArray_0[35].zeroOutNullBytes();
/* 306 */
/* 307 */ if (serializefromobject_isNull_1) {
/* 308 */ debug_mutableStateArray_0[35].setNullAt(0);
/* 309 */ } else {
/* 310 */ debug_mutableStateArray_0[35].write(0, serializefromobject_value_1);
/* 311 */ }
/* 312 */
/* 313 */ if (serializefromobject_isNull_4) {
/* 314 */ debug_mutableStateArray_0[35].setNullAt(1);
/* 315 */ } else {
/* 316 */ debug_mutableStateArray_0[35].write(1, serializefromobject_value_4);
/* 317 */ }
/* 318 */
/* 319 */ if (serializefromobject_isNull_7) {
/* 320 */ debug_mutableStateArray_0[35].setNullAt(2);
/* 321 */ } else {
/* 322 */ debug_mutableStateArray_0[35].write(2, serializefromobject_value_7);
/* 323 */ }
/* 324 */ append((debug_mutableStateArray_0[35].getRow()));
/* 325 */
/* 326 */ }
/* 327 */
/* 328 */ protected void processNext() throws java.io.IOException {
/* 329 */ while ( localtablescan_input_0.hasNext()) {
/* 330 */ InternalRow localtablescan_row_0 = (InternalRow) localtablescan_input_0.next();
/* 331 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
/* 332 */ boolean localtablescan_isNull_0 = localtablescan_row_0.isNullAt(0);
/* 333 */ int localtablescan_value_0 = localtablescan_isNull_0 ?
/* 334 */ -1 : (localtablescan_row_0.getInt(0));
/* 335 */ boolean localtablescan_isNull_1 = localtablescan_row_0.isNullAt(1);
/* 336 */ InternalRow localtablescan_value_1 = localtablescan_isNull_1 ?
/* 337 */ null : (localtablescan_row_0.getStruct(1, 2));
/* 338 */
/* 339 */ deserializetoobject_doConsume_0(localtablescan_value_0, localtablescan_isNull_0, localtablescan_value_1, localtablescan_isNull_1);
/* 340 */ if (shouldStop()) return;
/* 341 */ }
/* 342 */ }
/* 343 */
/* 344 */ private void wholestagecodegen_init_0_0() {
/* 345 */ localtablescan_input_0 = inputs[0];
/* 346 */ debug_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
/* 347 */ debug_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[0], 2);
/* 348 */ debug_mutableStateArray_0[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
/* 349 */ debug_mutableStateArray_0[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[2], 2);
/* 350 */
/* 351 */ debug_mutableStateArray_0[4] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 352 */ debug_mutableStateArray_0[5] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[4], 2);
/* 353 */ debug_mutableStateArray_0[6] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[5], 2);
/* 354 */ debug_mutableStateArray_0[7] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
/* 355 */ debug_mutableStateArray_0[8] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(debug_mutableStateArray_0[7], 2);
/* 356 */
/* 357 */ }
/* 358 */
/* 359 */ }
20/03/19 16:31:18 ERROR CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 195, Column 74: Assignment conversion not possible from type "kotlin.Pair" to type "org.apache.spark.sql.catalyst.InternalRow"
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 195, Column 74: Assignment conversion not possible from type "kotlin.Pair" to type "org.apache.spark.sql.catalyst.InternalRow"
    at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:12021)
    at org.codehaus.janino.UnitCompiler.assignmentConversion(UnitCompiler.java:10851)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2581)
    at org.codehaus.janino.UnitCompiler.access$2700(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$6.visitLocalVariableDeclarationStatement(UnitCompiler.java:1506)
    at org.codehaus.janino.UnitCompiler$6.visitLocalVariableDeclarationStatement(UnitCompiler.java:1490)
    at org.codehaus.janino.Java$LocalVariableDeclarationStatement.accept(Java.java:3712)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
    at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1573)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1559)
    at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1496)
    at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1490)
    at org.codehaus.janino.Java$Block.accept(Java.java:2969)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2478)
    at org.codehaus.janino.UnitCompiler.access$1900(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1498)
    at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1490)
    at org.codehaus.janino.Java$IfStatement.accept(Java.java:3140)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
    at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1573)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3420)
    at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1362)
    at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1335)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:807)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:975)
    at org.codehaus.janino.UnitCompiler.access$700(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:392)
    at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:384)
    at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1445)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
    at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1312)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:833)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:410)
    at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:389)
    at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:384)
    at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1594)
    at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
    at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:362)
    at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:226)
    at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:336)
    at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:333)
    at org.codehaus.janino.Java$CompilationUnit.accept(Java.java:363)
    at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:333)
    at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:235)
    at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:464)
    at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:314)
    at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:237)
    at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:205)
    at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
    at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1370)
    at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1467)
    at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1464)
    at org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
    at org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
    at org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
    at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
    at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000)
    at org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
    at org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
    at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1318)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:695)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:694)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:175)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:171)
    at org.apache.spark.sql.execution.debug.package$DebugExec.doExecute(package.scala:242)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:175)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:171)
    at org.apache.spark.sql.execution.debug.package$DebugQuery.debug(package.scala:176)
    at org.apache.spark.sql.MyJavaInference$.debugCg(MyJavaInference.scala:536)
    at org.apache.spark.sql.MyJavaInference.debugCg(MyJavaInference.scala)
    at org.jetbrains.spark.api.ApiV1Kt.debugCodegen(ApiV1.kt:80)
    at org.jetbrains.spark.api.Main.main(Main.kt:32)
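The rejected assignment corresponds to the generated code around lines /* 197 */ and /* 198 */ above, where a freshly constructed `kotlin.Pair` is bound to a local declared as `InternalRow`. Codegen derives the Java type of that local from the deserializer expression's `dataType`, and a `StructType` maps to `InternalRow`; since the snippet's `NewInstance` is built with `wrapper.dt.asInstanceOf[StructType]`, the declared type and the constructed bean disagree. A likely direction for a fix, sketched under the assumption that Spark 3.0's `NewInstance.apply(cls, arguments, dataType, propagateNull)` is what the snippet calls (Spark's own `JavaTypeInference` types bean construction this way), is to pass an `ObjectType` instead:

import org.apache.spark.sql.catalyst.expressions.objects.NewInstance
import org.apache.spark.sql.types.ObjectType

// Sketch of a possible fix, not a verified patch: type the constructed bean as an
// ObjectType so the generated local is declared as the bean class, not InternalRow.
// The Literal.create(null, wrapper.dt) in the IsNull branch would need the same type.
val newInstance = NewInstance(cls, args, ObjectType(cls), propagateNull = false)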