Example comparison of inner-class anonfunc in Scala 2.11 vs indylambda in Scala 2.12, and how that affects Apache Spark's ClosureCleaner
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$101.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$101.class
Last modified Apr 30, 2020; size 695 bytes
MD5 checksum 419f2634b1acdf7f40ba0bb7c22eabf4
final class Bar$$$Lambda$101 implements scala.runtime.java8.JFunction0$mcV$sp,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$101
#2 = Class #1 // Bar$$$Lambda$101
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/runtime/java8/JFunction0$mcV$sp
#6 = Class #5 // scala/runtime/java8/JFunction0$mcV$sp
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 <init>
#10 = Utf8 ()V
#11 = NameAndType #9:#10 // "<init>":()V
#12 = Methodref #4.#11 // java/lang/Object."<init>":()V
#13 = Utf8 apply$mcV$sp
#14 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#15 = Utf8 Bar$
#16 = Class #15 // Bar$
#17 = Utf8 $anonfun$new$1
#18 = NameAndType #17:#10 // $anonfun$new$1:()V
#19 = Methodref #16.#18 // Bar$.$anonfun$new$1:()V
#20 = Utf8 writeReplace
#21 = Utf8 ()Ljava/lang/Object;
#22 = Utf8 java/lang/invoke/SerializedLambda
#23 = Class #22 // java/lang/invoke/SerializedLambda
#24 = String #5 // scala/runtime/java8/JFunction0$mcV$sp
#25 = String #13 // apply$mcV$sp
#26 = String #10 // ()V
#27 = Integer 6
#28 = String #15 // Bar$
#29 = String #17 // $anonfun$new$1
#30 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#31 = NameAndType #9:#30 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#32 = Methodref #23.#31 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#33 = Utf8 Code
#34 = Utf8 RuntimeVisibleAnnotations
{
private Bar$$$Lambda$101();
descriptor: ()V
flags: ACC_PRIVATE
Code:
stack=1, locals=1, args_size=1
0: aload_0
1: invokespecial #12 // Method java/lang/Object."<init>":()V
4: return
public void apply$mcV$sp();
descriptor: ()V
flags: ACC_PUBLIC
Code:
stack=0, locals=1, args_size=1
0: invokestatic #19 // Method Bar$.$anonfun$new$1:()V
3: return
RuntimeVisibleAnnotations:
0: #14()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=12, locals=1, args_size=1
0: new #23 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #16 // class Bar$
6: ldc #24 // String scala/runtime/java8/JFunction0$mcV$sp
8: ldc #25 // String apply$mcV$sp
10: ldc #26 // String ()V
12: ldc #27 // int 6
14: ldc #28 // String Bar$
16: ldc #29 // String $anonfun$new$1
18: ldc #26 // String ()V
20: ldc #26 // String ()V
22: iconst_0
23: anewarray #4 // class java/lang/Object
26: invokespecial #32 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
29: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$102.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$102.class
Last modified Apr 30, 2020; size 1049 bytes
MD5 checksum 0ccd9ec8077dc1a93f3df6b9022dd508
final class Bar$$$Lambda$102 implements scala.Function1,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$102
#2 = Class #1 // Bar$$$Lambda$102
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function1
#6 = Class #5 // scala/Function1
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Ljava/lang/String;
#11 = Utf8 <init>
#12 = Utf8 (Ljava/lang/String;)V
#13 = Utf8 ()V
#14 = NameAndType #11:#13 // "<init>":()V
#15 = Methodref #4.#14 // java/lang/Object."<init>":()V
#16 = NameAndType #9:#10 // arg$1:Ljava/lang/String;
#17 = Fieldref #2.#16 // Bar$$$Lambda$102.arg$1:Ljava/lang/String;
#18 = Utf8 get$Lambda
#19 = Utf8 (Ljava/lang/String;)Lscala/Function1;
#20 = NameAndType #11:#12 // "<init>":(Ljava/lang/String;)V
#21 = Methodref #2.#20 // Bar$$$Lambda$102."<init>":(Ljava/lang/String;)V
#22 = Utf8 apply
#23 = Utf8 (Ljava/lang/Object;)Ljava/lang/Object;
#24 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#25 = Utf8 Bar$
#26 = Class #25 // Bar$
#27 = Utf8 $anonfun$new$2$adapted
#28 = Utf8 (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#29 = NameAndType #27:#28 // $anonfun$new$2$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#30 = Methodref #26.#29 // Bar$.$anonfun$new$2$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#31 = Utf8 writeReplace
#32 = Utf8 ()Ljava/lang/Object;
#33 = Utf8 java/lang/invoke/SerializedLambda
#34 = Class #33 // java/lang/invoke/SerializedLambda
#35 = String #5 // scala/Function1
#36 = String #22 // apply
#37 = String #23 // (Ljava/lang/Object;)Ljava/lang/Object;
#38 = Integer 6
#39 = String #25 // Bar$
#40 = String #27 // $anonfun$new$2$adapted
#41 = String #28 // (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#42 = Utf8 (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#43 = String #42 // (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#44 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#45 = NameAndType #11:#44 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#46 = Methodref #34.#45 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#47 = Utf8 Code
#48 = Utf8 RuntimeVisibleAnnotations
{
private final java.lang.String arg$1;
descriptor: Ljava/lang/String;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$102(java.lang.String);
descriptor: (Ljava/lang/String;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: invokespecial #15 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #17 // Field arg$1:Ljava/lang/String;
9: return
private static scala.Function1 get$Lambda(java.lang.String);
descriptor: (Ljava/lang/String;)Lscala/Function1;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=3, locals=1, args_size=1
0: new #2 // class Bar$$$Lambda$102
3: dup
4: aload_0
5: invokespecial #21 // Method "<init>":(Ljava/lang/String;)V
8: areturn
public java.lang.Object apply(java.lang.Object);
descriptor: (Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: getfield #17 // Field arg$1:Ljava/lang/String;
4: aload_1
5: invokestatic #30 // Method Bar$.$anonfun$new$2$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
8: areturn
RuntimeVisibleAnnotations:
0: #24()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #34 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #26 // class Bar$
6: ldc #35 // String scala/Function1
8: ldc #36 // String apply
10: ldc #37 // String (Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #38 // int 6
14: ldc #39 // String Bar$
16: ldc #40 // String $anonfun$new$2$adapted
18: ldc #41 // String (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
20: ldc #43 // String (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
22: iconst_1
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #17 // Field arg$1:Ljava/lang/String;
32: aastore
33: invokespecial #46 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
36: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$103.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$103.class
Last modified Apr 30, 2020; size 1049 bytes
MD5 checksum 7405a472d161a9a2a42ab03dd0877be3
final class Bar$$$Lambda$103 implements scala.Function1,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$103
#2 = Class #1 // Bar$$$Lambda$103
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function1
#6 = Class #5 // scala/Function1
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Ljava/lang/String;
#11 = Utf8 <init>
#12 = Utf8 (Ljava/lang/String;)V
#13 = Utf8 ()V
#14 = NameAndType #11:#13 // "<init>":()V
#15 = Methodref #4.#14 // java/lang/Object."<init>":()V
#16 = NameAndType #9:#10 // arg$1:Ljava/lang/String;
#17 = Fieldref #2.#16 // Bar$$$Lambda$103.arg$1:Ljava/lang/String;
#18 = Utf8 get$Lambda
#19 = Utf8 (Ljava/lang/String;)Lscala/Function1;
#20 = NameAndType #11:#12 // "<init>":(Ljava/lang/String;)V
#21 = Methodref #2.#20 // Bar$$$Lambda$103."<init>":(Ljava/lang/String;)V
#22 = Utf8 apply
#23 = Utf8 (Ljava/lang/Object;)Ljava/lang/Object;
#24 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#25 = Utf8 Bar$
#26 = Class #25 // Bar$
#27 = Utf8 $anonfun$new$4$adapted
#28 = Utf8 (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#29 = NameAndType #27:#28 // $anonfun$new$4$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#30 = Methodref #26.#29 // Bar$.$anonfun$new$4$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#31 = Utf8 writeReplace
#32 = Utf8 ()Ljava/lang/Object;
#33 = Utf8 java/lang/invoke/SerializedLambda
#34 = Class #33 // java/lang/invoke/SerializedLambda
#35 = String #5 // scala/Function1
#36 = String #22 // apply
#37 = String #23 // (Ljava/lang/Object;)Ljava/lang/Object;
#38 = Integer 6
#39 = String #25 // Bar$
#40 = String #27 // $anonfun$new$4$adapted
#41 = String #28 // (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#42 = Utf8 (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#43 = String #42 // (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#44 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#45 = NameAndType #11:#44 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#46 = Methodref #34.#45 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#47 = Utf8 Code
#48 = Utf8 RuntimeVisibleAnnotations
{
private final java.lang.String arg$1;
descriptor: Ljava/lang/String;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$103(java.lang.String);
descriptor: (Ljava/lang/String;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: invokespecial #15 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #17 // Field arg$1:Ljava/lang/String;
9: return
private static scala.Function1 get$Lambda(java.lang.String);
descriptor: (Ljava/lang/String;)Lscala/Function1;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=3, locals=1, args_size=1
0: new #2 // class Bar$$$Lambda$103
3: dup
4: aload_0
5: invokespecial #21 // Method "<init>":(Ljava/lang/String;)V
8: areturn
public java.lang.Object apply(java.lang.Object);
descriptor: (Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: getfield #17 // Field arg$1:Ljava/lang/String;
4: aload_1
5: invokestatic #30 // Method Bar$.$anonfun$new$4$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
8: areturn
RuntimeVisibleAnnotations:
0: #24()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #34 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #26 // class Bar$
6: ldc #35 // String scala/Function1
8: ldc #36 // String apply
10: ldc #37 // String (Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #38 // int 6
14: ldc #39 // String Bar$
16: ldc #40 // String $anonfun$new$4$adapted
18: ldc #41 // String (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
20: ldc #43 // String (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
22: iconst_1
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #17 // Field arg$1:Ljava/lang/String;
32: aastore
33: invokespecial #46 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
36: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$104.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$104.class
Last modified Apr 30, 2020; size 1249 bytes
MD5 checksum 9bc69672496ef9b1dd6105ab4693d7db
final class Bar$$$Lambda$104 implements scala.Function3,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$104
#2 = Class #1 // Bar$$$Lambda$104
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function3
#6 = Class #5 // scala/Function3
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Lscala/Function1;
#11 = Utf8 arg$2
#12 = Utf8 <init>
#13 = Utf8 (Lscala/Function1;Lscala/Function1;)V
#14 = Utf8 ()V
#15 = NameAndType #12:#14 // "<init>":()V
#16 = Methodref #4.#15 // java/lang/Object."<init>":()V
#17 = NameAndType #9:#10 // arg$1:Lscala/Function1;
#18 = Fieldref #2.#17 // Bar$$$Lambda$104.arg$1:Lscala/Function1;
#19 = NameAndType #11:#10 // arg$2:Lscala/Function1;
#20 = Fieldref #2.#19 // Bar$$$Lambda$104.arg$2:Lscala/Function1;
#21 = Utf8 get$Lambda
#22 = Utf8 (Lscala/Function1;Lscala/Function1;)Lscala/Function3;
#23 = NameAndType #12:#13 // "<init>":(Lscala/Function1;Lscala/Function1;)V
#24 = Methodref #2.#23 // Bar$$$Lambda$104."<init>":(Lscala/Function1;Lscala/Function1;)V
#25 = Utf8 apply
#26 = Utf8 (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;
#27 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#28 = Utf8 Bar$
#29 = Class #28 // Bar$
#30 = Utf8 $anonfun$new$7$adapted
#31 = Utf8 (Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#32 = NameAndType #30:#31 // $anonfun$new$7$adapted:(Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#33 = Methodref #29.#32 // Bar$.$anonfun$new$7$adapted:(Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#34 = Utf8 writeReplace
#35 = Utf8 ()Ljava/lang/Object;
#36 = Utf8 java/lang/invoke/SerializedLambda
#37 = Class #36 // java/lang/invoke/SerializedLambda
#38 = String #5 // scala/Function3
#39 = String #25 // apply
#40 = String #26 // (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;
#41 = Integer 6
#42 = String #28 // Bar$
#43 = String #30 // $anonfun$new$7$adapted
#44 = String #31 // (Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#45 = Utf8 (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#46 = String #45 // (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#47 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#48 = NameAndType #12:#47 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#49 = Methodref #37.#48 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#50 = Utf8 Code
#51 = Utf8 RuntimeVisibleAnnotations
{
private final scala.Function1 arg$1;
descriptor: Lscala/Function1;
flags: ACC_PRIVATE, ACC_FINAL
private final scala.Function1 arg$2;
descriptor: Lscala/Function1;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$104(scala.Function1, scala.Function1);
descriptor: (Lscala/Function1;Lscala/Function1;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=3, args_size=3
0: aload_0
1: invokespecial #16 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #18 // Field arg$1:Lscala/Function1;
9: aload_0
10: aload_2
11: putfield #20 // Field arg$2:Lscala/Function1;
14: return
private static scala.Function3 get$Lambda(scala.Function1, scala.Function1);
descriptor: (Lscala/Function1;Lscala/Function1;)Lscala/Function3;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=4, locals=2, args_size=2
0: new #2 // class Bar$$$Lambda$104
3: dup
4: aload_0
5: aload_1
6: invokespecial #24 // Method "<init>":(Lscala/Function1;Lscala/Function1;)V
9: areturn
public java.lang.Object apply(java.lang.Object, java.lang.Object, java.lang.Object);
descriptor: (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=5, locals=4, args_size=4
0: aload_0
1: getfield #18 // Field arg$1:Lscala/Function1;
4: aload_0
5: getfield #20 // Field arg$2:Lscala/Function1;
8: aload_1
9: aload_2
10: aload_3
11: invokestatic #33 // Method Bar$.$anonfun$new$7$adapted:(Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
14: areturn
RuntimeVisibleAnnotations:
0: #27()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #37 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #29 // class Bar$
6: ldc #38 // String scala/Function3
8: ldc #39 // String apply
10: ldc #40 // String (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #41 // int 6
14: ldc #42 // String Bar$
16: ldc #43 // String $anonfun$new$7$adapted
18: ldc #44 // String (Lscala/Function1;Lscala/Function1;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
20: ldc #46 // String (Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
22: iconst_2
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #18 // Field arg$1:Lscala/Function1;
32: aastore
33: dup
34: iconst_1
35: aload_0
36: getfield #20 // Field arg$2:Lscala/Function1;
39: aastore
40: invokespecial #49 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
43: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$105.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$105.class
Last modified Apr 30, 2020; size 1007 bytes
MD5 checksum 66c1dcf8f7e1599c8c7d33e4018c6569
final class Bar$$$Lambda$105 implements scala.Function1,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$105
#2 = Class #1 // Bar$$$Lambda$105
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function1
#6 = Class #5 // scala/Function1
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Ljava/lang/String;
#11 = Utf8 <init>
#12 = Utf8 (Ljava/lang/String;)V
#13 = Utf8 ()V
#14 = NameAndType #11:#13 // "<init>":()V
#15 = Methodref #4.#14 // java/lang/Object."<init>":()V
#16 = NameAndType #9:#10 // arg$1:Ljava/lang/String;
#17 = Fieldref #2.#16 // Bar$$$Lambda$105.arg$1:Ljava/lang/String;
#18 = Utf8 get$Lambda
#19 = Utf8 (Ljava/lang/String;)Lscala/Function1;
#20 = NameAndType #11:#12 // "<init>":(Ljava/lang/String;)V
#21 = Methodref #2.#20 // Bar$$$Lambda$105."<init>":(Ljava/lang/String;)V
#22 = Utf8 apply
#23 = Utf8 (Ljava/lang/Object;)Ljava/lang/Object;
#24 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#25 = Utf8 Bar$
#26 = Class #25 // Bar$
#27 = Utf8 $anonfun$new$3$adapted
#28 = Utf8 (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#29 = NameAndType #27:#28 // $anonfun$new$3$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#30 = Methodref #26.#29 // Bar$.$anonfun$new$3$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#31 = Utf8 writeReplace
#32 = Utf8 ()Ljava/lang/Object;
#33 = Utf8 java/lang/invoke/SerializedLambda
#34 = Class #33 // java/lang/invoke/SerializedLambda
#35 = String #5 // scala/Function1
#36 = String #22 // apply
#37 = String #23 // (Ljava/lang/Object;)Ljava/lang/Object;
#38 = Integer 6
#39 = String #25 // Bar$
#40 = String #27 // $anonfun$new$3$adapted
#41 = String #28 // (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#42 = Utf8 (Ljava/lang/Object;)Ljava/lang/String;
#43 = String #42 // (Ljava/lang/Object;)Ljava/lang/String;
#44 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#45 = NameAndType #11:#44 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#46 = Methodref #34.#45 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#47 = Utf8 Code
#48 = Utf8 RuntimeVisibleAnnotations
{
private final java.lang.String arg$1;
descriptor: Ljava/lang/String;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$105(java.lang.String);
descriptor: (Ljava/lang/String;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: invokespecial #15 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #17 // Field arg$1:Ljava/lang/String;
9: return
private static scala.Function1 get$Lambda(java.lang.String);
descriptor: (Ljava/lang/String;)Lscala/Function1;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=3, locals=1, args_size=1
0: new #2 // class Bar$$$Lambda$105
3: dup
4: aload_0
5: invokespecial #21 // Method "<init>":(Ljava/lang/String;)V
8: areturn
public java.lang.Object apply(java.lang.Object);
descriptor: (Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: getfield #17 // Field arg$1:Ljava/lang/String;
4: aload_1
5: invokestatic #30 // Method Bar$.$anonfun$new$3$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
8: areturn
RuntimeVisibleAnnotations:
0: #24()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #34 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #26 // class Bar$
6: ldc #35 // String scala/Function1
8: ldc #36 // String apply
10: ldc #37 // String (Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #38 // int 6
14: ldc #39 // String Bar$
16: ldc #40 // String $anonfun$new$3$adapted
18: ldc #41 // String (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
20: ldc #43 // String (Ljava/lang/Object;)Ljava/lang/String;
22: iconst_1
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #17 // Field arg$1:Ljava/lang/String;
32: aastore
33: invokespecial #46 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
36: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$106.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$106.class
Last modified Apr 30, 2020; size 1049 bytes
MD5 checksum d4f83d9a600a52b8a96a98a031ba0b9f
final class Bar$$$Lambda$106 implements scala.Function1,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$106
#2 = Class #1 // Bar$$$Lambda$106
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function1
#6 = Class #5 // scala/Function1
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Ljava/lang/String;
#11 = Utf8 <init>
#12 = Utf8 (Ljava/lang/String;)V
#13 = Utf8 ()V
#14 = NameAndType #11:#13 // "<init>":()V
#15 = Methodref #4.#14 // java/lang/Object."<init>":()V
#16 = NameAndType #9:#10 // arg$1:Ljava/lang/String;
#17 = Fieldref #2.#16 // Bar$$$Lambda$106.arg$1:Ljava/lang/String;
#18 = Utf8 get$Lambda
#19 = Utf8 (Ljava/lang/String;)Lscala/Function1;
#20 = NameAndType #11:#12 // "<init>":(Ljava/lang/String;)V
#21 = Methodref #2.#20 // Bar$$$Lambda$106."<init>":(Ljava/lang/String;)V
#22 = Utf8 apply
#23 = Utf8 (Ljava/lang/Object;)Ljava/lang/Object;
#24 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#25 = Utf8 Bar$
#26 = Class #25 // Bar$
#27 = Utf8 $anonfun$new$5$adapted
#28 = Utf8 (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#29 = NameAndType #27:#28 // $anonfun$new$5$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#30 = Methodref #26.#29 // Bar$.$anonfun$new$5$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#31 = Utf8 writeReplace
#32 = Utf8 ()Ljava/lang/Object;
#33 = Utf8 java/lang/invoke/SerializedLambda
#34 = Class #33 // java/lang/invoke/SerializedLambda
#35 = String #5 // scala/Function1
#36 = String #22 // apply
#37 = String #23 // (Ljava/lang/Object;)Ljava/lang/Object;
#38 = Integer 6
#39 = String #25 // Bar$
#40 = String #27 // $anonfun$new$5$adapted
#41 = String #28 // (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#42 = Utf8 (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#43 = String #42 // (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
#44 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#45 = NameAndType #11:#44 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#46 = Methodref #34.#45 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#47 = Utf8 Code
#48 = Utf8 RuntimeVisibleAnnotations
{
private final java.lang.String arg$1;
descriptor: Ljava/lang/String;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$106(java.lang.String);
descriptor: (Ljava/lang/String;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: invokespecial #15 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #17 // Field arg$1:Ljava/lang/String;
9: return
private static scala.Function1 get$Lambda(java.lang.String);
descriptor: (Ljava/lang/String;)Lscala/Function1;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=3, locals=1, args_size=1
0: new #2 // class Bar$$$Lambda$106
3: dup
4: aload_0
5: invokespecial #21 // Method "<init>":(Ljava/lang/String;)V
8: areturn
public java.lang.Object apply(java.lang.Object);
descriptor: (Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: getfield #17 // Field arg$1:Ljava/lang/String;
4: aload_1
5: invokestatic #30 // Method Bar$.$anonfun$new$5$adapted:(Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
8: areturn
RuntimeVisibleAnnotations:
0: #24()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #34 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #26 // class Bar$
6: ldc #35 // String scala/Function1
8: ldc #36 // String apply
10: ldc #37 // String (Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #38 // int 6
14: ldc #39 // String Bar$
16: ldc #40 // String $anonfun$new$5$adapted
18: ldc #41 // String (Ljava/lang/String;Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
20: ldc #43 // String (Ljava/lang/Object;)Lscala/collection/immutable/IndexedSeq;
22: iconst_1
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #17 // Field arg$1:Ljava/lang/String;
32: aastore
33: invokespecial #46 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
36: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$107.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$107.class
Last modified Apr 30, 2020; size 1007 bytes
MD5 checksum f5b28f5ad45add06ca282c5ff287bc03
final class Bar$$$Lambda$107 implements scala.Function1,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$107
#2 = Class #1 // Bar$$$Lambda$107
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/Function1
#6 = Class #5 // scala/Function1
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 arg$1
#10 = Utf8 Ljava/lang/String;
#11 = Utf8 <init>
#12 = Utf8 (Ljava/lang/String;)V
#13 = Utf8 ()V
#14 = NameAndType #11:#13 // "<init>":()V
#15 = Methodref #4.#14 // java/lang/Object."<init>":()V
#16 = NameAndType #9:#10 // arg$1:Ljava/lang/String;
#17 = Fieldref #2.#16 // Bar$$$Lambda$107.arg$1:Ljava/lang/String;
#18 = Utf8 get$Lambda
#19 = Utf8 (Ljava/lang/String;)Lscala/Function1;
#20 = NameAndType #11:#12 // "<init>":(Ljava/lang/String;)V
#21 = Methodref #2.#20 // Bar$$$Lambda$107."<init>":(Ljava/lang/String;)V
#22 = Utf8 apply
#23 = Utf8 (Ljava/lang/Object;)Ljava/lang/Object;
#24 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#25 = Utf8 Bar$
#26 = Class #25 // Bar$
#27 = Utf8 $anonfun$new$6$adapted
#28 = Utf8 (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#29 = NameAndType #27:#28 // $anonfun$new$6$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#30 = Methodref #26.#29 // Bar$.$anonfun$new$6$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#31 = Utf8 writeReplace
#32 = Utf8 ()Ljava/lang/Object;
#33 = Utf8 java/lang/invoke/SerializedLambda
#34 = Class #33 // java/lang/invoke/SerializedLambda
#35 = String #5 // scala/Function1
#36 = String #22 // apply
#37 = String #23 // (Ljava/lang/Object;)Ljava/lang/Object;
#38 = Integer 6
#39 = String #25 // Bar$
#40 = String #27 // $anonfun$new$6$adapted
#41 = String #28 // (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
#42 = Utf8 (Ljava/lang/Object;)Ljava/lang/String;
#43 = String #42 // (Ljava/lang/Object;)Ljava/lang/String;
#44 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#45 = NameAndType #11:#44 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#46 = Methodref #34.#45 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#47 = Utf8 Code
#48 = Utf8 RuntimeVisibleAnnotations
{
private final java.lang.String arg$1;
descriptor: Ljava/lang/String;
flags: ACC_PRIVATE, ACC_FINAL
private Bar$$$Lambda$107(java.lang.String);
descriptor: (Ljava/lang/String;)V
flags: ACC_PRIVATE
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: invokespecial #15 // Method java/lang/Object."<init>":()V
4: aload_0
5: aload_1
6: putfield #17 // Field arg$1:Ljava/lang/String;
9: return
private static scala.Function1 get$Lambda(java.lang.String);
descriptor: (Ljava/lang/String;)Lscala/Function1;
flags: ACC_PRIVATE, ACC_STATIC
Code:
stack=3, locals=1, args_size=1
0: new #2 // class Bar$$$Lambda$107
3: dup
4: aload_0
5: invokespecial #21 // Method "<init>":(Ljava/lang/String;)V
8: areturn
public java.lang.Object apply(java.lang.Object);
descriptor: (Ljava/lang/Object;)Ljava/lang/Object;
flags: ACC_PUBLIC
Code:
stack=2, locals=2, args_size=2
0: aload_0
1: getfield #17 // Field arg$1:Ljava/lang/String;
4: aload_1
5: invokestatic #30 // Method Bar$.$anonfun$new$6$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
8: areturn
RuntimeVisibleAnnotations:
0: #24()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=15, locals=1, args_size=1
0: new #34 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #26 // class Bar$
6: ldc #35 // String scala/Function1
8: ldc #36 // String apply
10: ldc #37 // String (Ljava/lang/Object;)Ljava/lang/Object;
12: ldc #38 // int 6
14: ldc #39 // String Bar$
16: ldc #40 // String $anonfun$new$6$adapted
18: ldc #41 // String (Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
20: ldc #43 // String (Ljava/lang/Object;)Ljava/lang/String;
22: iconst_1
23: anewarray #4 // class java/lang/Object
26: dup
27: iconst_0
28: aload_0
29: getfield #17 // Field arg$1:Ljava/lang/String;
32: aastore
33: invokespecial #46 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
36: areturn
}
# LMF class generated at runtime, dumped via
# <Scala 2.12.2>/bin/scala -cp . -Djdk.internal.lambda.dumpProxyClasses=dumpclasses Bar
$ javap -verbose -private -c -s -l 'Bar$$$Lambda$108.class'
Classfile /private/tmp/dumpclasses/Bar$$$Lambda$108.class
Last modified Apr 30, 2020; size 705 bytes
MD5 checksum d71c67869f9c618c17c1fee03f3dad9c
final class Bar$$$Lambda$108 implements scala.runtime.java8.JFunction1$mcII$sp,scala.Serializable
minor version: 0
major version: 52
flags: ACC_FINAL, ACC_SUPER, ACC_SYNTHETIC
Constant pool:
#1 = Utf8 Bar$$$Lambda$108
#2 = Class #1 // Bar$$$Lambda$108
#3 = Utf8 java/lang/Object
#4 = Class #3 // java/lang/Object
#5 = Utf8 scala/runtime/java8/JFunction1$mcII$sp
#6 = Class #5 // scala/runtime/java8/JFunction1$mcII$sp
#7 = Utf8 scala/Serializable
#8 = Class #7 // scala/Serializable
#9 = Utf8 <init>
#10 = Utf8 ()V
#11 = NameAndType #9:#10 // "<init>":()V
#12 = Methodref #4.#11 // java/lang/Object."<init>":()V
#13 = Utf8 apply$mcII$sp
#14 = Utf8 (I)I
#15 = Utf8 Ljava/lang/invoke/LambdaForm$Hidden;
#16 = Utf8 Bar$
#17 = Class #16 // Bar$
#18 = Utf8 $anonfun$new$8
#19 = NameAndType #18:#14 // $anonfun$new$8:(I)I
#20 = Methodref #17.#19 // Bar$.$anonfun$new$8:(I)I
#21 = Utf8 writeReplace
#22 = Utf8 ()Ljava/lang/Object;
#23 = Utf8 java/lang/invoke/SerializedLambda
#24 = Class #23 // java/lang/invoke/SerializedLambda
#25 = String #5 // scala/runtime/java8/JFunction1$mcII$sp
#26 = String #13 // apply$mcII$sp
#27 = String #14 // (I)I
#28 = Integer 6
#29 = String #16 // Bar$
#30 = String #18 // $anonfun$new$8
#31 = Utf8 (Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#32 = NameAndType #9:#31 // "<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#33 = Methodref #24.#32 // java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
#34 = Utf8 Code
#35 = Utf8 RuntimeVisibleAnnotations
{
private Bar$$$Lambda$108();
descriptor: ()V
flags: ACC_PRIVATE
Code:
stack=1, locals=1, args_size=1
0: aload_0
1: invokespecial #12 // Method java/lang/Object."<init>":()V
4: return
public int apply$mcII$sp(int);
descriptor: (I)I
flags: ACC_PUBLIC
Code:
stack=1, locals=2, args_size=2
0: iload_1
1: invokestatic #20 // Method Bar$.$anonfun$new$8:(I)I
4: ireturn
RuntimeVisibleAnnotations:
0: #15()
private final java.lang.Object writeReplace();
descriptor: ()Ljava/lang/Object;
flags: ACC_PRIVATE, ACC_FINAL
Code:
stack=12, locals=1, args_size=1
0: new #24 // class java/lang/invoke/SerializedLambda
3: dup
4: ldc #17 // class Bar$
6: ldc #25 // String scala/runtime/java8/JFunction1$mcII$sp
8: ldc #26 // String apply$mcII$sp
10: ldc #27 // String (I)I
12: ldc #28 // int 6
14: ldc #29 // String Bar$
16: ldc #30 // String $anonfun$new$8
18: ldc #27 // String (I)I
20: ldc #27 // String (I)I
22: iconst_0
23: anewarray #4 // class java/lang/Object
26: invokespecial #33 // Method java/lang/invoke/SerializedLambda."<init>":(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
29: areturn
}
object Bar {
  def test(desc: String)(f: => Unit) = {
    println(s"Running test: $desc")
    f
  }

  // example taken from https://github.com/apache/spark/blob/7195a18bf24d9506d2f8d9d4d93ff679b3d21b65/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala#L150-L167
  test("try a ClosureCleanerSuite2 example") {
    val localValue = "someSerializableValue"
    val closure1 = (i: Int) => {
      (1 to i).map { x => x + localValue } // 1 level of nesting
    }
    val closure2 = (j: Int) => {
      (1 to j).flatMap { x =>
        (1 to x).map { y => y + localValue } // 2 levels
      }
    }
    val closure3 = (k: Int, l: Int, m: Int) => {
      (1 to k).flatMap(closure2) ++ // 4 levels
        (1 to l).flatMap(closure1) ++ // 3 levels
        (1 to m).map { x => x + 1 } // 2 levels
    }
    val closure1r = closure1(1)
    val closure2r = closure2(2)
    val closure3r = closure3(3, 4, 5)
  }

  def main(args: Array[String]): Unit = {
    println("Hello from main()")
  }
}

A note on how NSC (New Scala Compiler) lowers lambdas

Author: Kris Mok

Date: 2020-04-29

Link to this Gist: https://gist.github.com/rednaxelafx/e9ecd09bbd1c448dbddad4f4edf25d48

This note is written in the context of Apache Spark 3.0's ClosureCleaner support for the "indylambda" style lowering of closures, which is the new default since Scala 2.12 (see SPARK-31399).

For discussions on Apache Spark's ClosureCleaner, please refer to the spark_closurecleaner_notes.md file in this Gist.

c.f. indylambda: Putting invokedynamic to work for Scala by @retronym (Jason Zaugg) on the design of indylambda in Scala.

Jason also kindly gave some comments on this note with extra information: https://twitter.com/retronym/status/1256015846784159744

Jargon:

  • Old way = inner-class based = delambdafy:inline (equivalent to default behavior in Scala 2.11)
  • New way = indylambda (invokedynamic+LambdaMetaFactory based) = delambdafy:method (the new default since Scala 2.12)
  • LMF = LambdaMetaFactory = a set of built-in BootstrapMethods for Java 8 lambdas.

Quick intro to Scala lambda lowering in indylambda

In Scala 2.12, the Scala compiler (NSC) adopted a new way of lowering Scala lambdas down to Java bytecode, much like Java 8 lambdas. This feature is called "indylambda" in Scala, controlled by the Scala compiler option -Ydelambdafy:method.

The most relevant phases in NSC for lowering lambdas are: uncurry, lambdalift, delambdafy. For Scala 2.12, I'm using scalac -Xprint:delambdafy -Xshow:delambdafy for the examples in this Gist to poke into what NSC is up to.

For example, a lambda literal that looks like this in Scala source code:

// assuming we're in a named method bar() in class Foo here, and there's a local variable freeVar: Int
(arg: Int) => arg + freeVar + 42

NOTE: this declares a lambda function, and also creates a lambda closure object for representing a runtime instance of this lambda. In the Java world, it's classes that ultimately implement interfaces, so a scala.Function1 reference ultimately needs to point to an object backed by a class.

In the indylambda style of lambda lowering, the lambda literal would be lowered to multiple parts:

  • The lambda body (e.g. arg + freeVar + 42) gets lowered into a static method in the class that declared this lambda
  • The lambda closure object creation site gets lowered into an invokedynamic instruction that calls a bootstrap method to create the object, passing in, as arguments, whatever state needs to be captured (e.g. freeVar)
  • other misc supporting code (e.g. lambda deserialization)

... so we've got a method for the body, and the invokedynamic will magically create an object instance that represents an instance of this lambda, but where's the backing class that glues together the functional interface/trait and the implementation method?

That's where the LambdaMetaFactory in Java's standard library comes in. It'll generate an equivalent of the anonymous inner class "glue" at runtime for these lambdas, implementing the specified functional interface. So, in effect, at runtime it's not that different from the old way of eagerly generating inner classes.

Simple contrived pseudocode for the example above would be:

// lambda closure object creation site: generated by NSC
// This is in the place of the lambda literal
// InvokeDynamic #0:apply$mcII$sp:(I)Lscala/runtime/java8/JFunction1$mcII$sp;
invokedynamic LambdaMetaFactory.altMetaFactory(freeVar) // returns an object whose class implements scala.Function1

// lambda body: generated by NSC in class Foo
// notice how the argument list is the captured state passed in as arguments, followed by the actual explicit lambda parameter(s)
public static def $anonfun$bar$1(freeVar: Int, arg: Int): Int = arg + freeVar + 42

// lambda deserialization: generated by NSC in class Foo
private static def $deserializeLambda$(lambda: java.lang.invoke.SerializedLambda): Any = {
  // InvokeDynamic #1:lambdaDeserialize:(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;
  invokedynamic scala.runtime.LambdaDeserialize.bootstrap(...)
}

// lambda closure class: generated by java.lang.invoke.LambdaMetafactory (in OpenJDK8 the actual impl is java.lang.invoke.InnerClassLambdaMetafactory)
public /* synthetic */ class Foo$$Lambda$1
    extends scala.runtime.java8.JFunction1$mcII$sp,
    scala.Serializable {

  private val arg$1: Int = _

  def <init>(arg$1: Int): Unit = {
    this.arg$1 = arg$1
  }

  def apply$mcII$sp(unnamed_arg1: Int): Int = {
    Foo.$anonfun$bar$1(this.arg$1, unnamed_arg1) // invoke the actual implementation; arg$1 holds the captured freeVar
  }

  private static def get$Lambda(arg$1: Int): scala.runtime.java8.JFunction1$mcII$sp = {
    new Foo$$Lambda$1(arg$1)
  }

  private def writeReplace(): Any = {
    new java.lang.invoke.SerializedLambda( // this is a serialization proxy
      capturingClass = classOf[Foo],
      functionalInterfaceClass = "scala/runtime/java8/JFunction1$mcII$sp",
      functionalInterfaceMethodName = "apply$mcII$sp",
      functionalInterfaceMethodSignature = "(I)I",
      implMethodKind = 6, // java.lang.invoke.MethodHandleInfo.REF_invokeStatic, c.f. https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/MethodHandleInfo.html
      implClass = "Foo",
      implMethodName = "$anonfun$bar$1",
      implMethodSignature = "(II)I",
      instantiatedMethodType = "(I)I",
      capturedArgs = Array[Object](java.lang.Integer.valueOf(this.arg$1)) // a one-element array holding the boxed captured value
    )
  }
}

At the first execution of the invokedynamic instruction, the lambda closure object creation site:

invokedynamic LambdaMetaFactory.altMetaFactory(freeVar)

is resolved into an equivalent of

Foo$$Lambda$1.get$Lambda(freeVar)

which creates a new instance of the generated Foo$$Lambda$1 class on every invocation.

Side-note: The get$Lambda(...) call is for capturing lambdas. JDK8's LMF performs an optimization when a lambda doesn't capture any state -- it'll spin up the backing class just like above, but the lambda closure object creation site will actually reuse the same instance (embedded into the call site via a MethodHandle to a constant).
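To make that difference concrete, here is a small hypothetical check (my own sketch, not part of the original Gist; names are made up, and the printed results assume Scala 2.12 on a JDK 8 LMF): a non-capturing lambda evaluated from the same call site is expected to come back as the same cached instance, while a capturing lambda goes through the generated get$Lambda and allocates a fresh closure object every time.

object LambdaInstanceCheck {
  def nonCapturing: Int => Int = (x: Int) => x + 1                  // captures nothing
  def capturing(freeVar: Int): Int => Int = (x: Int) => x + freeVar // captures freeVar

  def main(args: Array[String]): Unit = {
    // Same invokedynamic call site, no captured state: LMF binds one constant instance.
    println(nonCapturing eq nonCapturing)   // expected: true
    // Captured state: each evaluation calls the generated get$Lambda and news up an instance.
    println(capturing(1) eq capturing(1))   // expected: false
  }
}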


Quick comparison of the old and new way of lowering Scala lambdas

Inner-class lambda:

  • Closures are lowered into inner classes, similar to how Java lowers anonymous inner classes.
  • Captured state is stored as instance fields on the inner class.
  • These synthetic classes can have an $outer field for the closure environment chain, e.g.
    • one-level nesting, capturing a mutable local variable: final def apply(x: Int): String = x.+(anonfun$apply$3.this.$outer.localVar$1.elem.$asInstanceOf[String]()); where $outer.localVar$1 refers to a scala.runtime.ObjectRef
    • one-level nesting, capturing an immutable local value: final def apply(x: Int): String = x.+(anonfun$apply$2.this.$outer.localValue$1); where the $outer.localValue$1 refers to the captured value
    • two-level nesting, capturing a mutable local variable: final def apply(y: Int): String = y.+(anonfun$apply$9.this.$outer.$outer().localVar$1.elem.$asInstanceOf[String]());, notice how the $outer chain now becomes two levels as well.
    • two-level nesting, capturing an immutable local value: final def apply(y: Int): String = y.+(anonfun$apply$7.this.$outer.$outer().localValue$1); same as above
    • two-level nesting, capturing the enclosing this due to accessing a field: final def apply(x: Int): String = x.+(anonfun$apply$4.this.$outer.$outer().$outer().mutableField()); where there are 3 $outers in the chain, the first two referencing closures and the last referencing the enclosing this
  • Such $outer fields could reference an enclosing "this" (and this is very often in REPL), or it could reference another closure (for nested closures)
    • See closure2 for example: it gets lowered to logic like: final def apply(y: Int): String = y.+(anonfun$apply$4.this.$outer.$outer().localValue$1); where it follows the $outer chain to reach the captured localValue state.
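To make the shape of this lowering concrete, here is a rough hand-written approximation (my own reconstruction of the delambdafy:inline style, not actual scalac 2.11 output; all names are made up) of a one-level closure capturing an immutable value through an outer reference:

import scala.runtime.AbstractFunction1

class Enclosing(val localValue: String) {
  // the closure creation site becomes roughly `new EnclosingAnonFun(this)`
  def mkClosure: Int => String = new EnclosingAnonFun(this)
}

// Roughly what `x => x + localValue` becomes under the inner-class lowering: a
// Serializable AbstractFunction1 subclass whose `outer` field plays the role of
// the compiler-generated $outer, linking back to the captured environment.
class EnclosingAnonFun(val outer: Enclosing)
    extends AbstractFunction1[Int, String] with Serializable {
  final def apply(x: Int): String = x + outer.localValue
}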

indylambda:

  • Closures are lowered into static methods, and the lambda instance creation sites become invokedynamic+LambdaMetafactory call.
    • NSC always generates a static method for the lambda body impl; there is no corner case where the lowered method cannot be made static. There are some stale comments in NSC that still mention the possibility of generating a non-static method here, but that's no longer relevant.
  • Captured state is stored as instance fields automatically generated by the LMF, namely from InnerClassLambdaMetafactory
    • The field names are "arg$" + (i + 1), where i ranges over [0, capturedArgCount)
  • Immutable captured state is captured-by-copy (e.g. a direct reference copy or a primitive value copy)
    • For example, see how localValue is captured in the delambdafy:method mode: it's captured by closures of all nesting levels by copy, i.e. making a direct reference to the original captured object. No $outer involved here.
  • Mutable captured state is captured-by-ref via the scala.runtime.XXXRef types, e.g. IntRef, ObjectRef, etc. (cf. the *Ref.java files in https://github.com/scala/scala/tree/2.12.x/src/library/scala/runtime). This is typically done for mutable local variable captures, regardless of the nesting level of the closure from the owner function/closure of the local variable.
  • These indylambdas may keep an outer reference to an enclosing "this", in which case the first non-receiver argument in the lowered synthetic method would be a $this.
    • When the lowered synthetic method is static, the $this argument's value would come from the arg$1 field on the indylambda closure object. This is equivalent to the $outer field in the old world.
    • When the lowered synthetic method is non-static, the $this argument doesn't need to exist because it'll be the same as the "capturing receiver", and the "capturing receiver"'s value would come from the arg$1 field on the indylambda closure object. (This case no longer exists in Scala 2.12, since the lowered method is always static.)
  • These indylambdas do NOT reference outer closures via an equivalent $outer mechanism. e.g.
    • one-level nesting, capturing a mutable local variable: final <static> <artifact> def $anonfun$new$5(localVar$1: runtime.ObjectRef, x: Int): String = x.+(localVar$1.elem.$asInstanceOf[String]()); where the localVar$1 is a scala.runtime.ObjectRef that stores the captured mutable variable.
    • one-level nesting, capturing an immutable local value: final <static> <artifact> def $anonfun$new$3(localValue$1: String, x: Int): String = x.+(localValue$1);
    • two-level nesting, capturing a mutable local variable: final <static> <artifact> def $anonfun$new$15(localVar$1: runtime.ObjectRef, y: Int): String = y.+(localVar$1.elem.$asInstanceOf[String]());, compare this with the above: no extra $outer chain, the "ref"s are copied into the nested closure object and stored as fields
    • two-level nesting, capturing an immutable local value: final <static> <artifact> def $anonfun$new$12(localValue$1: String, y: Int): String = y.+(localValue$1);, compare this with the above: no extra $outer chain, the direct references are copied into the nested closure object and stored as fields
    • one-level nesting, capturing enclosing this due to accessing a mutable field: final <static> <artifact> def $anonfun$new$7($this: Baz, x: Int): String = x.+($this.mutableField());
    • two-level nesting, capturing enclosing this due to accessing a mutable field: final <static> <artifact> def $anonfun$new$18($this: Baz, y: Int): String = y.+($this.mutableField());, compare this with the above: no extra $outer chain, the captured $this comes directly from the current indylambda closure object.
  • For serializable lambdas (if a function type implements java.lang.Serializable; note that Scala's scala.Serializable trait extends the Java counterpart, and all Scala scala.FunctionN traits are marked Serializable),
    • there'd be a private void writeReplace() method generated on the indylambda class by LMF, so that the lambda instance state can be serialized via the java.lang.invoke.SerializedLambda serialization proxy. The connection between the instance fields on the indylambda closure object and the SerializedLambda is codegen'd into this writeReplace method.
    • There's also a corresponding $deserializeLambda$ static method on the same class as the synthetic method for the lambda body, similar to how Java 8 implements it (but because Scala can be very lambda-heavy, doing it the exact same way as Java 8 would create a single huge method and blow up the code size). This static method is responsible for deserialization of all indylambdas in this class. This is generated by NSC. This method uses invokedynamic to create the indylambda closure objects as well, and the bootstrap method is scala.runtime.LambdaDeserialize.bootstrap(...).
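To make the writeReplace / $deserializeLambda$ path above concrete, here is a minimal round-trip sketch (my own illustration, assuming Scala 2.12 where these lambdas are serializable; not part of the original example): writing the closure goes through the LMF-generated writeReplace(), and reading it back goes through SerializedLambda's readResolve, which calls the capturing class's $deserializeLambda$.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

object LambdaSerializationRoundTrip {
  def roundTrip[T <: AnyRef](obj: T): T = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(obj)  // the LMF-generated writeReplace() swaps in a SerializedLambda proxy
    oos.close()
    val ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
    ois.readObject().asInstanceOf[T]  // readResolve() routes through $deserializeLambda$
  }

  def main(args: Array[String]): Unit = {
    val localValue = "someSerializableValue"
    val closure: Int => String = x => x + localValue  // captures localValue by copy
    val copy = roundTrip(closure)
    println(copy(42))  // expected: 42someSerializableValue
  }
}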

A walkthrough of a few examples in indylambda

The NSC internal syntax tree for delambdafy:method mode in Scala 2.12 can be found in the syntax_tree_after_delambdafy-2.12 file.

For the example given in this Gist, the generated bytecode from InnerClassLambdaMetafactory can be found in the Bar$$$Lambda$10[1-8].class.javap files.

  • closure1 ((1 to i).map { x => x + localValue })

    • Lowered to final <static> <artifact> def $anonfun$new$3$adapted(localValue$1: String, x: Object): String = Bar.this.$anonfun$new$3(localValue$1, unbox(x)); by delambdafy:method
      • which further invokes into the actual body: final <static> <artifact> def $anonfun$new$3(localValue$1: String, x: Int): String = x.+(localValue$1);
      • Notice how the adaptor method version has an extra $adapted in the method name.
    • Runtime generated indylambda closure object code is Bar$$$Lambda$105.class.javap
    • This captures an immutable reference localValue, so the reference is directly captured as a field arg$1 in the indylambda closure object
    • The NSC syntax tree for closure1's assignment looks like this:
            val closure1: Function1 = {
              $anonfun(localValue)
            };
      where this $anonfun(localValue) expression actually represents an invokedynamic call to an LMF bootstrap method with localValue as a capturing argument. In bytecode this looks like: (in javap format)
      invokedynamic #202,  0            // InvokeDynamic #4:apply:(Ljava/lang/String;)Lscala/Function1;
      ...
      BootstrapMethods:
        0: #116 invokestatic java/lang/invoke/LambdaMetafactory.altMetafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;
          Method arguments:
            #118 (Ljava/lang/Object;)Ljava/lang/Object;
            #123 invokestatic Bar$.$anonfun$new$3$adapted:(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;
            #125 (Ljava/lang/Object;)Ljava/lang/String;
            #126 3
            #127 1
            #129 scala/Serializable
      
  • closure2

    • Lowered to final <static> <artifact> def $anonfun$new$4$adapted(localValue$1: String, j: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$4(localValue$1, unbox(j));
    • Runtime generated indylambda closure object code is Bar$$$Lambda$103.class.javap
  • closure3

    • Lowered to final <static> <artifact> def $anonfun$new$7$adapted(closure1$1: Function1, closure2$1: Function1, k: Object, l: Object, m: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$7(closure1$1, closure2$1, unbox(k), unbox(l), unbox(m))
    • Runtime generated indylambda closure object code is Bar$$$Lambda$104.class.javap

None of the examples in this Gist capture a $this. But running the following snippet in a Scala 2.12 REPL will demonstrate a $this-capturing lambda:

$ <Scala 2.12.2>/bin/scala -Xprint:delambdafy -Yshow:delambdafy -Yrepl-class-based
:pa
class NotSerializableClass1(val x: Int)
case class Foo(id: String)
val ns = new NotSerializableClass1(42)
val func: Any => Foo = { _ => Foo("") }
<Ctrl+D>

In this REPL example, NSC somehow forces both NotSerializableClass1 and Foo to capture the enclosing REPL line object, thus func also needs to capture a $this for the REPL line object so that it can call Foo's constructor, passing the captured $this as the arg$outer argument to Foo's constructor.

A Note on Apache Spark's ClosureCleaner (WIP)

Author: Kris Mok

Date: 2020-04-30

Link to this Gist: https://gist.github.com/rednaxelafx/e9ecd09bbd1c448dbddad4f4edf25d48

This note is written in the context of Apache Spark 3.0's ClosureCleaner support for the "indylambda" style lowering of closures, which is the new default since Scala 2.12 (SPARK-31399).

c.f. Josh Rosen's analysis on Spark's ClosureCleaner support for Scala 2.11 vs 2.12 right now: https://issues.apache.org/jira/browse/SPARK-31399?focusedCommentId=17080359&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-17080359

For some details of how Scala lowers its lambda literals to Java bytecode, please refer to the Notes.md file in this Gist.

Apache Spark and its ClosureCleaner

Apache Spark provides a few sets of declarative APIs that allow users to express queries in a concise way.

Spark's APIs are most expressive in its Scala bindings, and Scala lambdas/closures play an important part in the story. It's very common for users to use Scala closures in:

  • RDD: sc.parallelize(0L to 100L).map { x => x + 3L }
  • Typed DataFrame operations: spark.range(100).map { x => x + 3L }
  • Scala UDFs: spark.range(100).select(udf((x: Long) => x + 3L, LongType)($"id"))

To enable distributed execution, the closures in the user code that come into Spark on the Spark driver have to be sent over to the Spark executors. This involves serializing the closures on the driver and deserializing them on the executors. Sounds straightforward. (Note that the bytecode backing these closures is usually already present on both the Spark driver and the executors if it's packaged into JARs; otherwise, if the closures are defined in the Spark Shell (a Scala REPL embedded into Spark), there's a mechanism for the executors to fetch the class files from the driver.)
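
Conceptually, that round trip is just Java serialization of the closure object. The sketch below uses a hypothetical roundTrip helper rather than Spark's own serializer plumbing, but it's enough to see where a NotSerializableException would come from if a closure captured non-serializable state.

import java.io._

// Hypothetical helper: serialize a closure to bytes ("driver side") and
// deserialize it back from those bytes ("executor side").
def roundTrip[T](closure: T): T = {
  val buffer = new ByteArrayOutputStream()
  val out = new ObjectOutputStream(buffer)
  out.writeObject(closure) // throws NotSerializableException if any captured state isn't serializable
  out.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
  in.readObject().asInstanceOf[T]
}

val addThree: Long => Long = x => x + 3L
println(roundTrip(addThree)(39L)) // 42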

But problems can arise if the closure captures non-serializable state. One aspect of this is that the Scala compiler may accidentally capture more state than what's needed. In that case, the user would think the closure they wrote should be serializable, and would be very frustrated to get a weird NotSerializableException instead.

To help improve the user experience and work around Scala compiler limitations, Spark introduced a ClosureCleaner that specifically targets implementation details of how Scala lowers lambdas/closures, and tries to figure out what state is probably not needed and thus can be "cleaned". This allows the removal of accidentally captured non-serializable state on a best-effort basis.

This ClosureCleaner has existed since the initial commit of Apache Spark (and, identically, since the initial commit of the original Spark in the Mesos repository) by Spark's original creator Matei Zaharia back in 2010. The core concepts haven't changed much since then. The most significant development over the original was probably the change that enhanced inner-closure handling by Andrew Or in 2015.

Spark's ClosureCleaner is conservative in the sense that it'll only clear out "unreferenced" fields on two kinds of objects:

  • Scala's compiler-synthesized inner-class based closure objects, whose fields are known to be only for the captured state
  • Scala REPL's synthesized "line" objects, whose fields are known to be only for emulating top-level variables in the REPL

But the same ClosureCleaner is also aggressive (and potentially harmful) because its analysis is far from precise, and it may clean state on the REPL line objects that is still needed. In pathological cases, something that would have been better reported as a NotSerializableException becomes serializable, and then a NullPointerException happens unexpectedly later. We'll come back to an example of such a case later in this note.

Spark's ClosureCleaner is, by nature, a hack upon hacks. It heavily depends on implementation details of both Scala's compiler+runtime and Java's standard library in Sun/Oracle JDK / OpenJDK. As such, it's sensitive to changes in Scala's and the JDK's internals, and it's very brittle. A better solution would have been to implement better closure-capture analysis in the Scala compiler itself, which would obviate the need for a reflection-heavy ClosureCleaner at runtime.

Scala 2.12 support

The ClosureCleaner in Apache Spark master as of now (2020-04-30) is only fully implemented for Scala 2.11 and below; both points above need to be adjusted for the new "indylambda" lowering strategy in Scala 2.12 and above. This note tries to explain the current implementation strategy and what's needed to support indylambda.

Lightbend helped lay the groundwork for the Scala 2.12 support in Apache Spark. SPARK-14540 captures the initial work, and SPARK-31399 captures a missing piece in the initial work.

The main example

We'll be using the following code snippet in a Spark Shell as the main example in this note.

org.apache.log4j.Logger.getLogger("org.apache.spark.util.ClosureCleaner").setLevel(org.apache.log4j.Level.DEBUG)
:pa
class NotSerializableClass1(val x: Int)
case class Foo(id: String)
val ns = new NotSerializableClass1(42)
val func: Any => Foo = { _ => Foo("") }
<Ctrl+D>
sc.parallelize(0 to 2).map(func)

The :pa command puts the Scala REPL into paste mode, and the EOF (Ctrl and D keys) exits paste mode and lets the REPL evaluate the pasted input.

We can start the Spark Shell with

$ bin/spark-shell -Xprint:jvm -Yshow:jvm

to see the lowered syntax tree that NSC produced just before final bytecode generation.

ClosureCleaner's algorithm of cleaning Scala closures

The basic idea is very simple (but the assumption may be too aggressive). It's just a mark-sweep algorithm:

  • Mark: scan the code of the starting closure and its nested ("inner") closures, and mark the fields they access on the outer objects;
  • Sweep: null out the unaccessed fields on those outer objects.

A starting closure object may reference a chain of "outer" objects for its captured state, and it may create nested ("inner") closure objects from its own code. Note that there's a strong distinction between the "outer" and "inner" objects involved:

  • The "outer" objects are a part of the captured state, and already exists when the starting object came into being. They are referenced by the given starting closure object, and they're subject to cleaning.
  • The "inner" closure objects can be grouped into two categories:
    • Lexically nested closures, i.e. a closure literal declared inside another closure literal. The objects that represent them don't exist yet: they'll be created afresh every time the given starting closure is invoked, so they're not subject to cleaning -- you can't clean what's not there yet. We don't care about inner objects created in some previous invocation of the given starting closure.
      val outerClosure = (x: Int) => {
        val innerClosure = (y: Int) => {
          // ...
        }
      }
    • Captured closures, i.e. a closure referencing another closure. The referenced closure object already exists when the capturing closure is created, so it's subject to cleaning as well. But Spark's ClosureCleaner doesn't handle this case well yet; more on that later in this note.
      {
        val siblingClosure = (x: Int) => x + this.fieldA   // captures `this`, references `fieldA` on `this`.
        val startingClosure = (y: Int) => y + this.fieldB + siblingClosure(y)  // captures `this` and `siblingClosure`, references `fieldB` on `this`.
      }

An "outer" object can either be another closure, or a REPL line object, or something else.

  • enclosing closure
    • NOTE: this only applies to the inner-class based Scala lambda lowering (default in Scala 2.11 and below), and does NOT apply to the new indylambda (new default in Scala 2.12 and above)
    • This is for when a closure has captured local variables from its enclosing closure, and/or for when a closure has captured something further up the outer chain.
  • enclosing REPL line object
    • NOTE: applies to both inner-class/indylambda style of Scala lambda lowering
    • This is for when a closure needs to reference a top-level variable, which in turn is implemented as a field on the enclosing REPL line object, so a reference to this REPL line object is captured by the closure.
  • something else
    • Well, that's unknown territory in user code, so better not to touch its state.

To "safely" clean the outer objects, we need to know which fields are being accessed by the starting and inner closures. This assumes that closures are the only code that may access fields on the outer objects, and disregards uses of fields on the outer objects in case references to them escape outside of the closure. (THIS ASSUMPTION IS UNSAFE! That's why I'm handwaving with the quoted "safely" ...)

Given the assumption, cleaning the outer objects would involve:

  1. Finding all the "outer" objects until the first one that is not an enclosing closure. Populate a mapping of "an outer object's class" -> "a set of fields being accessed by starting/inner closures". Initially the sets in the map would all be empty. NOTE: Super classes of an outer object's class are also added to this mapping to make sure fields on super classes are also handled correctly.
    • For inner-class based Scala closures, this means following the $outer chain until coming across a non-closure "outer".
    • For indylambda, there's no "chain", but just 0 or 1 enclosing $this, which could be a REPL line object or something else, but never an enclosing closure object.
  2. Find all the nested ("inner") closures' implementation methods, and scan their bytecode to find all the fields they access on the outer objects. Mark the accessed fields in the mapping.
    • (missing in Spark 2.4/3.0) scan "sibling" closures as well, i.e. the ones that are not lexically nested in the starting closure, but may be referenced and invoked somewhere inside the starting closure (or one of its nested closures). See an example in the Scala 2.11 section below.
  3. Traverse the chain of "outer" objects in reverse order and clone+clean each of them (see the sketch after this list). Start from the outermost and stop just before the given starting closure.
    • Closure objects and REPL line objects will be cloned, and only the accessed fields will be copied over to the cloned object, leaving the unaccessed fields null.
    • Outer closures can optionally be transitively cleaned
  4. Fixup the "outer" reference from the starting closure to its immediate cloned+cleaned outer object.

Safer alternative assumption

As mentioned above, this design is unsafe because it doesn't take escaped references to the captured "outer" objects into account. A reference "escapes":

  • When it's assigned to a field. Both instance fields and static fields count.
  • When it's passed as an argument to a method/function/constructor call. Once a reference has made its way outside the scope of what we can analyze, we have to assume that anything can happen to it.

To be safe, one approach is to go pessimistic and assume that any occurrence of an escaped reference to any of the outer objects blacklists the chain of outer objects, from that object out to the outermost, as NOT eligible for cleaning. This may be overkill, though: Spark users might not like the fact that what used to run okay suddenly starts complaining with NotSerializableException...
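
As a rough illustration of this pessimistic route, the following sketch (hypothetical code, not Spark's) scans a closure class's bytecode with ASM and conservatively flags a potential escape whenever a value of one of the outer types is stored into a field or passed as a call argument. It over-approximates: it doesn't track which particular reference flows where, only that some value of an outer type might escape.

import org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Opcodes, Type}

// `classBytes` is assumed to be the raw .class bytes of the closure's class,
// and `outerInternalNames` the JVM-internal names of the outer objects' classes.
def mayEscapeOuter(classBytes: Array[Byte], outerInternalNames: Set[String]): Boolean = {
  var escaped = false
  def isOuterType(t: Type): Boolean =
    t.getSort == Type.OBJECT && outerInternalNames.contains(t.getInternalName)

  new ClassReader(classBytes).accept(new ClassVisitor(Opcodes.ASM7) {
    override def visitMethod(access: Int, name: String, descriptor: String,
                             signature: String, exceptions: Array[String]): MethodVisitor =
      new MethodVisitor(Opcodes.ASM7) {
        // potential escape via field store: the stored value has an outer type
        override def visitFieldInsn(opcode: Int, owner: String, fname: String, fdesc: String): Unit =
          if ((opcode == Opcodes.PUTFIELD || opcode == Opcodes.PUTSTATIC) && isOuterType(Type.getType(fdesc)))
            escaped = true
        // potential escape via call argument: some parameter has an outer type
        override def visitMethodInsn(opcode: Int, owner: String, mname: String,
                                     mdesc: String, isInterface: Boolean): Unit =
          if (Type.getArgumentTypes(mdesc).exists(isOuterType))
            escaped = true
      }
  }, ClassReader.SKIP_DEBUG)
  escaped
}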

Another route is to implement a slightly more advanced escape analysis, i.e. expand the scope of what we can analyze. That's a whole story on its own... maybe something simple like the BCEscapeAnalyzer in the HotSpot JVM.

But of course, the best way to deal with this is to make closure capturing as clean as possible right from the roots -- inside the compiler.

The case with Scala 2.11's closures

Pseudocode for relevant classes after lowering:

  // case class Foo(id: String)
  case class iw$Foo($outer: $line15.iw, id: String)

  // (x: Any) => Foo("")
  @SerialVersionUID(value = 0)
  final <synthetic> class anonfun$1
      extends scala.runtime.AbstractFunction1
      with Serializable {
    final def apply(x$1: Object): $line15.iw$Foo = new $line15.iw$Foo(anonfun$1.this.$outer, "");
    <synthetic> <paramaccessor> <artifact> private[this] val $outer: $line15.iw = _;
    final <bridge> <artifact> def apply(v1: Object): Object = anonfun$1.this.apply(v1);
    def <init>($outer: $line15.iw): <$anon: Function1> = {
      if ($outer.eq(null))
        throw null
      else
        anonfun$1.this.$outer = $outer;
      anonfun$1.super.<init>();
      ()
    }
  }

Notice how the closure gets lowered into a class similar to a Java anonymous inner class, with an $outer chain that implements the dynamic environment chain, parallel to the static lexical scope.

Log from Spark's ClosureCleaner:

20/04/30 19:21:57 DEBUG ClosureCleaner: +++ Cleaning closure <function1> ($line15.$read$$iw$$iw$$anonfun$1) +++
20/04/30 19:21:57 DEBUG ClosureCleaner:  + declared fields: 2
20/04/30 19:21:57 DEBUG ClosureCleaner:      public static final long $line15.$read$$iw$$iw$$anonfun$1.serialVersionUID
20/04/30 19:21:57 DEBUG ClosureCleaner:      private final $line15.$read$$iw$$iw $line15.$read$$iw$$iw$$anonfun$1.$outer
20/04/30 19:21:57 DEBUG ClosureCleaner:  + declared methods: 2
20/04/30 19:21:57 DEBUG ClosureCleaner:      public final java.lang.Object $line15.$read$$iw$$iw$$anonfun$1.apply(java.lang.Object)
20/04/30 19:21:57 DEBUG ClosureCleaner:      public final $line15.$read$$iw$$iw$Foo $line15.$read$$iw$$iw$$anonfun$1.apply(java.lang.Object)
20/04/30 19:21:57 DEBUG ClosureCleaner:  + inner classes: 0
20/04/30 19:21:57 DEBUG ClosureCleaner:  + outer classes: 1
20/04/30 19:21:57 DEBUG ClosureCleaner:      $line15.$read$$iw$$iw
20/04/30 19:21:57 DEBUG ClosureCleaner:  + outer objects: 1
20/04/30 19:21:57 DEBUG ClosureCleaner:      $line15.$read$$iw$$iw@295e989
20/04/30 19:21:57 DEBUG ClosureCleaner:  + populating accessed fields because this is the starting closure
20/04/30 19:21:57 DEBUG ClosureCleaner:  + fields accessed by starting closure: 2
20/04/30 19:21:57 DEBUG ClosureCleaner:      (class $line15.$read$$iw$$iw,Set())
20/04/30 19:21:57 DEBUG ClosureCleaner:      (class java.lang.Object,Set())
20/04/30 19:21:57 DEBUG ClosureCleaner:  + outermost object is a REPL line object, so we clone it: (class $line15.$read$$iw$$iw,$line15.$read$$iw$$iw@295e989)
20/04/30 19:21:57 DEBUG ClosureCleaner:  + cloning the object $line15.$read$$iw$$iw@295e989 of class $line15.$read$$iw$$iw
20/04/30 19:21:57 DEBUG ClosureCleaner:  +++ closure <function1> ($line15.$read$$iw$$iw$$anonfun$1) is now cleaned +++

Notice how it scans the closure's bytecode and doesn't find any field access on known outer objects, so it decides that all fields on the outer REPL line object are eligible for cleaning -- erasing to null.

Going too aggressive?

Now, the existing ClosureCleaner algorithm assumes that if a field on an "outer" object isn't directly accessed by the starting closure or its inner closures, then it's safe to clean.

But that's too aggressive and doesn't take reference escaping into account. The main example in this note demonstrates that an $outer reference to the enclosing REPL line object escapes the closure and gets passed into the Foo object. The ClosureCleaner does NOT analyze what Foo is using the escaped reference for and just assumes it's not doing anything.

While that worked for the main example, if we slightly tweak it into:

:pa
class NotSerializableClass1(val x: Int)
val ns = new NotSerializableClass1(42)
case class Foo(id: String) {
  def foo() = this.id + ns.x
}
val func: Any => Foo = { _ => Foo("") }
<Ctrl+D>
val result = sc.parallelize(0 to 2).map(func).collect()
result.map(_.foo())

(Notice how Foo.foo() now depends on $outer.ns, which means that, to maintain strict correctness, the ns field on the enclosing REPL line object should NOT be cleaned.)

We'll get a NullPointerException on the driver, after collecting the results back:

scala> val result = sc.parallelize(0 to 2).map(func).collect()
result: Array[Foo] = Array(Foo(), Foo(), Foo())

scala> result.map(_.foo())
java.lang.NullPointerException
  at Foo.foo(<console>:14)
  at $anonfun$1.apply(<console>:26)
  at $anonfun$1.apply(<console>:26)
  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
  at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
  at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
  at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
  at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
  ... 49 elided

If we tweak it to call Foo.foo() on the executor side:

:pa
class NotSerializableClass1(val x: Int)
val ns = new NotSerializableClass1(42)
case class Foo(id: String) {
  def foo() = this.id + ns.x
}
val func: Any => String = { _ => Foo("").foo() }
<Ctrl+D>
sc.parallelize(0 to 2).map(func).collect

It'll NPE on the executor instead:

20/05/01 01:58:51 ERROR Executor: Exception in task 7.0 in stage 0.0 (TID 7)
java.lang.NullPointerException
	at $line14.$read$$iw$$iw$Foo.foo(<console>:14)
	at $line14.$read$$iw$$iw$$anonfun$1.apply(<console>:16)
	at $line14.$read$$iw$$iw$$anonfun$1.apply(<console>:16)
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
	at scala.collection.Iterator$class.foreach(Iterator.scala:891)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310)
	at scala.collection.AbstractIterator.to(Iterator.scala:1334)
	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302)
	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334)
	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289)
	at scala.collection.AbstractIterator.toArray(Iterator.scala:1334)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)

This kind of NPE would be fairly hard to debug, unless the person knows the nitty gritty details of Spark's ClosureCleaner.

Implementation limitations with regard to indirectly referenced closures

Run this example in a Spark 2.4 Spark Shell:

org.apache.log4j.Logger.getLogger("org.apache.spark.util.ClosureCleaner").setLevel(org.apache.log4j.Level.DEBUG)
:pa
case class Foo(id: String)
val topLevelFoo = Foo("top-level")
val otherFunc = () => topLevelFoo
val func: Any => Foo = { _ => otherFunc() }
<Ctrl+D>
sc.parallelize(0 to 2).map(func).collect

This returns res1: Array[Foo] = Array(Foo(top-level), Foo(top-level), Foo(top-level)), so that's good... right?

Nope.

Because if we check the logs from the ClosureCleaner:

20/05/03 00:09:00 DEBUG ClosureCleaner: +++ Cleaning closure <function1> ($line15.$read$$iw$$iw$$anonfun$2) +++
20/05/03 00:09:00 DEBUG ClosureCleaner:  + declared fields: 2
20/05/03 00:09:00 DEBUG ClosureCleaner:      public static final long $line15.$read$$iw$$iw$$anonfun$2.serialVersionUID
20/05/03 00:09:00 DEBUG ClosureCleaner:      private final $line15.$read$$iw$$iw $line15.$read$$iw$$iw$$anonfun$2.$outer
20/05/03 00:09:00 DEBUG ClosureCleaner:  + declared methods: 2
20/05/03 00:09:00 DEBUG ClosureCleaner:      public final java.lang.Object $line15.$read$$iw$$iw$$anonfun$2.apply(java.lang.Object)
20/05/03 00:09:00 DEBUG ClosureCleaner:      public final $line15.$read$$iw$$iw$Foo $line15.$read$$iw$$iw$$anonfun$2.apply(java.lang.Object)
20/05/03 00:09:00 DEBUG ClosureCleaner:  + inner classes: 0
20/05/03 00:09:00 DEBUG ClosureCleaner:  + outer classes: 1
20/05/03 00:09:00 DEBUG ClosureCleaner:      $line15.$read$$iw$$iw
20/05/03 00:09:00 DEBUG ClosureCleaner:  + outer objects: 1
20/05/03 00:09:00 DEBUG ClosureCleaner:      $line15.$read$$iw$$iw@7719b257
20/05/03 00:09:00 DEBUG ClosureCleaner:  + populating accessed fields because this is the starting closure
20/05/03 00:09:00 DEBUG ClosureCleaner:  + fields accessed by starting closure: 2
20/05/03 00:09:00 DEBUG ClosureCleaner:      (class $line15.$read$$iw$$iw,Set(otherFunc))
20/05/03 00:09:00 DEBUG ClosureCleaner:      (class java.lang.Object,Set())
20/05/03 00:09:00 DEBUG ClosureCleaner:  + outermost object is a REPL line object, so we clone it: (class $line15.$read$$iw$$iw,$line15.$read$$iw$$iw@7719b257)
20/05/03 00:09:00 DEBUG ClosureCleaner:  + cloning the object $line15.$read$$iw$$iw@7719b257 of class $line15.$read$$iw$$iw
20/05/03 00:09:00 DEBUG ClosureCleaner:  +++ closure <function1> ($line15.$read$$iw$$iw$$anonfun$2) is now cleaned +++

The outermost object is a REPL line object and it's claimed to be cleaned, and the only field identified as accessed is otherFunc. So the topLevelFoo field on the cloned+cleaned REPL line object should be null, and if we check func.$outer.topLevelFoo (through reflection), it is indeed null.

Then how did this code run without hitting an NPE? The answer is simple: the current implementation of ClosureCleaner doesn't transitively scan otherFunc from func, so it misses the fact that otherFunc needs cleaning as well (along with a fixup of its $outer to point to the same cloned+cleaned REPL line object).

If we reflectively check func.$outer and func.$outer.otherFunc.$outer, which should refer to the same object instance, we find that they actually point to different objects after cleaning: $iw@16d31c48 vs $iw@7719b257, and the latter's topLevelFoo field is still the original Foo("top-level") one.
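
To see this concretely, we can poke at the cleaned closure with plain reflection. The helper below is hypothetical (not part of Spark or this Gist); the field names $outer, otherFunc and topLevelFoo follow the Scala 2.11 lowering shown in the logs above.

// Hypothetical reflection helper.
def readField(obj: AnyRef, name: String): AnyRef = {
  val field = obj.getClass.getDeclaredField(name)
  field.setAccessible(true)
  field.get(obj)
}

val outerOfFunc  = readField(func, "$outer")            // the cloned+cleaned REPL line object
val otherFuncRef = readField(outerOfFunc, "otherFunc")  // the sibling closure, which was NOT cleaned
val outerOfOther = readField(otherFuncRef, "$outer")    // the original, un-cleaned REPL line object

println(outerOfFunc eq outerOfOther)            // false -- they should have been the same object
println(readField(outerOfFunc, "topLevelFoo"))  // null, cleaned away
println(readField(outerOfOther, "topLevelFoo")) // still the original Foo("top-level")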

Now, tweaking this to make the REPL line object reference a non-serializable object:

:pa
class NotSerializableClass1(val x: Int)
val ns = new NotSerializableClass1(42)
case class Foo(id: String)
val topLevelFoo = Foo("top-level")
val otherFunc = () => topLevelFoo
val func: Any => Foo = { _ => otherFunc() }
<Ctrl+D>
sc.parallelize(0 to 2).map(func).collect

would give us NotSerializableException:

org.apache.spark.SparkException: Task not serializable
...
Caused by: java.io.NotSerializableException: NotSerializableClass1
Serialization stack:
	- object not serializable (class: NotSerializableClass1, value: NotSerializableClass1@1d208795)
	- field (class: $iw, name: ns, type: class NotSerializableClass1)
	- object (class $iw, $iw@ab5954d)
	- field (class: $anonfun$1, name: $outer, type: class $iw)
	- object (class $anonfun$1, <function0>)
	- field (class: $iw, name: otherFunc, type: interface scala.Function0)
	- object (class $iw, $iw@2f349170)
	- field (class: $anonfun$2, name: $outer, type: class $iw)
	- object (class $anonfun$2, <function1>)

of course. The ClosureCleaner missed cleaning a sibling closure. No surprise at all.

The case with Scala 2.12's indylambda closures

A quick recap of a few important differences between the Scala 2.11 and 2.12 closures:

  • Both involve a notion of "outer" objects, but
    • Scala 2.11: "outer" objects can be enclosing closures and/or enclosing object. There can be 0 to many of them. They're singly-linked through a $outer chain -- the inner-classes that represent a closure would host a field called $outer to form that chain.
    • Scala 2.12: the "outer" object can only be 0 or 1 enclosing object, never an enclosing closure. The enclosing object is referred to as the $this parameter in the implementation method, and on the LMF-generated class it'll be in the arg$1 field (in the serialization proxy java.lang.invoke.SerializedLambda, it can be accessed via serializedLambda.getCapturedArg(0))
  • Both involve a notion of nested ("inner") closures, but the way it's encoded in bytecode is different
    • TODO

Recapping the discussion in the algorithm section, what we need to do to bring Spark's ClosureCleaner to feature parity between the Scala 2.11 and 2.12 support is:

  • Determine whether or not an object passed in is an indylambda closure object (see the rough sketch after this list)
    • (already in Spark 2.4/3.0, needs slight tweak) quick check on the object's class to see if it meets all of
      • is synthetic
      • implements scala.Serializable
      • (not in Spark 2.4/3.0 yet) implements an interface that starts with scala.Function (e.g. scala.Function1)
    • (already in Spark 2.4/3.0) get the serialization proxy SerializedLambda by reflectively invoking the writeReplace() method on the input object
      • if there is no writeReplace() method, or if the returned object isn't a SerializedLambda, then it's not an indylambda closure object
    • (not in Spark 2.4/3.0 yet) check the SerializedLambda to see if the implementation method meets all of the following:
      • is static and synthetic
      • has $anonfun in its method name
  • Determine whether or not there's an enclosing $this that requires cleaning. It has to meet all of the following criteria:
    • The implementation method of the lambda (which can be found through the SerializedLambda proxy) takes the enclosing instance as a parameter:
      • the first (0th) parameter has the exact name "$this", and its type matches the owning class
    • The type of the enclosing $this is a REPL line class
  • If there's no enclosing $this then there's nothing to clean, done.
  • Otherwise, find all the inner closures and mark the fields they access on the enclosing $this REPL line object
  • Clone and clean the enclosing $this REPL line object
  • Fixup the reference from the starting closure to its enclosing $this to the cloned+cleaned one. Done.
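
The detection step can be sketched roughly as follows. This is hypothetical helper code, not Spark's actual implementation; it relies only on the writeReplace()/SerializedLambda mechanism, and omits the scala.Function-interface check from the list above for brevity.

import java.lang.invoke.SerializedLambda

// Hypothetical: return the SerializedLambda proxy if `closure` looks like an
// indylambda closure object, None otherwise.
def asIndylambda(closure: AnyRef): Option[SerializedLambda] = {
  val cls = closure.getClass
  val candidate = cls.isSynthetic && classOf[scala.Serializable].isAssignableFrom(cls)
  if (!candidate) None
  else try {
    val writeReplace = cls.getDeclaredMethod("writeReplace")
    writeReplace.setAccessible(true)
    writeReplace.invoke(closure) match {
      case sl: SerializedLambda if sl.getImplMethodName.contains("$anonfun") => Some(sl)
      case _ => None // has a writeReplace(), but it's not an indylambda proxy
    }
  } catch {
    case _: NoSuchMethodException => None // no writeReplace() at all
  }
}

When asIndylambda(func) is defined and the implementation method takes a leading $this parameter, the enclosing instance to inspect (and possibly clean) is serializedLambda.getCapturedArg(0).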

Spark ClosureCleaner Tidbits

A few things to improve

TODO

ClosureCleaner.isClosure is too loose

How does ClosureCleaner determine whether or not a class is a Scala closure? Well...

  // Check whether a class represents a Scala closure
  private def isClosure(cls: Class[_]): Boolean = {
    cls.getName.contains("$anonfun$")
  }

Wow that's loose. This is still the case for all Spark versions as of 2020-05-06.

The thing is, this check doesn't take nesting level into account:

  • there can be anonymous functions, mangled as $anonfun in the name, or anonymous classes, mangled as $anon in the name
  • there can be zero or more $anonfun and $anon in a class name, and the order represents nesting level (left is outer, right is inner)
  • the last component in the mangled name tells what it actually is
    • the last component doesn't even have to be one of the two above. It could even be a named inner class inside of a Scala closure.

This existing check matches everything that has at least one level of anonymous function (closure) in its name, but the actual entity could be anything: a class, a trait, an object, an actual closure, etc.
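
For instance, all of the following (hypothetical) mangled names contain "$anonfun$" and would pass isClosure, even though only the first one names an actual closure class:

// Hypothetical mangled names -- only the first is a closure class; the others
// are an anonymous class and a named helper class declared inside a closure.
Seq(
  "Foo$$anonfun$1",          // an inner-class style closure
  "Foo$$anonfun$1$$anon$2",  // an anonymous class nested in that closure
  "Foo$$anonfun$1$Helper$1"  // a named class nested in that closure
).foreach(name => assert(name.contains("$anonfun$")))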

If we run the test suite org.apache.spark.util.ClosureCleanerSuite in the Spark 2.4 branch (as of 3936b1442eb17709ff19d305b3da5ca520a79c56), the debug-level log of one of its test cases looks like this:

+++ Cleaning closure <function1> (org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2) +++
 + declared fields: 2
     public static final long org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2.serialVersionUID
     private final org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5 org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2.$outer
 + declared methods: 2
     public final java.lang.Object org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2.apply(java.lang.Object)
     public final scala.Tuple8 org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2.apply(int)
 + inner classes: 0
 + outer classes: 2
     org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5
     org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3
 + outer objects: 2
     org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5@72ef7f1c
     org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3@3a7fc9d0
 + populating accessed fields because this is the starting closure
 + fields accessed by starting closure: 4
     (class org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3,Set(innerObject))
     (class org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5,Set(s3, n3, n1, d3, $outer))
     (class org.apache.spark.util.TestAbstractClass2,Set(s1, n1))
     (class java.lang.Object,Set())
 + outermost object is a closure, so we clone it: (class org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3,org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3@3a7fc9d0)

So org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5$$anonfun$getData$2 is indeed a closure class. But neither org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3$$anon$5 nor org.apache.spark.util.ClosureCleanerSuite$$anonfun$18$$anon$3 is a closure class, and they shouldn't be considered part of the supported $outer chain.

Interestingly, because of this bug, there's a "bugfix" in Spark's ClosureCleaner to support finding fields on super classes. That doesn't fix the actual bug of the isClosure check being too loose, but instead adds a level of unnecessary workaround on top.

If we assume that Spark's ClosureCleaner only intended to clean real closures and REPL line objects, and it explicitly wanted to refrain from cleaning instances of user-defined classes, then the super class support is completely unnecessary -- we can assume that Scala REPL's line objects don't involve complex inheritance.

A note on the infamous $iwC

As of 2020-05-04, the latest commit on Apache Spark's master branch still contains the following lines of code in the ClosureCleaner: https://github.com/apache/spark/blob/5052d9557d964c07d0b8bd2e2b08ede7c6958118/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala#L494-L498

          // Check for calls a getter method for a variable in an interpreter wrapper object.
          // This means that the corresponding field will be accessed, so we should save it.
          if (op == INVOKEVIRTUAL && owner.endsWith("$iwC") && !name.endsWith("$outer")) {
            fields(cl) += name
          }

So what is this "$iwC" thingy? If we try to search for it, let's say Google the terms "Scala REPL $iwC", we'd only find entries that mention Spark, instead of Scala in general.

That's because this was a hack that has existed since the initial commit of Spark (it can be traced back both via the Apache Spark repo and via the original Spark-in-Mesos repo). Let's use the Apache Spark repo's commit here: https://github.com/apache/spark/blob/df29d0ea4c8b7137fdd1844219c7d489e3b0d9c9/src/scala/spark/repl/SparkInterpreter.scala#L372-L387

    val impname = compiler.nme.INTERPRETER_IMPORT_WRAPPER // "$iw"
    // ...
    def addWrapper() {
      code.append("@serializable class " + impname + "C {\n")
      trailingLines.append("}\nval " + impname + " = new " + impname + "C;\n")
      accessPath.append("." + impname)
      currentImps.clear
    }

Note that "@serializable class " + impname + "C {\n" is "@serializable class $iwC {\n". So where a regular Scala REPL would have generated $iw classes for the REPL's "import wrappers", Spark's hacked version would use the "$iwC" name instead.

And early versions of Spark's ClosureCleaner depended on that hack, explicitly checking the class name for "$iwC" as a means to identify Spark's Scala REPL line objects.

This hack existed in Spark from the start, when it only supported Scala 2.7.7, all the way through Apache Spark 1.2.0 in its Scala 2.10.x support; the Scala 2.11 support in that same release DOES NOT have this hack anymore.

So since the Scala 2.11 support in Apache Spark, that piece of "$iwC" checking code has been effectively dead.

A few known forks / copies (as of 2020-05)

// Default delambdafy strategy ("method"), aka "indylambda"
// via <Scala 2.12.2>/bin/scalac -Xprint:delambdafy,jvm -Yshow:delambdafy,jvm bar.scala
[[syntax trees at end of delambdafy]] // bar.scala
package <empty> {
object Bar extends Object {
def test(desc: String, f: Function0): Unit = {
scala.Predef.println(new StringContext(scala.Predef.wrapRefArray(Array[String]{"Running test: ", ""}.$asInstanceOf[Array[Object]]())).s(scala.Predef.genericWrapArray(Array[Object]{desc})));
f.apply$mcV$sp()
};
def main(args: Array[String]): Unit = scala.Predef.println("Hello from main()");
final <static> <artifact> def $anonfun$new$3(localValue$1: String, x: Int): String = x.+(localValue$1);
final <static> <artifact> def $anonfun$new$2(localValue$1: String, i: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), i).map({
$anonfun(localValue$1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <static> <artifact> def $anonfun$new$6(localValue$1: String, y: Int): String = y.+(localValue$1);
final <static> <artifact> def $anonfun$new$5(localValue$1: String, x: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), x).map({
$anonfun(localValue$1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <static> <artifact> def $anonfun$new$4(localValue$1: String, j: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), j).flatMap({
$anonfun(localValue$1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <static> <artifact> def $anonfun$new$8(x: Int): Int = x.+(1);
final <static> <artifact> def $anonfun$new$7(closure1$1: Function1, closure2$1: Function1, k: Int, l: Int, m: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), k).flatMap(closure2$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), l).flatMap(closure1$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), m).map({
$anonfun()
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <static> <artifact> def $anonfun$new$1(): Unit = {
val localValue: String = "someSerializableValue";
val closure1: Function1 = {
$anonfun(localValue)
};
val closure2: Function1 = {
$anonfun(localValue)
};
val closure3: Function3 = {
$anonfun(closure1, closure2)
};
val closure1r: scala.collection.immutable.IndexedSeq = closure1.apply(scala.Int.box(1)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure2r: scala.collection.immutable.IndexedSeq = closure2.apply(scala.Int.box(2)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure3r: scala.collection.immutable.IndexedSeq = closure3.apply(scala.Int.box(3), scala.Int.box(4), scala.Int.box(5)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
()
};
def <init>(): Bar.type = {
Bar.super.<init>();
Bar.this.test("try a ClosureCleanerSuite2 example", {
$anonfun()
});
()
};
final <static> <artifact> def $anonfun$new$3$adapted(localValue$1: String, x: Object): String = Bar.this.$anonfun$new$3(localValue$1, unbox(x));
final <static> <artifact> def $anonfun$new$6$adapted(localValue$1: String, y: Object): String = Bar.this.$anonfun$new$6(localValue$1, unbox(y));
final <static> <artifact> def $anonfun$new$5$adapted(localValue$1: String, x: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$5(localValue$1, unbox(x));
final <static> <artifact> def $anonfun$new$2$adapted(localValue$1: String, i: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$2(localValue$1, unbox(i));
final <static> <artifact> def $anonfun$new$4$adapted(localValue$1: String, j: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$4(localValue$1, unbox(j));
final <static> <artifact> def $anonfun$new$7$adapted(closure1$1: Function1, closure2$1: Function1, k: Object, l: Object, m: Object): scala.collection.immutable.IndexedSeq = Bar.this.$anonfun$new$7(closure1$1, closure2$1, unbox(k), unbox(l), unbox(m))
}
}
[[syntax trees at end of jvm]] // bar.scala: tree is unchanged since delambdafy
// Old-style delambdafy strategy ("inline"), aka inner-class lambda
// via <Scala 2.12.2>/bin/scalac -Xprint:delambdafy,jvm -Yshow:delambdafy,jvm -Ydelambdafy:inline bar.scala
[[syntax trees at end of delambdafy]] // bar.scala
package <empty> {
object Bar extends Object {
def test(desc: String, f: Function0): Unit = {
scala.Predef.println(new StringContext(scala.Predef.wrapRefArray(Array[String]{"Running test: ", ""}.$asInstanceOf[Array[Object]]())).s(scala.Predef.genericWrapArray(Array[Object]{desc})));
f.apply$mcV$sp()
};
def main(args: Array[String]): Unit = scala.Predef.println("Hello from main()");
def <init>(): Bar.type = {
Bar.super.<init>();
Bar.this.test("try a ClosureCleanerSuite2 example", {
(new <$anon: Function0>(): Function0)
});
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
final def apply(): Unit = anonfun$1.this.apply$mcV$sp();
<specialized> def apply$mcV$sp(): Unit = {
val localValue: String = "someSerializableValue";
val closure1: Function1 = {
(new <$anon: Function1>(anonfun$1.this, localValue): Function1)
};
val closure2: Function1 = {
(new <$anon: Function1>(anonfun$1.this, localValue): Function1)
};
val closure3: Function3 = {
(new <$anon: Function3>(anonfun$1.this, closure1, closure2): Function3)
};
val closure1r: scala.collection.immutable.IndexedSeq = closure1.apply(scala.Int.box(1)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure2r: scala.collection.immutable.IndexedSeq = closure2.apply(scala.Int.box(2)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure3r: scala.collection.immutable.IndexedSeq = closure3.apply(scala.Int.box(3), scala.Int.box(4), scala.Int.box(5)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
()
};
final <bridge> <artifact> def apply(): Object = {
anonfun$1.this.apply();
scala.runtime.BoxedUnit.UNIT
};
def <init>(): <$anon: Function0> = {
anonfun$1.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$2 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(i: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), i).map({
(new <$anon: Function1>(anonfun$2.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$2.this.apply(scala.Int.unbox(v1));
<synthetic> <paramaccessor> val localValue$1: String = _;
def <init>($outer: <$anon: Function0>, localValue$1: String): <$anon: Function1> = {
anonfun$2.this.localValue$1 = localValue$1;
anonfun$2.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$2 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(x: Int): String = x.+(anonfun$apply$2.this.$outer.localValue$1);
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$2.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$2.this.$outer = $outer;
anonfun$apply$2.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$3 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(j: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), j).flatMap({
(new <$anon: Function1>(anonfun$3.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$3.this.apply(scala.Int.unbox(v1));
<synthetic> <paramaccessor> val localValue$1: String = _;
def <init>($outer: <$anon: Function0>, localValue$1: String): <$anon: Function1> = {
anonfun$3.this.localValue$1 = localValue$1;
anonfun$3.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$3 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(x: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), x).map({
(new <$anon: Function1>(anonfun$apply$3.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
<synthetic> <stable> <artifact> def $outer(): <$anon: Function1> = anonfun$apply$3.this.$outer;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$3.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$3.this.$outer = $outer;
anonfun$apply$3.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$4 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(y: Int): String = y.+(anonfun$apply$4.this.$outer.$outer().localValue$1);
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$4.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$4.this.$outer = $outer;
anonfun$apply$4.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$4 extends scala.runtime.AbstractFunction3 with Serializable {
final def apply(k: Int, l: Int, m: Int): scala.collection.immutable.IndexedSeq = scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), k).flatMap(anonfun$4.this.closure2$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), l).flatMap(anonfun$4.this.closure1$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(scala.runtime.RichInt.to$extension0(scala.Predef.intWrapper(1), m).map({
(new <$anon: Function1>(anonfun$4.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object, v2: Object, v3: Object): Object = anonfun$4.this.apply(scala.Int.unbox(v1), scala.Int.unbox(v2), scala.Int.unbox(v3));
<synthetic> <paramaccessor> private[this] val closure1$1: Function1 = _;
<synthetic> <paramaccessor> private[this] val closure2$1: Function1 = _;
def <init>($outer: <$anon: Function0>, closure1$1: Function1, closure2$1: Function1): <$anon: Function3> = {
anonfun$4.this.closure1$1 = closure1$1;
anonfun$4.this.closure2$1 = closure2$1;
anonfun$4.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$1 extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
final def apply(x: Int): Int = anonfun$apply$1.this.apply$mcII$sp(x);
<specialized> def apply$mcII$sp(x: Int): Int = x.+(1);
final <bridge> <artifact> def apply(v1: Object): Object = scala.Int.box(anonfun$apply$1.this.apply(scala.Int.unbox(v1)));
def <init>($outer: <$anon: Function3>): <$anon: Function1> = {
anonfun$apply$1.super.<init>();
()
}
}
}
[[syntax trees at end of jvm]] // bar.scala: tree is unchanged since delambdafy
// via <Scala 2.11.8>/bin/scalac -Xprint:jvm -Yshow:jvm bar.scala
[[syntax trees at end of jvm]] // bar.scala
package <empty> {
object Bar extends Object {
def test(desc: String, f: Function0): Unit = {
scala.this.Predef.println(new StringContext(scala.this.Predef.wrapRefArray(Array[String]{"Running test: ", ""}.$asInstanceOf[Array[Object]]())).s(scala.this.Predef.genericWrapArray(Array[Object]{desc})));
f.apply$mcV$sp()
};
def main(args: Array[String]): Unit = scala.this.Predef.println("Hello from main()");
def <init>(): Bar.type = {
Bar.super.<init>();
Bar.this.test("try a ClosureCleanerSuite2 example", {
(new <$anon: Function0>(): Function0)
});
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
final def apply(): Unit = anonfun$1.this.apply$mcV$sp();
<specialized> def apply$mcV$sp(): Unit = {
val localValue: String = "someSerializableValue";
val closure1: Function1 = {
(new <$anon: Function1>(anonfun$1.this, localValue): Function1)
};
val closure2: Function1 = {
(new <$anon: Function1>(anonfun$1.this, localValue): Function1)
};
val closure3: Function3 = {
(new <$anon: Function3>(anonfun$1.this, closure1, closure2): Function3)
};
val closure1r: scala.collection.immutable.IndexedSeq = closure1.apply(scala.Int.box(1)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure2r: scala.collection.immutable.IndexedSeq = closure2.apply(scala.Int.box(2)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
val closure3r: scala.collection.immutable.IndexedSeq = closure3.apply(scala.Int.box(3), scala.Int.box(4), scala.Int.box(5)).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
()
};
final <bridge> <artifact> def apply(): Object = {
anonfun$1.this.apply();
scala.runtime.BoxedUnit.UNIT
};
def <init>(): <$anon: Function0> = {
anonfun$1.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$2 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(i: Int): scala.collection.immutable.IndexedSeq = RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), i).map({
(new <$anon: Function1>(anonfun$2.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$2.this.apply(scala.Int.unbox(v1));
<synthetic> <paramaccessor> val localValue$1: String = _;
def <init>($outer: <$anon: Function0>, localValue$1: String): <$anon: Function1> = {
anonfun$2.this.localValue$1 = localValue$1;
anonfun$2.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$2 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(x: Int): String = x.+(anonfun$apply$2.this.$outer.localValue$1);
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$2.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$2.this.$outer = $outer;
anonfun$apply$2.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$3 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(j: Int): scala.collection.immutable.IndexedSeq = RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), j).flatMap({
(new <$anon: Function1>(anonfun$3.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$3.this.apply(scala.Int.unbox(v1));
<synthetic> <paramaccessor> val localValue$1: String = _;
def <init>($outer: <$anon: Function0>, localValue$1: String): <$anon: Function1> = {
anonfun$3.this.localValue$1 = localValue$1;
anonfun$3.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$3 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(x: Int): scala.collection.immutable.IndexedSeq = RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), x).map({
(new <$anon: Function1>(anonfun$apply$3.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
<synthetic> <stable> <artifact> def $outer(): <$anon: Function1> = anonfun$apply$3.this.$outer;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$3.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$3.this.$outer = $outer;
anonfun$apply$3.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$4 extends scala.runtime.AbstractFunction1 with Serializable {
final def apply(y: Int): String = y.+(anonfun$apply$4.this.$outer.$outer().localValue$1);
<synthetic> <paramaccessor> <artifact> private[this] val $outer: <$anon: Function1> = _;
final <bridge> <artifact> def apply(v1: Object): Object = anonfun$apply$4.this.apply(scala.Int.unbox(v1));
def <init>($outer: <$anon: Function1>): <$anon: Function1> = {
if ($outer.eq(null))
throw null
else
anonfun$apply$4.this.$outer = $outer;
anonfun$apply$4.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$4 extends scala.runtime.AbstractFunction3 with Serializable {
final def apply(k: Int, l: Int, m: Int): scala.collection.immutable.IndexedSeq = RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), k).flatMap(anonfun$4.this.closure2$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), l).flatMap(anonfun$4.this.closure1$1, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[collection.TraversableLike]().++(RichInt.this.to$extension0(scala.this.Predef.intWrapper(1), m).map({
(new <$anon: Function1>(anonfun$4.this): Function1)
}, immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.GenTraversableOnce](), immutable.this.IndexedSeq.canBuildFrom()).$asInstanceOf[scala.collection.immutable.IndexedSeq]();
final <bridge> <artifact> def apply(v1: Object, v2: Object, v3: Object): Object = anonfun$4.this.apply(scala.Int.unbox(v1), scala.Int.unbox(v2), scala.Int.unbox(v3));
<synthetic> <paramaccessor> private[this] val closure1$1: Function1 = _;
<synthetic> <paramaccessor> private[this] val closure2$1: Function1 = _;
def <init>($outer: <$anon: Function0>, closure1$1: Function1, closure2$1: Function1): <$anon: Function3> = {
anonfun$4.this.closure1$1 = closure1$1;
anonfun$4.this.closure2$1 = closure2$1;
anonfun$4.super.<init>();
()
}
};
@SerialVersionUID(value = 0) final <synthetic> class anonfun$apply$1 extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
final def apply(x: Int): Int = anonfun$apply$1.this.apply$mcII$sp(x);
<specialized> def apply$mcII$sp(x: Int): Int = x.+(1);
final <bridge> <artifact> def apply(v1: Object): Object = scala.Int.box(anonfun$apply$1.this.apply(scala.Int.unbox(v1)));
def <init>($outer: <$anon: Function3>): <$anon: Function1> = {
anonfun$apply$1.super.<init>();
()
}
}
}