Skip to content

Instantly share code, notes, and snippets.

@pashu123
Created March 15, 2022 15:18
Show Gist options
  • Save pashu123/5abb43e635d43dd5c8be8dc0ab0b2e22 to your computer and use it in GitHub Desktop.
module attributes {torch.debug_module_name = "GraphModule"} {
func private @__torch__.torch.fx.graph_module.___torch_mangle_0.GraphModule.forward(%arg0: !torch.nn.Module<"__torch__.torch.fx.graph_module.___torch_mangle_0.GraphModule">, %arg1: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg2: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg3: !torch.tensor {torch.type_bound = !torch.vtensor<[64,3,7,7],f32>}, %arg4: !torch.tensor {torch.type_bound = !torch.vtensor<[1000],f32>}, %arg5: !torch.tensor {torch.type_bound = !torch.vtensor<[1000,2048],f32>}, %arg6: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg7: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg8: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg9: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg10: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg11: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg12: !torch.tensor {torch.type_bound = !torch.vtensor<[64,64,1,1],f32>}, %arg13: !torch.tensor {torch.type_bound = !torch.vtensor<[64,64,3,3],f32>}, %arg14: !torch.tensor {torch.type_bound = !torch.vtensor<[256,64,1,1],f32>}, %arg15: !torch.tensor {torch.type_bound = !torch.vtensor<[256,64,1,1],f32>}, %arg16: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg17: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg18: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg19: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg20: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg21: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg22: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg23: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg24: !torch.tensor {torch.type_bound = !torch.vtensor<[64,256,1,1],f32>}, %arg25: !torch.tensor {torch.type_bound = !torch.vtensor<[64,64,3,3],f32>}, %arg26: !torch.tensor {torch.type_bound = 
!torch.vtensor<[256,64,1,1],f32>}, %arg27: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg28: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg29: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg30: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg31: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg32: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg33: !torch.tensor {torch.type_bound = !torch.vtensor<[64,256,1,1],f32>}, %arg34: !torch.tensor {torch.type_bound = !torch.vtensor<[64,64,3,3],f32>}, %arg35: !torch.tensor {torch.type_bound = !torch.vtensor<[256,64,1,1],f32>}, %arg36: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg37: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg38: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg39: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg40: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg41: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg42: !torch.tensor {torch.type_bound = !torch.vtensor<[128,256,1,1],f32>}, %arg43: !torch.tensor {torch.type_bound = !torch.vtensor<[128,128,3,3],f32>}, %arg44: !torch.tensor {torch.type_bound = !torch.vtensor<[512,128,1,1],f32>}, %arg45: !torch.tensor {torch.type_bound = !torch.vtensor<[512,256,1,1],f32>}, %arg46: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg47: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg48: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg49: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg50: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg51: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg52: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg53: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg54: !torch.tensor 
{torch.type_bound = !torch.vtensor<[128,512,1,1],f32>}, %arg55: !torch.tensor {torch.type_bound = !torch.vtensor<[128,128,3,3],f32>}, %arg56: !torch.tensor {torch.type_bound = !torch.vtensor<[512,128,1,1],f32>}, %arg57: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg58: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg59: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg60: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg61: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg62: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg63: !torch.tensor {torch.type_bound = !torch.vtensor<[128,512,1,1],f32>}, %arg64: !torch.tensor {torch.type_bound = !torch.vtensor<[128,128,3,3],f32>}, %arg65: !torch.tensor {torch.type_bound = !torch.vtensor<[512,128,1,1],f32>}, %arg66: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg67: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg68: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg69: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg70: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg71: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg72: !torch.tensor {torch.type_bound = !torch.vtensor<[128,512,1,1],f32>}, %arg73: !torch.tensor {torch.type_bound = !torch.vtensor<[128,128,3,3],f32>}, %arg74: !torch.tensor {torch.type_bound = !torch.vtensor<[512,128,1,1],f32>}, %arg75: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg76: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg77: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg78: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg79: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg80: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg81: !torch.tensor {torch.type_bound = 
!torch.vtensor<[256,512,1,1],f32>}, %arg82: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg83: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg84: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,512,1,1],f32>}, %arg85: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg86: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg87: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg88: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg89: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg90: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg91: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg92: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg93: !torch.tensor {torch.type_bound = !torch.vtensor<[256,1024,1,1],f32>}, %arg94: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg95: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg96: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg97: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg98: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg99: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg100: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg101: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg102: !torch.tensor {torch.type_bound = !torch.vtensor<[256,1024,1,1],f32>}, %arg103: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg104: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg105: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg106: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg107: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg108: !torch.tensor {torch.type_bound = 
!torch.vtensor<[256],f32>}, %arg109: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg110: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg111: !torch.tensor {torch.type_bound = !torch.vtensor<[256,1024,1,1],f32>}, %arg112: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg113: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg114: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg115: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg116: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg117: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg118: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg119: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg120: !torch.tensor {torch.type_bound = !torch.vtensor<[256,1024,1,1],f32>}, %arg121: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg122: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg123: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg124: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg125: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg126: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg127: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg128: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg129: !torch.tensor {torch.type_bound = !torch.vtensor<[256,1024,1,1],f32>}, %arg130: !torch.tensor {torch.type_bound = !torch.vtensor<[256,256,3,3],f32>}, %arg131: !torch.tensor {torch.type_bound = !torch.vtensor<[1024,256,1,1],f32>}, %arg132: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg133: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg134: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg135: !torch.tensor {torch.type_bound 
= !torch.vtensor<[512],f32>}, %arg136: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg137: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg138: !torch.tensor {torch.type_bound = !torch.vtensor<[512,1024,1,1],f32>}, %arg139: !torch.tensor {torch.type_bound = !torch.vtensor<[512,512,3,3],f32>}, %arg140: !torch.tensor {torch.type_bound = !torch.vtensor<[2048,512,1,1],f32>}, %arg141: !torch.tensor {torch.type_bound = !torch.vtensor<[2048,1024,1,1],f32>}, %arg142: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg143: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg144: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg145: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg146: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg147: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg148: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg149: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg150: !torch.tensor {torch.type_bound = !torch.vtensor<[512,2048,1,1],f32>}, %arg151: !torch.tensor {torch.type_bound = !torch.vtensor<[512,512,3,3],f32>}, %arg152: !torch.tensor {torch.type_bound = !torch.vtensor<[2048,512,1,1],f32>}, %arg153: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg154: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg155: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg156: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg157: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg158: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg159: !torch.tensor {torch.type_bound = !torch.vtensor<[512,2048,1,1],f32>}, %arg160: !torch.tensor {torch.type_bound = !torch.vtensor<[512,512,3,3],f32>}, %arg161: !torch.tensor {torch.type_bound = !torch.vtensor<[2048,512,1,1],f32>}, %arg162: !torch.tensor 
{torch.type_bound = !torch.vtensor<[],si64>}, %arg163: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg164: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg165: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg166: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg167: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg168: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg169: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg170: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg171: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg172: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg173: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg174: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg175: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg176: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg177: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg178: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg179: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg180: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg181: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg182: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg183: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg184: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg185: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg186: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg187: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg188: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg189: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg190: !torch.tensor {torch.type_bound = 
!torch.vtensor<[64],f32>}, %arg191: !torch.tensor {torch.type_bound = !torch.vtensor<[64],f32>}, %arg192: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg193: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg194: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg195: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg196: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg197: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg198: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg199: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg200: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg201: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg202: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg203: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg204: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg205: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg206: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg207: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg208: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg209: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg210: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg211: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg212: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg213: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg214: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg215: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg216: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg217: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg218: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, 
%arg219: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg220: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg221: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg222: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg223: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg224: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg225: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg226: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg227: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg228: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg229: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg230: !torch.tensor {torch.type_bound = !torch.vtensor<[128],f32>}, %arg231: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg232: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg233: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg234: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg235: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg236: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg237: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg238: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg239: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg240: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg241: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg242: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg243: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg244: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg245: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg246: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg247: !torch.tensor 
{torch.type_bound = !torch.vtensor<[256],f32>}, %arg248: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg249: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg250: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg251: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg252: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg253: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg254: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg255: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg256: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg257: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg258: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg259: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg260: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg261: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg262: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg263: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg264: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg265: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg266: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg267: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg268: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg269: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg270: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg271: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg272: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg273: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg274: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg275: !torch.tensor {torch.type_bound = 
!torch.vtensor<[256],f32>}, %arg276: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg277: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg278: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg279: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg280: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg281: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg282: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg283: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg284: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg285: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg286: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg287: !torch.tensor {torch.type_bound = !torch.vtensor<[256],f32>}, %arg288: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg289: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg290: !torch.tensor {torch.type_bound = !torch.vtensor<[1024],f32>}, %arg291: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg292: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg293: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg294: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg295: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg296: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg297: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg298: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg299: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg300: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg301: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg302: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg303: !torch.tensor {torch.type_bound = 
!torch.vtensor<[],si64>}, %arg304: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg305: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg306: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg307: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg308: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg309: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg310: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg311: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg312: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg313: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg314: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg315: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg316: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg317: !torch.tensor {torch.type_bound = !torch.vtensor<[512],f32>}, %arg318: !torch.tensor {torch.type_bound = !torch.vtensor<[],si64>}, %arg319: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg320: !torch.tensor {torch.type_bound = !torch.vtensor<[2048],f32>}, %arg321: !torch.tensor {torch.type_bound = !torch.vtensor<[1,3,224,224],f32>}) -> !torch.tuple<!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor> {
%true = torch.constant.bool true
%int1 = torch.constant.int 1
%1 = torch.prim.ListConstruct %int1, %int1 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%false = torch.constant.bool false
%none = torch.constant.none
%int2 = torch.constant.int 2
%int3 = torch.constant.int 3
%int1_0 = torch.constant.int 1
%int0 = torch.constant.int 0
%float1.000000e-01 = torch.constant.float 1.000000e-01
%float1.000000e-05 = torch.constant.float 1.000000e-05
%int-1 = torch.constant.int -1
%int-2 = torch.constant.int -2
%int2048 = torch.constant.int 2048
%2 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%3 = torch.prim.ListConstruct %int3, %int3 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%4 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%5 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
// --- Stem: 7x7 stride-2 convolution -> batch-norm -> ReLU -> 3x3 stride-2
// max-pool. Weight %arg3 is bound to !torch.vtensor<[64,3,7,7],f32> in the
// signature, so %arg321 is presumably the NCHW input image tensor — confirm
// against the caller.
// aten.convolution operand order: (input, weight, bias, stride, padding,
// dilation, transposed, output_padding, groups); bias is %none here.
%6 = torch.operator "aten.convolution"(%arg321, %arg3, %none, %2, %3, %4, %false, %5, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
// native_batch_norm(input, weight, bias, running_mean, running_var,
// training=%false, momentum, eps) — inference mode; extra results are the
// saved mean / inv-std, unused in this excerpt.
%result0, %result1, %result2 = torch.aten.native_batch_norm %6, %arg2, %arg1, %arg163, %arg164, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// In-place ReLU (trailing underscore = mutating variant).
%7 = torch.aten.relu_ %result0 : !torch.tensor -> !torch.tensor
// max_pool2d_with_indices(kernel=[3,3], stride=[2,2], padding=[1,1],
// dilation=%1=[1,1], ceil_mode=false); %11#1 (indices) is unused here.
%8 = torch.prim.ListConstruct %int3, %int3 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%9 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%10 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%11:2 = torch.operator "aten.max_pool2d_with_indices"(%7, %8, %9, %10, %1, %false) : (!torch.tensor, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool) -> (!torch.tensor, !torch.tensor)
// --- Bottleneck residual block with projection shortcut (weight shapes in
// the signature — 64x64x1x1, 64x64x3x3, 256x64x1x1, plus 256x64x1x1 for the
// shortcut — match a ResNet-50 "layer1.0" block; NOTE(review): stage naming
// inferred, confirm against the exporting model).
// Main path, step 1: 1x1 convolution (%arg12) + BN + ReLU.
%12 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%13 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%14 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%15 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%16 = torch.operator "aten.convolution"(%11#0, %arg12, %none, %12, %13, %14, %false, %15, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_1, %result1_2, %result2_3 = torch.aten.native_batch_norm %16, %arg7, %arg6, %arg166, %arg167, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%17 = torch.aten.relu_ %result0_1 : !torch.tensor -> !torch.tensor
// Step 2: 3x3 convolution (%arg13, stride 1, padding 1) + BN + ReLU.
%18 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%19 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%20 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%21 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%22 = torch.operator "aten.convolution"(%17, %arg13, %none, %18, %19, %20, %false, %21, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_4, %result1_5, %result2_6 = torch.aten.native_batch_norm %22, %arg9, %arg8, %arg169, %arg170, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%23 = torch.aten.relu_ %result0_4 : !torch.tensor -> !torch.tensor
// Step 3: 1x1 expansion convolution (%arg14) + BN (no ReLU before the add).
%24 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%25 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%26 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%27 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%28 = torch.operator "aten.convolution"(%23, %arg14, %none, %24, %25, %26, %false, %27, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_7, %result1_8, %result2_9 = torch.aten.native_batch_norm %28, %arg11, %arg10, %arg172, %arg173, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Projection shortcut: 1x1 convolution (%arg15) + BN on the max-pool output
// %11#0, so channel counts match for the residual add.
%29 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%30 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%31 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%32 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%33 = torch.operator "aten.convolution"(%11#0, %arg15, %none, %29, %30, %31, %false, %32, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_10, %result1_11, %result2_12 = torch.aten.native_batch_norm %33, %arg17, %arg16, %arg175, %arg176, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Residual add (in-place, alpha=1) followed by in-place ReLU.
%34 = torch.aten.add_.Tensor %result0_7, %result0_10, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%35 = torch.aten.relu_ %34 : !torch.tensor -> !torch.tensor
// --- Bottleneck residual block with identity shortcut: 1x1 (%arg24,
// 64x256x1x1 per signature) -> 3x3 (%arg25) -> 1x1 (%arg26), each followed by
// BN; the block input %35 is added back unchanged at the end.
%36 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%37 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%38 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%39 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%40 = torch.operator "aten.convolution"(%35, %arg24, %none, %36, %37, %38, %false, %39, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_13, %result1_14, %result2_15 = torch.aten.native_batch_norm %40, %arg19, %arg18, %arg178, %arg179, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%41 = torch.aten.relu_ %result0_13 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%42 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%43 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%44 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%45 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%46 = torch.operator "aten.convolution"(%41, %arg25, %none, %42, %43, %44, %false, %45, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_16, %result1_17, %result2_18 = torch.aten.native_batch_norm %46, %arg21, %arg20, %arg181, %arg182, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%47 = torch.aten.relu_ %result0_16 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%48 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%49 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%50 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%51 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%52 = torch.operator "aten.convolution"(%47, %arg26, %none, %48, %49, %50, %false, %51, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_19, %result1_20, %result2_21 = torch.aten.native_batch_norm %52, %arg23, %arg22, %arg184, %arg185, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%53 = torch.aten.add_.Tensor %result0_19, %35, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%54 = torch.aten.relu_ %53 : !torch.tensor -> !torch.tensor
// --- Bottleneck residual block with identity shortcut: same 1x1 -> 3x3 ->
// 1x1 + BN pattern as above, using weights %arg33/%arg34/%arg35; input %54 is
// added back at the end.
%55 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%56 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%57 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%58 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%59 = torch.operator "aten.convolution"(%54, %arg33, %none, %55, %56, %57, %false, %58, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_22, %result1_23, %result2_24 = torch.aten.native_batch_norm %59, %arg28, %arg27, %arg187, %arg188, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%60 = torch.aten.relu_ %result0_22 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%61 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%62 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%63 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%64 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%65 = torch.operator "aten.convolution"(%60, %arg34, %none, %61, %62, %63, %false, %64, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_25, %result1_26, %result2_27 = torch.aten.native_batch_norm %65, %arg30, %arg29, %arg190, %arg191, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%66 = torch.aten.relu_ %result0_25 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%67 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%68 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%69 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%70 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%71 = torch.operator "aten.convolution"(%66, %arg35, %none, %67, %68, %69, %false, %70, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_28, %result1_29, %result2_30 = torch.aten.native_batch_norm %71, %arg32, %arg31, %arg193, %arg194, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%72 = torch.aten.add_.Tensor %result0_28, %54, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%73 = torch.aten.relu_ %72 : !torch.tensor -> !torch.tensor
// --- Downsampling bottleneck block (next stage): the 3x3 convolution and
// the projection shortcut both use stride [2,2], halving spatial resolution.
// NOTE(review): stage naming ("layer2.0") inferred from the stride pattern.
%74 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%75 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%76 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%77 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%78 = torch.operator "aten.convolution"(%73, %arg42, %none, %74, %75, %76, %false, %77, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_31, %result1_32, %result2_33 = torch.aten.native_batch_norm %78, %arg37, %arg36, %arg196, %arg197, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%79 = torch.aten.relu_ %result0_31 : !torch.tensor -> !torch.tensor
// 3x3 convolution with stride [2,2] (%80), padding 1.
%80 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%81 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%82 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%83 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%84 = torch.operator "aten.convolution"(%79, %arg43, %none, %80, %81, %82, %false, %83, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_34, %result1_35, %result2_36 = torch.aten.native_batch_norm %84, %arg39, %arg38, %arg199, %arg200, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%85 = torch.aten.relu_ %result0_34 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN (no ReLU before the residual add).
%86 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%87 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%88 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%89 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%90 = torch.operator "aten.convolution"(%85, %arg44, %none, %86, %87, %88, %false, %89, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_37, %result1_38, %result2_39 = torch.aten.native_batch_norm %90, %arg41, %arg40, %arg202, %arg203, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Projection shortcut: 1x1 convolution with stride [2,2] (%91) + BN applied
// to the block input %73 so shapes match the downsampled main path.
%91 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%92 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%93 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%94 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%95 = torch.operator "aten.convolution"(%73, %arg45, %none, %91, %92, %93, %false, %94, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_40, %result1_41, %result2_42 = torch.aten.native_batch_norm %95, %arg47, %arg46, %arg205, %arg206, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Residual add (alpha=1) and ReLU, both in-place.
%96 = torch.aten.add_.Tensor %result0_37, %result0_40, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%97 = torch.aten.relu_ %96 : !torch.tensor -> !torch.tensor
// --- Identity-shortcut bottleneck block (weights %arg54/%arg55/%arg56);
// input %97 is added back at the end.
%98 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%99 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%100 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%101 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%102 = torch.operator "aten.convolution"(%97, %arg54, %none, %98, %99, %100, %false, %101, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_43, %result1_44, %result2_45 = torch.aten.native_batch_norm %102, %arg49, %arg48, %arg208, %arg209, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%103 = torch.aten.relu_ %result0_43 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%104 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%105 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%106 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%107 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%108 = torch.operator "aten.convolution"(%103, %arg55, %none, %104, %105, %106, %false, %107, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_46, %result1_47, %result2_48 = torch.aten.native_batch_norm %108, %arg51, %arg50, %arg211, %arg212, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%109 = torch.aten.relu_ %result0_46 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%110 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%111 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%112 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%113 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%114 = torch.operator "aten.convolution"(%109, %arg56, %none, %110, %111, %112, %false, %113, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_49, %result1_50, %result2_51 = torch.aten.native_batch_norm %114, %arg53, %arg52, %arg214, %arg215, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%115 = torch.aten.add_.Tensor %result0_49, %97, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%116 = torch.aten.relu_ %115 : !torch.tensor -> !torch.tensor
// --- Identity-shortcut bottleneck block (weights %arg63/%arg64/%arg65);
// input %116 is added back at the end.
%117 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%118 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%119 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%120 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%121 = torch.operator "aten.convolution"(%116, %arg63, %none, %117, %118, %119, %false, %120, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_52, %result1_53, %result2_54 = torch.aten.native_batch_norm %121, %arg58, %arg57, %arg217, %arg218, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%122 = torch.aten.relu_ %result0_52 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%123 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%124 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%125 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%126 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%127 = torch.operator "aten.convolution"(%122, %arg64, %none, %123, %124, %125, %false, %126, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_55, %result1_56, %result2_57 = torch.aten.native_batch_norm %127, %arg60, %arg59, %arg220, %arg221, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%128 = torch.aten.relu_ %result0_55 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%129 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%130 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%131 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%132 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%133 = torch.operator "aten.convolution"(%128, %arg65, %none, %129, %130, %131, %false, %132, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_58, %result1_59, %result2_60 = torch.aten.native_batch_norm %133, %arg62, %arg61, %arg223, %arg224, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%134 = torch.aten.add_.Tensor %result0_58, %116, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%135 = torch.aten.relu_ %134 : !torch.tensor -> !torch.tensor
// --- Identity-shortcut bottleneck block (weights %arg72/%arg73/%arg74);
// input %135 is added back at the end.
%136 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%137 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%138 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%139 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%140 = torch.operator "aten.convolution"(%135, %arg72, %none, %136, %137, %138, %false, %139, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_61, %result1_62, %result2_63 = torch.aten.native_batch_norm %140, %arg67, %arg66, %arg226, %arg227, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%141 = torch.aten.relu_ %result0_61 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%142 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%143 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%144 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%145 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%146 = torch.operator "aten.convolution"(%141, %arg73, %none, %142, %143, %144, %false, %145, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_64, %result1_65, %result2_66 = torch.aten.native_batch_norm %146, %arg69, %arg68, %arg229, %arg230, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%147 = torch.aten.relu_ %result0_64 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%148 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%149 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%150 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%151 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%152 = torch.operator "aten.convolution"(%147, %arg74, %none, %148, %149, %150, %false, %151, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_67, %result1_68, %result2_69 = torch.aten.native_batch_norm %152, %arg71, %arg70, %arg232, %arg233, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%153 = torch.aten.add_.Tensor %result0_67, %135, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%154 = torch.aten.relu_ %153 : !torch.tensor -> !torch.tensor
// --- Downsampling bottleneck block (next stage): 3x3 convolution and the
// projection shortcut both use stride [2,2], halving spatial resolution
// again. NOTE(review): stage naming ("layer3.0") inferred from stride pattern.
%155 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%156 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%157 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%158 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%159 = torch.operator "aten.convolution"(%154, %arg81, %none, %155, %156, %157, %false, %158, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_70, %result1_71, %result2_72 = torch.aten.native_batch_norm %159, %arg76, %arg75, %arg235, %arg236, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%160 = torch.aten.relu_ %result0_70 : !torch.tensor -> !torch.tensor
// 3x3 convolution with stride [2,2] (%161), padding 1.
%161 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%162 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%163 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%164 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%165 = torch.operator "aten.convolution"(%160, %arg82, %none, %161, %162, %163, %false, %164, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_73, %result1_74, %result2_75 = torch.aten.native_batch_norm %165, %arg78, %arg77, %arg238, %arg239, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%166 = torch.aten.relu_ %result0_73 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN (no ReLU before the residual add).
%167 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%168 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%169 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%170 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%171 = torch.operator "aten.convolution"(%166, %arg83, %none, %167, %168, %169, %false, %170, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_76, %result1_77, %result2_78 = torch.aten.native_batch_norm %171, %arg80, %arg79, %arg241, %arg242, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Projection shortcut: 1x1 stride-2 convolution (%arg84) + BN on block
// input %154.
%172 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%173 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%174 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%175 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%176 = torch.operator "aten.convolution"(%154, %arg84, %none, %172, %173, %174, %false, %175, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_79, %result1_80, %result2_81 = torch.aten.native_batch_norm %176, %arg86, %arg85, %arg244, %arg245, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
// Residual add (alpha=1) and ReLU, both in-place.
%177 = torch.aten.add_.Tensor %result0_76, %result0_79, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%178 = torch.aten.relu_ %177 : !torch.tensor -> !torch.tensor
// --- Identity-shortcut bottleneck block (weights %arg93/%arg94/%arg95);
// input %178 is added back at the end.
%179 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%180 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%181 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%182 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%183 = torch.operator "aten.convolution"(%178, %arg93, %none, %179, %180, %181, %false, %182, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_82, %result1_83, %result2_84 = torch.aten.native_batch_norm %183, %arg88, %arg87, %arg247, %arg248, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%184 = torch.aten.relu_ %result0_82 : !torch.tensor -> !torch.tensor
// 3x3 convolution, stride 1, padding 1.
%185 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%186 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%187 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%188 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%189 = torch.operator "aten.convolution"(%184, %arg94, %none, %185, %186, %187, %false, %188, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_85, %result1_86, %result2_87 = torch.aten.native_batch_norm %189, %arg90, %arg89, %arg250, %arg251, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%190 = torch.aten.relu_ %result0_85 : !torch.tensor -> !torch.tensor
// 1x1 expansion + BN, then identity residual add and ReLU.
%191 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%192 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%193 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%194 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%195 = torch.operator "aten.convolution"(%190, %arg95, %none, %191, %192, %193, %false, %194, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_88, %result1_89, %result2_90 = torch.aten.native_batch_norm %195, %arg92, %arg91, %arg253, %arg254, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%196 = torch.aten.add_.Tensor %result0_88, %178, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%197 = torch.aten.relu_ %196 : !torch.tensor -> !torch.tensor
%198 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%199 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%200 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%201 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%202 = torch.operator "aten.convolution"(%197, %arg102, %none, %198, %199, %200, %false, %201, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_91, %result1_92, %result2_93 = torch.aten.native_batch_norm %202, %arg97, %arg96, %arg256, %arg257, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%203 = torch.aten.relu_ %result0_91 : !torch.tensor -> !torch.tensor
%204 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%205 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%206 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%207 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%208 = torch.operator "aten.convolution"(%203, %arg103, %none, %204, %205, %206, %false, %207, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_94, %result1_95, %result2_96 = torch.aten.native_batch_norm %208, %arg99, %arg98, %arg259, %arg260, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%209 = torch.aten.relu_ %result0_94 : !torch.tensor -> !torch.tensor
%210 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%211 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%212 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%213 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%214 = torch.operator "aten.convolution"(%209, %arg104, %none, %210, %211, %212, %false, %213, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_97, %result1_98, %result2_99 = torch.aten.native_batch_norm %214, %arg101, %arg100, %arg262, %arg263, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%215 = torch.aten.add_.Tensor %result0_97, %197, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%216 = torch.aten.relu_ %215 : !torch.tensor -> !torch.tensor
%217 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%218 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%219 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%220 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%221 = torch.operator "aten.convolution"(%216, %arg111, %none, %217, %218, %219, %false, %220, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_100, %result1_101, %result2_102 = torch.aten.native_batch_norm %221, %arg106, %arg105, %arg265, %arg266, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%222 = torch.aten.relu_ %result0_100 : !torch.tensor -> !torch.tensor
%223 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%224 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%225 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%226 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%227 = torch.operator "aten.convolution"(%222, %arg112, %none, %223, %224, %225, %false, %226, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_103, %result1_104, %result2_105 = torch.aten.native_batch_norm %227, %arg108, %arg107, %arg268, %arg269, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%228 = torch.aten.relu_ %result0_103 : !torch.tensor -> !torch.tensor
%229 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%230 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%231 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%232 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%233 = torch.operator "aten.convolution"(%228, %arg113, %none, %229, %230, %231, %false, %232, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_106, %result1_107, %result2_108 = torch.aten.native_batch_norm %233, %arg110, %arg109, %arg271, %arg272, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%234 = torch.aten.add_.Tensor %result0_106, %216, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%235 = torch.aten.relu_ %234 : !torch.tensor -> !torch.tensor
%236 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%237 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%238 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%239 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%240 = torch.operator "aten.convolution"(%235, %arg120, %none, %236, %237, %238, %false, %239, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_109, %result1_110, %result2_111 = torch.aten.native_batch_norm %240, %arg115, %arg114, %arg274, %arg275, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%241 = torch.aten.relu_ %result0_109 : !torch.tensor -> !torch.tensor
%242 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%243 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%244 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%245 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%246 = torch.operator "aten.convolution"(%241, %arg121, %none, %242, %243, %244, %false, %245, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_112, %result1_113, %result2_114 = torch.aten.native_batch_norm %246, %arg117, %arg116, %arg277, %arg278, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%247 = torch.aten.relu_ %result0_112 : !torch.tensor -> !torch.tensor
%248 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%249 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%250 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%251 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%252 = torch.operator "aten.convolution"(%247, %arg122, %none, %248, %249, %250, %false, %251, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_115, %result1_116, %result2_117 = torch.aten.native_batch_norm %252, %arg119, %arg118, %arg280, %arg281, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%253 = torch.aten.add_.Tensor %result0_115, %235, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%254 = torch.aten.relu_ %253 : !torch.tensor -> !torch.tensor
%255 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%256 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%257 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%258 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%259 = torch.operator "aten.convolution"(%254, %arg129, %none, %255, %256, %257, %false, %258, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_118, %result1_119, %result2_120 = torch.aten.native_batch_norm %259, %arg124, %arg123, %arg283, %arg284, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%260 = torch.aten.relu_ %result0_118 : !torch.tensor -> !torch.tensor
%261 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%262 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%263 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%264 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%265 = torch.operator "aten.convolution"(%260, %arg130, %none, %261, %262, %263, %false, %264, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_121, %result1_122, %result2_123 = torch.aten.native_batch_norm %265, %arg126, %arg125, %arg286, %arg287, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%266 = torch.aten.relu_ %result0_121 : !torch.tensor -> !torch.tensor
%267 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%268 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%269 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%270 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%271 = torch.operator "aten.convolution"(%266, %arg131, %none, %267, %268, %269, %false, %270, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_124, %result1_125, %result2_126 = torch.aten.native_batch_norm %271, %arg128, %arg127, %arg289, %arg290, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%272 = torch.aten.add_.Tensor %result0_124, %254, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%273 = torch.aten.relu_ %272 : !torch.tensor -> !torch.tensor
%274 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%275 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%276 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%277 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%278 = torch.operator "aten.convolution"(%273, %arg138, %none, %274, %275, %276, %false, %277, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_127, %result1_128, %result2_129 = torch.aten.native_batch_norm %278, %arg133, %arg132, %arg292, %arg293, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%279 = torch.aten.relu_ %result0_127 : !torch.tensor -> !torch.tensor
%280 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%281 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%282 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%283 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%284 = torch.operator "aten.convolution"(%279, %arg139, %none, %280, %281, %282, %false, %283, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_130, %result1_131, %result2_132 = torch.aten.native_batch_norm %284, %arg135, %arg134, %arg295, %arg296, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%285 = torch.aten.relu_ %result0_130 : !torch.tensor -> !torch.tensor
%286 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%287 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%288 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%289 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%290 = torch.operator "aten.convolution"(%285, %arg140, %none, %286, %287, %288, %false, %289, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_133, %result1_134, %result2_135 = torch.aten.native_batch_norm %290, %arg137, %arg136, %arg298, %arg299, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%291 = torch.prim.ListConstruct %int2, %int2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%292 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%293 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%294 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%295 = torch.operator "aten.convolution"(%273, %arg141, %none, %291, %292, %293, %false, %294, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_136, %result1_137, %result2_138 = torch.aten.native_batch_norm %295, %arg143, %arg142, %arg301, %arg302, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%296 = torch.aten.add_.Tensor %result0_133, %result0_136, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%297 = torch.aten.relu_ %296 : !torch.tensor -> !torch.tensor
%298 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%299 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%300 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%301 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%302 = torch.operator "aten.convolution"(%297, %arg150, %none, %298, %299, %300, %false, %301, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_139, %result1_140, %result2_141 = torch.aten.native_batch_norm %302, %arg145, %arg144, %arg304, %arg305, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%303 = torch.aten.relu_ %result0_139 : !torch.tensor -> !torch.tensor
%304 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%305 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%306 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%307 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%308 = torch.operator "aten.convolution"(%303, %arg151, %none, %304, %305, %306, %false, %307, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_142, %result1_143, %result2_144 = torch.aten.native_batch_norm %308, %arg147, %arg146, %arg307, %arg308, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%309 = torch.aten.relu_ %result0_142 : !torch.tensor -> !torch.tensor
%310 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%311 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%312 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%313 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%314 = torch.operator "aten.convolution"(%309, %arg152, %none, %310, %311, %312, %false, %313, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_145, %result1_146, %result2_147 = torch.aten.native_batch_norm %314, %arg149, %arg148, %arg310, %arg311, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%315 = torch.aten.add_.Tensor %result0_145, %297, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%316 = torch.aten.relu_ %315 : !torch.tensor -> !torch.tensor
%317 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%318 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%319 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%320 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%321 = torch.operator "aten.convolution"(%316, %arg159, %none, %317, %318, %319, %false, %320, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_148, %result1_149, %result2_150 = torch.aten.native_batch_norm %321, %arg154, %arg153, %arg313, %arg314, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%322 = torch.aten.relu_ %result0_148 : !torch.tensor -> !torch.tensor
%323 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%324 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%325 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%326 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%327 = torch.operator "aten.convolution"(%322, %arg160, %none, %323, %324, %325, %false, %326, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_151, %result1_152, %result2_153 = torch.aten.native_batch_norm %327, %arg156, %arg155, %arg316, %arg317, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%328 = torch.aten.relu_ %result0_151 : !torch.tensor -> !torch.tensor
%329 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%330 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%331 = torch.prim.ListConstruct %int1_0, %int1_0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%332 = torch.prim.ListConstruct %int0, %int0 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%333 = torch.operator "aten.convolution"(%328, %arg161, %none, %329, %330, %331, %false, %332, %int1_0) : (!torch.tensor, !torch.tensor, !torch.none, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.list<!torch.int>, !torch.bool, !torch.list<!torch.int>, !torch.int) -> !torch.tensor
%result0_154, %result1_155, %result2_156 = torch.aten.native_batch_norm %333, %arg158, %arg157, %arg319, %arg320, %false, %float1.000000e-01, %float1.000000e-05 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.bool, !torch.float, !torch.float -> !torch.tensor, !torch.tensor, !torch.tensor
%334 = torch.aten.add_.Tensor %result0_154, %316, %int1_0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
%335 = torch.aten.relu_ %334 : !torch.tensor -> !torch.tensor
%336 = torch.prim.ListConstruct %int-1, %int-2 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%337 = torch.aten.mean.dim %335, %336, %true, %none : !torch.tensor, !torch.list<!torch.int>, !torch.bool, !torch.none -> !torch.tensor
%338 = torch.prim.ListConstruct %int1_0, %int2048 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%339 = torch.aten.view %337, %338 : !torch.tensor, !torch.list<!torch.int> -> !torch.tensor
%340 = torch.aten.t %arg5 : !torch.tensor -> !torch.tensor
%341 = torch.aten.addmm %arg4, %339, %340, %int1_0, %int1_0 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%342 = torch.aten.t %340 : !torch.tensor -> !torch.tensor
%343 = torch.prim.TupleConstruct %341, %result2_69, %arg287, %arg226, %arg140, %228, %41, %arg269, %84, %arg272, %140, %141, %result2_120, %result1_104, %result1_134, %arg54, %arg112, %arg34, %result1_110, %arg32, %arg170, %241, %arg194, %135, %333, %arg39, %result2_75, %127, %arg317, %arg310, %303, %arg113, %arg3, %result1_116, %result2_141, %arg271, %arg245, %arg83, %309, %arg203, %79, %arg86, %result2_81, %arg115, %arg166, %result1, %arg84, %arg23, %arg265, %arg119, %arg56, %result2_123, %result2_138, %240, %122, %128, %arg44, %339, %arg64, %result1_23, %17, %result2_156, %arg301, %arg60, %arg92, %arg319, %arg160, %result2_30, %arg17, %result1_95, %266, %47, %arg159, %arg308, %327, %arg126, %279, %arg21, %result1_122, %arg90, %85, %arg295, %arg208, %arg139, %183, %result2_105, %322, %arg209, %result2_6, %147, %302, %result2_45, %arg239, %result1_143, %297, %arg122, %arg244, %result1_125, %arg320, %246, %328, %arg256, %arg230, %arg129, %result1_77, %arg2, %16, %arg93, %35, %arg71, %222, %result1_146, %result1_47, %result1_44, %arg150, %54, %52, %arg266, %arg283, %arg298, %133, %arg281, %arg88, %259, %321, %arg238, %214, %222, %arg248, %285, %221, %result1_131, %59, %result2_93, %arg47, %28, %235, %178, %arg33, %arg304, %arg53, %result2_147, %arg277, %result2_126, %arg247, %309, %result2_63, %result2_96, %285, %arg72, %216, %arg314, %result1_152, %arg206, %arg104, %arg250, %result1_11, %result1_38, %66, %result2_48, %result1_29, %290, %arg65, %arg152, %33, %result2_78, %result2_99, %result2_144, %178, %result2_153, %arg290, %arg151, %7, %279, %65, %result1_83, %arg99, %273, %arg78, %result2_42, %arg268, %116, %176, %arg55, %result1_80, %result1_71, %result2_9, %result2_54, %result2_72, %121, %arg241, %result2_102, %arg121, %arg321, %arg316, %arg164, %arg26, %arg190, %208, %184, %arg251, %result2_21, %result1_8, %arg67, %166, %arg108, %arg9, %arg233, %108, %arg274, %arg296, %35, %arg128, %arg130, %97, %arg133, %46, %arg280, %result2_39, %260, %165, %54, 
%result1_50, %arg200, %202, %result2_135, %arg7, %322, %314, %arg286, %result1_32, %arg30, %arg257, %arg263, %95, %arg41, %arg24, %result1_74, %arg73, %arg215, %result1_140, %23, %103, %209, %arg182, %arg82, %160, %result1_56, %arg163, %result2_129, %arg62, %241, %arg197, %247, %arg145, %result2_114, %17, %184, %arg42, %arg35, %arg45, %266, %265, %arg25, %arg223, %arg124, %arg218, %arg220, %190, %arg37, %arg103, %result1_5, %arg117, %result1_119, %arg147, %233, %arg111, %203, %arg12, %result2_150, %result1_149, %arg135, %arg156, %arg81, %271, %171, %result1_20, %195, %result2_15, %result1_113, %result2_36, %arg260, %116, %284, %arg69, %arg120, %arg158, %arg202, %66, %arg63, %23, %result1_98, %273, %71, %result1_86, %result2_117, %6, %arg43, %arg188, %7, %arg235, %278, %result1_107, %235, %result2_33, %arg14, %arg97, %197, %203, %135, %result2_108, %arg284, %arg187, %result1_2, %result1_62, %arg58, %arg102, %90, %arg149, %73, %arg141, %102, %128, %result2_87, %arg161, %arg80, %result1_35, %result2, %result1_17, %arg106, %result1_128, %arg179, %arg292, %arg214, %arg175, %22, %41, %260, %result2_51, %arg143, %arg229, %arg275, %result2_60, %arg289, %arg217, %342, %arg184, %254, %result2_12, %328, %arg185, %335, %result1_59, %297, %arg138, %arg13, %197, %result2_3, %arg299, %result2_66, %316, %arg167, %result2_90, %result2_132, %result1_68, %arg191, %result1_41, %arg254, %arg221, %arg236, %254, %103, %114, %arg313, %arg181, %arg193, %arg253, %result1_14, %arg278, %216, %154, %11#0, %160, %arg95, %arg110, %arg11, %arg262, %159, %154, %arg101, %arg293, %60, %arg232, %result1_89, %result1_65, %result1_92, %252, %146, %arg211, %47, %arg131, %303, %arg305, %78, %40, %arg76, %arg176, %79, %316, %arg28, %arg224, %arg242, %arg227, %result2_84, %arg172, %166, %arg307, %308, %190, %arg311, %73, %arg259, %227, %arg94, %295, %147, %result1_137, %arg19, %arg205, %85, %209, %result2_24, %11#1, %60, %result1_155, %122, %arg137, %result2_57, %result2_111, %arg74, %97, %arg199, %arg49, 
%arg212, %141, %result2_27, %arg51, %arg302, %arg178, %result1_101, %arg173, %arg196, %result1_53, %189, %228, %arg169, %result2_18, %arg15, %152, %247, %arg154, %result1_26, %109, %109 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor -> !torch.tuple<!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor>
return %343 : !torch.tuple<!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, 
!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor>
}
// Declares the TorchScript class type for the traced FX GraphModule.
// The class exposes exactly one method, "forward", bound to the free
// function of the same mangled name defined earlier in this module.
torch.class_type @__torch__.torch.fx.graph_module.___torch_mangle_0.GraphModule {
torch.method "forward", @__torch__.torch.fx.graph_module.___torch_mangle_0.GraphModule.forward
}
// Instantiates the module object (%0) of the class type declared just above.
// The initializer region is empty, i.e. the instance carries no slots —
// NOTE(review): the weights/buffers appear to be threaded through forward's
// arguments (%arg1..%argN with torch.type_bound) rather than stored as
// module attributes; confirm against the exporter that produced this IR.
%0 = torch.nn_module {
} : !torch.nn.Module<"__torch__.torch.fx.graph_module.___torch_mangle_0.GraphModule">
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment