Skip to content

Instantly share code, notes, and snippets.

@pashu123
Created May 5, 2022 10:27
Show Gist options
  • Select an option

  • Save pashu123/22e0bf4f9f913a366df0c7cd00ce0205 to your computer and use it in GitHub Desktop.
// torch-mlir (Torch dialect) IR imported from a torch.fx GraphModule named
// "forward". The traced graph (reproduced verbatim in the "_code" string
// below) performs one SGD-style training step on a 3x3 linear layer:
// forward pass (addmm), scalar loss (sum), a hand-traced backward pass,
// and in-place parameter updates with alpha = -0.01.
#loc0 = loc(unknown)
module attributes {torch.debug_module_name = "forward"} {
// Getter method for the GraphModule's "_code" attribute — the generated
// Python source of the traced graph, stored as a string slot on the module.
func private @__torch__.torch.fx.graph_module.forward.__code_getter(%arg0: !torch.nn.Module<"__torch__.torch.fx.graph_module.forward"> loc(unknown)) -> !torch.str {
%1 = torch.prim.GetAttr %arg0["_code"] : !torch.nn.Module<"__torch__.torch.fx.graph_module.forward"> -> !torch.str loc(#loc0)
return %1 : !torch.str loc(#loc0)
} loc(#loc0)
// forward(params_1, params_2, args_1):
//   %arg1 = params_1: weight, bound to vtensor<[3,3],f32>
//   %arg2 = params_2: bias,   bound to vtensor<[3],f32>
//   %arg3 = args_1:   input,  bound to vtensor<[3,3],f32>
// Returns the two updated parameters as a (weight, bias) tuple.
func private @__torch__.torch.fx.graph_module.forward.forward(%arg0: !torch.nn.Module<"__torch__.torch.fx.graph_module.forward"> loc(unknown), %arg1: !torch.tensor {torch.type_bound = !torch.vtensor<[3,3],f32>} loc(unknown), %arg2: !torch.tensor {torch.type_bound = !torch.vtensor<[3],f32>} loc(unknown), %arg3: !torch.tensor {torch.type_bound = !torch.vtensor<[3,3],f32>} loc(unknown)) -> !torch.tuple<tensor, tensor> {
// Constants: the SGD step scale (-0.01), booleans/none for op arguments,
// and ints used as dtype (6 = float32), dim index, alpha, and shape values.
%float-1.000000e-02 = torch.constant.float -1.000000e-02 loc(#loc1)
%true_0 = torch.constant.bool true loc(#loc2)
%false = torch.constant.bool false loc(#loc3)
%cpu = torch.constant.device "cpu" loc(#loc0)
%none_1 = torch.constant.none loc(#loc0)
%int6 = torch.constant.int 6 loc(#loc4)
%int0 = torch.constant.int 0 loc(#loc5)
%int1 = torch.constant.int 1 loc(#loc6)
%int3 = torch.constant.int 3 loc(#loc7)
// Forward pass: out = bias + input @ weight^T (linear layer via addmm).
%1 = torch.aten.t %arg1 : !torch.tensor -> !torch.tensor loc(#loc8)
%2 = torch.aten.addmm %arg2, %arg3, %1, %int1, %int1 : !torch.tensor, !torch.tensor, !torch.tensor, !torch.int, !torch.int -> !torch.tensor loc(#loc9)
// Scalar loss: sum of all output elements.
%3 = torch.aten.sum %2, %none_1 : !torch.tensor, !torch.none -> !torch.tensor loc(#loc10)
// Backward pass, traced by hand into the graph. Gradient seed
// d(loss)/d(loss) = 1 (float32, CPU), broadcast to the [3,3] output shape.
%4 = torch.aten.ones_like %3, %int6, %int0, %cpu, %false, %int1 : !torch.tensor, !torch.int, !torch.int, !torch.Device, !torch.bool, !torch.int -> !torch.tensor loc(#loc11)
%5 = torch.prim.ListConstruct %int3, %int3 : (!torch.int, !torch.int) -> !torch.list<int> loc(#loc0)
%6 = torch.aten.expand %4, %5, %false : !torch.tensor, !torch.list<int>, !torch.bool -> !torch.tensor loc(#loc12)
// Weight gradient: (grad_out^T @ input)^T.
%7 = torch.aten.t %6 : !torch.tensor -> !torch.tensor loc(#loc13)
%8 = torch.aten.mm %7, %arg3 : !torch.tensor, !torch.tensor -> !torch.tensor loc(#loc14)
%9 = torch.aten.t %8 : !torch.tensor -> !torch.tensor loc(#loc15)
// Bias gradient: sum grad_out over dim 0 (keepdim), then view as [3].
%10 = torch.prim.ListConstruct %int0 : (!torch.int) -> !torch.list<int> loc(#loc0)
%11 = torch.aten.sum.dim_IntList %6, %10, %true_0, %none_1 : !torch.tensor, !torch.list<int>, !torch.bool, !torch.none -> !torch.tensor loc(#loc16)
%12 = torch.prim.ListConstruct %int3 : (!torch.int) -> !torch.list<int> loc(#loc0)
%13 = torch.aten.view %11, %12 : !torch.tensor, !torch.list<int> -> !torch.tensor loc(#loc17)
// Double detach of each gradient, as emitted by the trace.
%14 = torch.aten.detach %13 : !torch.tensor -> !torch.tensor loc(#loc18)
%15 = torch.aten.detach %14 : !torch.tensor -> !torch.tensor loc(#loc19)
%16 = torch.aten.t %9 : !torch.tensor -> !torch.tensor loc(#loc20)
%17 = torch.aten.detach %16 : !torch.tensor -> !torch.tensor loc(#loc21)
%18 = torch.aten.detach %17 : !torch.tensor -> !torch.tensor loc(#loc22)
// In-place SGD update: param.add_(grad, alpha=-0.01) for bias then weight.
%19 = torch.aten.add_.Tensor %arg2, %15, %float-1.000000e-02 : !torch.tensor, !torch.tensor, !torch.float -> !torch.tensor loc(#loc23)
%20 = torch.aten.add_.Tensor %arg1, %18, %float-1.000000e-02 : !torch.tensor, !torch.tensor, !torch.float -> !torch.tensor loc(#loc24)
// Return (updated_weight, updated_bias).
%21 = torch.prim.TupleConstruct %20, %19 : !torch.tensor, !torch.tensor -> !torch.tuple<tensor, tensor> loc(#loc0)
return %21 : !torch.tuple<tensor, tensor> loc(#loc0)
} loc(#loc0)
// Class declaration for the GraphModule: its attributes and methods.
torch.class_type @__torch__.torch.fx.graph_module.forward {
torch.attr private "training" : !torch.bool loc(#loc0)
torch.attr private "_is_full_backward_hook" : !torch.optional<bool> loc(#loc0)
torch.attr private "_code" : !torch.str loc(#loc0)
torch.method private "__code_getter", @__torch__.torch.fx.graph_module.forward.__code_getter loc(#loc0)
torch.method "forward", @__torch__.torch.fx.graph_module.forward.forward loc(#loc0)
} loc(#loc0)
// Slot initializers for the module instance. %str holds the exact Python
// source torch.fx generated for the traced graph ("\0A" = newline).
%true = torch.constant.bool true loc(#loc0)
%none = torch.constant.none loc(#loc0)
%str = torch.constant.str "\0A\0A\0Adef forward(self, params_1, params_2, args_1):\0A t = torch.ops.aten.t(params_1)\0A addmm = torch.ops.aten.addmm(params_2, args_1, t); t = None\0A sum_1 = torch.ops.aten.sum(addmm); addmm = None\0A ones_like = torch.ops.aten.ones_like(sum_1, dtype = 6, layout = 0, device = device(type='cpu'), pin_memory = False, memory_format = 1); sum_1 = None\0A expand = torch.ops.aten.expand(ones_like, [3, 3]); ones_like = None\0A t_1 = torch.ops.aten.t(expand)\0A mm = torch.ops.aten.mm(t_1, args_1); t_1 = args_1 = None\0A t_2 = torch.ops.aten.t(mm); mm = None\0A sum_2 = torch.ops.aten.sum(expand, [0], True); expand = None\0A view = torch.ops.aten.view(sum_2, [3]); sum_2 = None\0A detach = torch.ops.aten.detach(view); view = None\0A detach_1 = torch.ops.aten.detach(detach); detach = None\0A t_3 = torch.ops.aten.t(t_2); t_2 = None\0A detach_2 = torch.ops.aten.detach(t_3); t_3 = None\0A detach_3 = torch.ops.aten.detach(detach_2); detach_2 = None\0A add_ = torch.ops.aten.add_(params_2, detach_1, alpha = -0.01); params_2 = detach_1 = None\0A add__1 = torch.ops.aten.add_(params_1, detach_3, alpha = -0.01); params_1 = detach_3 = None\0A return (add__1, add_)\0A " loc(#loc0)
// The singleton module instance with its slots populated.
%0 = torch.nn_module {
torch.slot "training", %true : !torch.bool loc(#loc0)
torch.slot "_is_full_backward_hook", %none : !torch.none loc(#loc0)
torch.slot "_code", %str : !torch.str loc(#loc0)
} : !torch.nn.Module<"__torch__.torch.fx.graph_module.forward"> loc(#loc0)
} loc(#loc0)
// Source-location aliases referenced by the ops in the module. Each #locN
// is a line:column position in "<eval_with_key>.3" — the torch.fx-generated
// Python source that is also embedded in the module's "_code" string.
#loc1 = loc("<eval_with_key>.3":20:59)
#loc2 = loc("<eval_with_key>.3":13:44)
#loc3 = loc("<eval_with_key>.3":8:113)
#loc4 = loc("<eval_with_key>.3":8:56)
#loc5 = loc("<eval_with_key>.3":8:68)
#loc6 = loc("<eval_with_key>.3":8:136)
#loc7 = loc("<eval_with_key>.3":9:47)
#loc8 = loc("<eval_with_key>.3":5:8)
#loc9 = loc("<eval_with_key>.3":6:12)
#loc10 = loc("<eval_with_key>.3":7:12)
#loc11 = loc("<eval_with_key>.3":8:16)
#loc12 = loc("<eval_with_key>.3":9:13)
#loc13 = loc("<eval_with_key>.3":10:10)
#loc14 = loc("<eval_with_key>.3":11:9)
#loc15 = loc("<eval_with_key>.3":12:10)
#loc16 = loc("<eval_with_key>.3":13:12)
#loc17 = loc("<eval_with_key>.3":14:11)
#loc18 = loc("<eval_with_key>.3":15:13)
#loc19 = loc("<eval_with_key>.3":16:15)
#loc20 = loc("<eval_with_key>.3":17:10)
#loc21 = loc("<eval_with_key>.3":18:15)
#loc22 = loc("<eval_with_key>.3":19:15)
#loc23 = loc("<eval_with_key>.3":20:11)
#loc24 = loc("<eval_with_key>.3":21:13)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment