@AmosLewis
Created March 3, 2023 17:07
module attributes {torch.debug_module_name = "_lambda"} {
  func.func private @__torch__.torch.fx.graph_module._lambda.forward(%arg0: !torch.nn.Module<"__torch__.torch.fx.graph_module._lambda">, %arg1: !torch.tensor {torch.type_bound = !torch.vtensor<[1,15],si64>}, %arg2: !torch.tensor {torch.type_bound = !torch.vtensor<[1,4],si64>}) -> !torch.tensor {
    %none_1 = torch.constant.none
    %int-1 = torch.constant.int -1
    %false = torch.constant.bool false
    %cpu = torch.constant.device "cpu"
    %int1 = torch.constant.int 1
    %int4 = torch.constant.int 4
    %int0 = torch.constant.int 0
    %int-100 = torch.constant.int -100
    %int9223372036854775807 = torch.constant.int 9223372036854775807
    %133 = torch.prim.ListConstruct %int1, %int4 : (!torch.int, !torch.int) -> !torch.list<int>
    %134 = torch.aten.new_zeros %arg2, %133, %int4, %int0, %cpu, %false : !torch.tensor, !torch.list<int>, !torch.int, !torch.int, !torch.Device, !torch.bool -> !torch.tensor
    %135 = torch.aten.slice.Tensor %arg2, %int1, %int0, %int-1, %int1 : !torch.tensor, !torch.int, !torch.int, !torch.int, !torch.int -> !torch.tensor
    %136 = torch.aten.clone %135, %none_1 : !torch.tensor, !torch.none -> !torch.tensor
    %137 = torch.aten.slice.Tensor %134, %int1, %int1, %int9223372036854775807, %int1 : !torch.tensor, !torch.int, !torch.int, !torch.int, !torch.int -> !torch.tensor
    %138 = torch.aten.copy_ %137, %136, %false : !torch.tensor, !torch.tensor, !torch.bool -> !torch.tensor
    %141 = torch.aten.select.int %134, %int1, %int0 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
    %143 = torch.aten.eq.Scalar %134, %int-100 : !torch.tensor, !torch.int -> !torch.tensor
    %144 = torch.aten.masked_fill_.Scalar %134, %143, %int0 : !torch.tensor, !torch.tensor, !torch.int -> !torch.tensor
    return %144 : !torch.tensor
  }
  torch.class_type @__torch__.torch.fx.graph_module._lambda {
    torch.method "forward", @__torch__.torch.fx.graph_module._lambda.forward
  }
  %132 = torch.nn_module {
  } : !torch.nn.Module<"__torch__.torch.fx.graph_module._lambda">
}
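
For reference, this torch-dialect excerpt appears to implement the usual "shift right" preparation of decoder input ids in encoder-decoder models: allocate a zero tensor, copy labels[:, :-1] into its [:, 1:] slice, and replace the -100 ignore index with 0. The select of column 0 (%141) has no user in this excerpt, so the write of the decoder start token presumably lives in the elided ops between %138 and %143. Below is a minimal PyTorch sketch of the same sequence of ops; the names labels, decoder_start_token_id, and pad_token_id are illustrative assumptions, not taken from the graph.

import torch

def shift_right_sketch(labels: torch.Tensor,
                       decoder_start_token_id: int = 0,
                       pad_token_id: int = 0) -> torch.Tensor:
    # %133/%134: new_zeros with the same shape and int64 dtype as the labels tensor
    shifted = labels.new_zeros(labels.shape)
    # %135-%138: slice labels[:, :-1], clone it, and copy_ it into shifted[:, 1:]
    shifted[:, 1:] = labels[:, :-1].clone()
    # %141 selects column 0; writing the start token here is an assumption about
    # the ops elided from this excerpt.
    shifted[:, 0] = decoder_start_token_id
    # %143/%144: masked_fill_ replaces the -100 ignore index with the pad token (0 here)
    shifted.masked_fill_(shifted == -100, pad_token_id)
    return shifted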