torch-mlir-opt -pass-pipeline='builtin.module(func.func(torch-match-quantized-custom-ops), torchdynamo-export-to-torch-backend-pipeline{extra-library=})' /tmp/UnnammedModule.mlir --debug | |
Args: torch-mlir-opt -pass-pipeline=builtin.module(func.func(torch-match-quantized-custom-ops), torchdynamo-export-to-torch-backend-pipeline{extra-library=}) /tmp/UnnammedModule.mlir --debug | |
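The two lines above are the invocation being traced: torch-mlir-opt runs torch-match-quantized-custom-ops on each func.func, then the torchdynamo-export-to-torch-backend-pipeline, with --debug enabled so the greedy-rewriter and dialect-conversion traces below get printed. For reference, a minimal sketch of driving the same pipeline from C++ follows; it assumes the registration entry points in torch-mlir's InitAll.h and is illustrative rather than the tool's actual main(). The textual pipeline passed to parsePassPipeline omits the outer builtin.module(...) anchor because the PassManager it is appended to is already anchored on the module.

// Illustrative driver (not torch-mlir-opt itself): replays the command above in C++.
// Assumes torch-mlir's InitAll.h registration hooks; adjust to the tree you build against.
#include "mlir/IR/BuiltinOps.h"
#include "mlir/IR/DialectRegistry.h"
#include "mlir/IR/MLIRContext.h"
#include "mlir/IR/OwningOpRef.h"
#include "mlir/Parser/Parser.h"
#include "mlir/Pass/PassManager.h"
#include "mlir/Pass/PassRegistry.h"
#include "torch-mlir/InitAll.h"

int main() {
  mlir::DialectRegistry registry;
  mlir::torch::registerAllDialects(registry); // torch dialect & friends
  mlir::torch::registerAllPasses();           // makes the pipeline names parseable
  mlir::MLIRContext context(registry);

  // The module the log was produced from.
  mlir::OwningOpRef<mlir::ModuleOp> module =
      mlir::parseSourceFile<mlir::ModuleOp>("/tmp/UnnammedModule.mlir", &context);
  if (!module)
    return 1;

  // Same pipeline as on the command line, appended to a module-anchored PassManager.
  mlir::PassManager pm(&context);
  if (mlir::failed(mlir::parsePassPipeline(
          "func.func(torch-match-quantized-custom-ops),"
          "torchdynamo-export-to-torch-backend-pipeline{extra-library=}",
          pm)))
    return 1;
  return mlir::failed(pm.run(*module)) ? 1 : 0;
}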
Load new dialect in Context builtin | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ShapedType) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::MemRefLayoutAttrInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::TypedAttr) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ElementsAttr) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::DistinctAttr) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::BytecodeOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::SymbolOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpAsmOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::RegionKindInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ConditionallySpeculatable) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::MemoryEffectOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ResourceBlobManagerDialectInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpAsmDialectInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::BytecodeDialectInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::AffineBinaryOpExprStorage) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::AffineConstantExprStorage) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::AffineDimExprStorage) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::AffineMapStorage) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::IntegerSetStorage) | |
Load new dialect in Context builtin | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ZeroOperands<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneRegion<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ZeroResults<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ZeroSuccessors<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::NoRegionArguments<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::NoTerminator<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::SingleBlock<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OpInvariants<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::BytecodeOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::AffineScope<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::IsIsolatedFromAbove<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::SymbolTable<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::SymbolOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpAsmOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::RegionKindInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::HasOnlyGraphRegion<Empty>) | |
Load new dialect in Context func | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::CallOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::SymbolUserOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::CallableOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::FunctionOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::RegionBranchTerminatorOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::DialectInlinerInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ConvertToLLVMPatternInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::bufferization::BufferizableOpInterface) | |
Load new dialect in Context cf | |
Load new dialect in Context arith | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::arith::ArithFastMathInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::VectorUnrollOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::InferTypeOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::InferIntRangeInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::arith::ArithIntegerOverflowFlagsInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::CastOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::arith::ArithRoundingModeInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::SelectLikeOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::bufferization::BufferDeallocationOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ValueBoundsOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::BranchOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::AutomaticAllocationScope<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::CallableOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::FunctionOpInterface::Trait<Empty>) | |
Load new dialect in Context torch | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::RegionBranchOpInterface) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ZeroRegions<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneResult<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneTypedResult<mlir::torch::Torch::IntType>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::ConditionallySpeculatable::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::AlwaysSpeculatableImplTrait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::MemoryEffectOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::InferTypeOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::detail::SymbolicIntOpGenericAdaptorBase::Properties) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneTypedResult<mlir::Type>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneOperand<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::OpTrait::AllowsTypeRefinement<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::OpTrait::HasValueSemantics<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::OpTrait::ReadOnly<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::AttributeTrait::IsLocation<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::AtLeastNOperands<1>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ConstantLike<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::OpTrait::AllowedInModuleInitializer<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::NOperands<2>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneTypedResult<mlir::torch::Torch::NoneType>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::VariadicRegions<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::VariadicResults<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::VariadicOperands<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::detail::OperatorOpGenericAdaptorBase::Properties) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneTypedResult<mlir::torch::Torch::BoolType>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::OneTypedResult<mlir::torch::Torch::StringType>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::detail::ConstantStrOpGenericAdaptorBase::Properties) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::HasParent<mlir::func::FuncOp>::Impl<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::MemRefsNormalizable<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::RegionBranchTerminatorOpInterface::Trait<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::ReturnLike<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::IsTerminator<Empty>) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::OpToOpPassAdaptor) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::DialectFoldInterface) | |
** Replace : 'torch.constant.none'(0x55c0eb111c60) | |
** Modified: 'torch.operator'(0x55c0eb1126c0) | |
** Erase : 'torch.constant.none'(0x55c0eb111c60) | |
** Replace : 'torch.constant.int'(0x55c0eb112c10) | |
** Modified: 'torch.aten.ge.int'(0x55c0eb112d20) | |
** Erase : 'torch.constant.int'(0x55c0eb112c10) | |
//===-------------------------------------------===// | |
Processing operation : 'func.return'(0x55c0eb113b10) { | |
"func.return"(%6) : (!torch.vtensor<[?,1],si64>) -> () | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb113a70) { | |
"torch.operator"(%11, %0) <{name = "torch.aten._assert_scalar"}> : (!torch.int, !torch.str) -> () | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::OpTrait::HasRecursiveMemoryEffects<Empty>) | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.str'(0x55c0eb0e01f0) { | |
%0 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 <= 6 on node 'le_1'"}> : () -> !torch.str | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.Int.bool'(0x55c0eb0e0070) { | |
%11 = "torch.aten.Int.bool"(%10) : (!torch.bool) -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.le.int'(0x55c0eb113610) { | |
%10 = "torch.aten.le.int"(%7, %1) : (!torch.int, !torch.int) -> !torch.bool | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb113500) { | |
%1 = "torch.constant.int"() <{value = 6 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb113450) { | |
"torch.operator"(%9, %2) <{name = "torch.aten._assert_scalar"}> : (!torch.int, !torch.str) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.str'(0x55c0eb112f90) { | |
%2 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 >= 0 on node 'ge_1'"}> : () -> !torch.str | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.Int.bool'(0x55c0eb112e30) { | |
%9 = "torch.aten.Int.bool"(%8) : (!torch.bool) -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.ge.int'(0x55c0eb112d20) { | |
%8 = "torch.aten.ge.int"(%7, %4) : (!torch.int, !torch.int) -> !torch.bool | |
} -> success : operation was folded | |
//===-------------------------------------------===// | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::detail::ConstantBoolOpGenericAdaptorBase::Properties) | |
** Insert : 'torch.constant.bool'(0x55c0eb112c10) | |
** Replace : 'torch.aten.ge.int'(0x55c0eb112d20) | |
** Modified: 'torch.aten.Int.bool'(0x55c0eb112e30) | |
** Erase : 'torch.aten.ge.int'(0x55c0eb112d20) | |
// *** IR Dump After Successful Folding *** | |
func.func @AtenNonzero1DDynamicModule(%arg0: !torch.vtensor<[6],i1>) -> !torch.vtensor<[?,1],si64> { | |
%str = torch.constant.str "Runtime assertion failed for expression u0 <= 6 on node 'le_1'" | |
%int6 = torch.constant.int 6 | |
%str_0 = torch.constant.str "Runtime assertion failed for expression u0 >= 0 on node 'ge_1'" | |
%none = torch.constant.none | |
%int0 = torch.constant.int 0 | |
%0 = torch.symbolic_int "u0" {min_val = 0, max_val = 6} : !torch.int | |
%1 = torch.aten.nonzero %arg0 : !torch.vtensor<[6],i1> -> !torch.vtensor<[?,1],si64> | |
torch.bind_symbolic_shape %1, [%0], affine_map<()[s0] -> (s0, 1)> : !torch.vtensor<[?,1],si64> | |
%2 = torch.aten.size.int %1, %int0 : !torch.vtensor<[?,1],si64>, !torch.int -> !torch.int | |
torch.operator "torch.aten.sym_constrain_range_for_size"(%2, %none, %none) : (!torch.int, !torch.none, !torch.none) -> () | |
%true = torch.constant.bool true | |
%3 = torch.aten.Int.bool %true : !torch.bool -> !torch.int | |
torch.operator "torch.aten._assert_scalar"(%3, %str_0) : (!torch.int, !torch.str) -> () | |
%4 = torch.aten.le.int %2, %int6 : !torch.int, !torch.int -> !torch.bool | |
%5 = torch.aten.Int.bool %4 : !torch.bool -> !torch.int | |
torch.operator "torch.aten._assert_scalar"(%5, %str) : (!torch.int, !torch.str) -> () | |
return %1 : !torch.vtensor<[?,1],si64> | |
} | |
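The dump above is printed immediately after the rewriter folded %8 = torch.aten.ge.int %7, %4: %7 is the result of torch.aten.size.int, which can never be negative, and %4 is the constant 0, so size >= 0 is statically true and the comparison becomes %true = torch.constant.bool true (the next dump then shows torch.aten.Int.bool %true folding to torch.constant.int 1, after which the dead bool constant is erased). In torch-mlir this is implemented as op folders; the following is only a simplified, hypothetical rewrite-pattern sketch of the same reasoning, with the pattern name made up for illustration.

// Hypothetical sketch: ge.int(size.int(...), 0) is statically true because a
// tensor dimension size is non-negative. Not the actual torch-mlir folder.
#include "mlir/IR/Matchers.h"
#include "mlir/IR/PatternMatch.h"
#include "torch-mlir/Dialect/Torch/IR/TorchOps.h"

using namespace mlir;
using namespace mlir::torch;

namespace {
struct FoldGeIntOfSize : public OpRewritePattern<Torch::AtenGeIntOp> {
  using OpRewritePattern::OpRewritePattern;
  LogicalResult matchAndRewrite(Torch::AtenGeIntOp op,
                                PatternRewriter &rewriter) const override {
    // LHS must be produced by torch.aten.size.int, whose result is always >= 0.
    if (!op.getA().getDefiningOp<Torch::AtenSizeIntOp>())
      return failure();
    // RHS must be the constant integer 0.
    int64_t rhs;
    if (!matchPattern(op.getB(), Torch::m_TorchConstantInt(&rhs)) || rhs != 0)
      return failure();
    // size >= 0 always holds, so replace the comparison with `true`.
    rewriter.replaceOpWithNewOp<Torch::ConstantBoolOp>(op, true);
    return success();
  }
};
} // namespace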
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.Int.bool'(0x55c0eb112e30) { | |
%9 = "torch.aten.Int.bool"(%8) : (!torch.bool) -> !torch.int | |
} -> success : operation was folded | |
//===-------------------------------------------===// | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::detail::ConstantIntOpGenericAdaptorBase::Properties) | |
** Insert : 'torch.constant.int'(0x55c0eb111c60) | |
** Replace : 'torch.aten.Int.bool'(0x55c0eb112e30) | |
** Modified: 'torch.operator'(0x55c0eb113450) | |
** Erase : 'torch.aten.Int.bool'(0x55c0eb112e30) | |
// *** IR Dump After Successful Folding *** | |
func.func @AtenNonzero1DDynamicModule(%arg0: !torch.vtensor<[6],i1>) -> !torch.vtensor<[?,1],si64> { | |
%str = torch.constant.str "Runtime assertion failed for expression u0 <= 6 on node 'le_1'" | |
%int6 = torch.constant.int 6 | |
%str_0 = torch.constant.str "Runtime assertion failed for expression u0 >= 0 on node 'ge_1'" | |
%none = torch.constant.none | |
%int0 = torch.constant.int 0 | |
%0 = torch.symbolic_int "u0" {min_val = 0, max_val = 6} : !torch.int | |
%1 = torch.aten.nonzero %arg0 : !torch.vtensor<[6],i1> -> !torch.vtensor<[?,1],si64> | |
torch.bind_symbolic_shape %1, [%0], affine_map<()[s0] -> (s0, 1)> : !torch.vtensor<[?,1],si64> | |
%2 = torch.aten.size.int %1, %int0 : !torch.vtensor<[?,1],si64>, !torch.int -> !torch.int | |
torch.operator "torch.aten.sym_constrain_range_for_size"(%2, %none, %none) : (!torch.int, !torch.none, !torch.none) -> () | |
%true = torch.constant.bool true | |
%int1 = torch.constant.int 1 | |
torch.operator "torch.aten._assert_scalar"(%int1, %str_0) : (!torch.int, !torch.str) -> () | |
%3 = torch.aten.le.int %2, %int6 : !torch.int, !torch.int -> !torch.bool | |
%4 = torch.aten.Int.bool %3 : !torch.bool -> !torch.int | |
torch.operator "torch.aten._assert_scalar"(%4, %str) : (!torch.int, !torch.str) -> () | |
return %1 : !torch.vtensor<[?,1],si64> | |
} | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb113450) { | |
"torch.operator"(%9, %2) <{name = "torch.aten._assert_scalar"}> : (!torch.int, !torch.str) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb111c60) { | |
%9 = "torch.constant.int"() <{value = 1 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.bool'(0x55c0eb112c10) { | |
%8 = "torch.constant.bool"() <{value = true}> : () -> !torch.bool | |
** Erase : 'torch.constant.bool'(0x55c0eb112c10) | |
} -> success : operation is trivially dead | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb1126c0) { | |
"torch.operator"(%7, %3, %3) <{name = "torch.aten.sym_constrain_range_for_size"}> : (!torch.int, !torch.none, !torch.none) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.none'(0x55c0eb111ba0) { | |
%3 = "torch.constant.none"() : () -> !torch.none | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.size.int'(0x55c0eb111670) { | |
%7 = "torch.aten.size.int"(%6, %4) : (!torch.vtensor<[?,1],si64>, !torch.int) -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb08e400) { | |
%4 = "torch.constant.int"() <{value = 0 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.bind_symbolic_shape'(0x55c0eb110310) { | |
"torch.bind_symbolic_shape"(%6, %5) <{shape_expressions = affine_map<()[s0] -> (s0, 1)>}> : (!torch.vtensor<[?,1],si64>, !torch.int) -> () | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.nonzero'(0x55c0eb08fd20) { | |
%6 = "torch.aten.nonzero"(%arg0) : (!torch.vtensor<[6],i1>) -> !torch.vtensor<[?,1],si64> | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.symbolic_int'(0x55c0eb0e0c60) { | |
%5 = "torch.symbolic_int"() <{max_val = 6 : i64, min_val = 0 : i64, symbol_name = "u0"}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'func.return'(0x55c0eb113b10) { | |
"func.return"(%7) : (!torch.vtensor<[?,1],si64>) -> () | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb113a70) { | |
"torch.operator"(%10, %1) <{name = "torch.aten._assert_scalar"}> : (!torch.int, !torch.str) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.Int.bool'(0x55c0eb0e0070) { | |
%10 = "torch.aten.Int.bool"(%9) : (!torch.bool) -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.le.int'(0x55c0eb113610) { | |
%9 = "torch.aten.le.int"(%8, %2) : (!torch.int, !torch.int) -> !torch.bool | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb113450) { | |
"torch.operator"(%0, %3) <{name = "torch.aten._assert_scalar"}> : (!torch.int, !torch.str) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb111c60) { | |
%0 = "torch.constant.int"() <{value = 1 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.operator'(0x55c0eb1126c0) { | |
"torch.operator"(%8, %4, %4) <{name = "torch.aten.sym_constrain_range_for_size"}> : (!torch.int, !torch.none, !torch.none) -> () | |
* Pattern (anonymous namespace)::MatchQuantizeOperator : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::MatchQuantizeOperator" | |
"(anonymous namespace)::MatchQuantizeOperator" result 0 | |
} -> failure : pattern failed to match | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.size.int'(0x55c0eb111670) { | |
%8 = "torch.aten.size.int"(%7, %5) : (!torch.vtensor<[?,1],si64>, !torch.int) -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.bind_symbolic_shape'(0x55c0eb110310) { | |
"torch.bind_symbolic_shape"(%7, %6) <{shape_expressions = affine_map<()[s0] -> (s0, 1)>}> : (!torch.vtensor<[?,1],si64>, !torch.int) -> () | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.aten.nonzero'(0x55c0eb08fd20) { | |
%7 = "torch.aten.nonzero"(%arg0) : (!torch.vtensor<[6],i1>) -> !torch.vtensor<[?,1],si64> | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.symbolic_int'(0x55c0eb0e0c60) { | |
%6 = "torch.symbolic_int"() <{max_val = 6 : i64, min_val = 0 : i64, symbol_name = "u0"}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb08e400) { | |
%5 = "torch.constant.int"() <{value = 0 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.none'(0x55c0eb111ba0) { | |
%4 = "torch.constant.none"() : () -> !torch.none | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.str'(0x55c0eb112f90) { | |
%3 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 >= 0 on node 'ge_1'"}> : () -> !torch.str | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.int'(0x55c0eb113500) { | |
%2 = "torch.constant.int"() <{value = 6 : i64}> : () -> !torch.int | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Processing operation : 'torch.constant.str'(0x55c0eb0e01f0) { | |
%1 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 <= 6 on node 'le_1'"}> : () -> !torch.str | |
} -> failure : pattern failed to match | |
//===-------------------------------------------===// | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::detail::PreservedAnalyses::AllAnalysesType) | |
ImplicitTypeIDRegistry::lookupOrInsert(mlir::torch::Torch::OpTrait::IsTrailingUnderscoreInplaceVariant<Empty>) | |
//===-------------------------------------------===// | |
Legalizing operation : 'func.func'(0x55c0eb0e1160) { | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.int'(0x55c0eb111c60) { | |
%0 = "torch.constant.int"() <{value = 1 : i64}> : () -> !torch.int | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.str'(0x55c0eb0e01f0) { | |
%1 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 <= 6 on node 'le_1'"}> : () -> !torch.str | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.int'(0x55c0eb113500) { | |
%2 = "torch.constant.int"() <{value = 6 : i64}> : () -> !torch.int | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.str'(0x55c0eb112f90) { | |
%3 = "torch.constant.str"() <{value = "Runtime assertion failed for expression u0 >= 0 on node 'ge_1'"}> : () -> !torch.str | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.none'(0x55c0eb111ba0) { | |
%4 = "torch.constant.none"() : () -> !torch.none | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.constant.int'(0x55c0eb08e400) { | |
%5 = "torch.constant.int"() <{value = 0 : i64}> : () -> !torch.int | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.symbolic_int'(0x55c0eb0e0c60) { | |
%6 = "torch.symbolic_int"() <{max_val = 6 : i64, min_val = 0 : i64, symbol_name = "u0"}> : () -> !torch.int | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.aten.nonzero'(0x55c0eb08fd20) { | |
%7 = "torch.aten.nonzero"(%arg0) : (!torch.vtensor<[6],i1>) -> !torch.vtensor<[?,1],si64> | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.bind_symbolic_shape'(0x55c0eb110310) { | |
"torch.bind_symbolic_shape"(%7, %6) <{shape_expressions = affine_map<()[s0] -> (s0, 1)>}> : (!torch.vtensor<[?,1],si64>, !torch.int) -> () | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.aten.size.int'(0x55c0eb111670) { | |
%8 = "torch.aten.size.int"(%7, %5) : (!torch.vtensor<[?,1],si64>, !torch.int) -> !torch.int | |
} -> SUCCESS : operation marked legal by the target | |
//===-------------------------------------------===// | |
//===-------------------------------------------===// | |
Legalizing operation : 'torch.operator'(0x55c0eb1126c0) { | |
"torch.operator"(%8, %4, %4) <{name = "torch.aten.sym_constrain_range_for_size"}> : (!torch.int, !torch.none, !torch.none) -> () | |
* Fold { | |
} -> FAILURE : unable to fold | |
* Pattern : 'torch.operator -> ()' { | |
Trying to match "" | |
"" result 0 | |
} -> FAILURE : pattern failed to match | |
* Pattern : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::ConvertHasValueSemanticsOpsToValueTensors" | |
** Failure : does not have value semantics | |
"(anonymous namespace)::ConvertHasValueSemanticsOpsToValueTensors" result 0 | |
} -> FAILURE : pattern failed to match | |
* Pattern : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::ReduceTrailingUnderscoreInplaceVariant" | |
** Failure : is not trailing_ variant | |
"(anonymous namespace)::ReduceTrailingUnderscoreInplaceVariant" result 0 | |
} -> FAILURE : pattern failed to match | |
* Pattern : 'torch.operator -> ()' { | |
Trying to match "(anonymous namespace)::ReduceNonValueSemanticOps" | |
"(anonymous namespace)::ReduceNonValueSemanticOps" result 0 | |
} -> FAILURE : pattern failed to match | |
} -> FAILURE : no matched legalization pattern | |
//===-------------------------------------------===// | |
torch/fx/passes/runtime_assert.py:24:0: error: failed to legalize operation 'torch.operator' that was explicitly marked illegal | |
torch/fx/passes/runtime_assert.py:24:0: note: see current operation: "torch.operator"(%8, %4, %4) <{name = "torch.aten.sym_constrain_range_for_size"}> : (!torch.int, !torch.none, !torch.none) -> () |
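This is the actual failure of the run: a conversion inside torchdynamo-export-to-torch-backend-pipeline marks generic torch.operator ops illegal, and none of the patterns tried above (the anonymous pattern, ConvertHasValueSemanticsOpsToValueTensors, ReduceTrailingUnderscoreInplaceVariant, ReduceNonValueSemanticOps) handles the unregistered operator "torch.aten.sym_constrain_range_for_size", so the conversion reports it as explicitly illegal. One hypothetical way to unblock legalization, sketched below, is a pattern that recognizes this specific operator and erases it (it has no results and only encodes a runtime range hint); whether dropping the constraint is acceptable depends on how later passes use that information, so treat this as illustration rather than the upstream fix.

// Hypothetical pattern (name made up): drop the zero-result
// torch.operator "torch.aten.sym_constrain_range_for_size" so that the
// conversion traced in this log no longer fails on it. Illustration only.
#include "mlir/IR/BuiltinAttributes.h"
#include "mlir/IR/PatternMatch.h"
#include "torch-mlir/Dialect/Torch/IR/TorchOps.h"

using namespace mlir;
using namespace mlir::torch;

namespace {
struct DropSymConstrainRangeForSize
    : public OpRewritePattern<Torch::OperatorOp> {
  using OpRewritePattern::OpRewritePattern;
  LogicalResult matchAndRewrite(Torch::OperatorOp op,
                                PatternRewriter &rewriter) const override {
    // Match only the operator seen in the log, by its "name" attribute.
    auto name = op->getAttrOfType<StringAttr>("name");
    if (!name || name.getValue() != "torch.aten.sym_constrain_range_for_size")
      return failure();
    // The op produces no results, so it can simply be erased.
    rewriter.eraseOp(op);
    return success();
  }
};
} // namespace

Such a pattern would have to be added to the RewritePatternSet used by the failing conversion (or run in a cleanup pass before it) for the error above to go away.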