Add Dialect in op definition to capture prefix and documentation.

Enables specifying the documentation for a dialect along with defining the ops of the dialect. The doc generator will be expanded in a follow-up to emit the documentation in the autogenerated files. This is a precursor to allowing a common base for all ops in a dialect.

    All the dialect documentation is super sparse and just added as a placeholder.

    I was tempted (and started) to move ConstantOp to be generated too, but this will be easier after adding extra_methods, so deferring until then.

--

PiperOrigin-RevId: 245759984
This commit is contained in:
Jacques Pienaar 2019-04-29 09:24:09 -07:00 committed by Mehdi Amini
parent aae8a7446e
commit 0ea6154b2a
26 changed files with 240 additions and 106 deletions

View File

@ -30,6 +30,10 @@ include "mlir/IR/OpBase.td"
include "mlir/Quantization/QuantPredicates.td"
def fxpmath_Dialect : Dialect {
let name = "fxpmath";
}
//===----------------------------------------------------------------------===//
// Attributes
//===----------------------------------------------------------------------===//
@ -86,7 +90,7 @@ def fxpmath_CompareFnAttr : EnumAttr<"ComparisonFn",
//===----------------------------------------------------------------------===//
class fxpmath_Op<string mnemonic, list<OpTrait> traits> :
Op<!strconcat("fxpmath.", mnemonic), traits>;
Op<fxpmath_Dialect, mnemonic, traits>;
//===----------------------------------------------------------------------===//
// Fixed-point (fxp) arithmetic ops used by kernels.

View File

@ -192,6 +192,21 @@ def IsMemRefTypePred : CPred<"$_self.isa<MemRefType>()">;
def IsStaticShapeTensorTypePred :
CPred<"$_self.cast<TensorType>().hasStaticShape()">;
//===----------------------------------------------------------------------===//
// Dialect definitions
//===----------------------------------------------------------------------===//
class Dialect {
// The name of the dialect.
string name = ?;
// Short summary of the dialect.
string summary = ?;
// The description of the dialect.
string description = ?;
}
//===----------------------------------------------------------------------===//
// Type definitions
//===----------------------------------------------------------------------===//
@ -808,7 +823,10 @@ class OpBuilder<string p, code b = ""> {
}
// Base class for all ops.
class Op<string mnemonic, list<OpTrait> props = []> {
class Op<Dialect dialect, string mnemonic, list<OpTrait> props = []> {
// The dialect of the op.
Dialect opDialect = dialect;
// The mnemonic of the op.
string opName = mnemonic;

View File

@ -29,14 +29,18 @@
include "mlir/IR/OpBase.td"
#endif // OP_BASE
def LLVM_Dialect : Dialect {
let name = "llvm";
}
// LLVM IR type wrapped in MLIR.
def LLVM_Type : Type<CPred<"$_self.isa<::mlir::LLVM::LLVMType>()">,
"LLVM dialect type">;
// Base class for LLVM operations. Defines the interface to the llvm::IRBuilder
// used to translate to LLVM IR proper.
class LLVM_OpBase<string mnemonic, list<OpTrait> traits = []> :
Op<mnemonic, traits> {
class LLVM_OpBase<Dialect dialect, string mnemonic, list<OpTrait> traits = []> :
Op<dialect, mnemonic, traits> {
// A pattern for constructing the LLVM IR Instruction (or other Value) that
// corresponds to this op. This pattern can use `builder` to refer to an
// `llvm::IRBuilder<>` instance, $-names of arguments and results and the
@ -51,4 +55,4 @@ class LLVM_OpBase<string mnemonic, list<OpTrait> traits = []> :
string llvmBuilder = "";
}
#endif // LLVMIR_OP_BASE
#endif // LLVMIR_OP_BASE

View File

@ -30,7 +30,7 @@ include "mlir/LLVMIR/LLVMOpBase.td"
// this class is specialized below for both cases and should not be used
// directly.
class LLVM_Op<string mnemonic, list<OpTrait> traits = []> :
LLVM_OpBase<!strconcat("llvm.", mnemonic), traits> {
LLVM_OpBase<LLVM_Dialect, mnemonic, traits> {
}
class LLVM_Builder<string builder> {

View File

@ -25,8 +25,12 @@
include "mlir/LLVMIR/LLVMOpBase.td"
def NVVM_Dialect : Dialect {
let name = "nvvm";
}
class NVVM_Op<string mnemonic, list<OpTrait> traits = []> :
LLVM_OpBase<!strconcat("nvvm.", mnemonic), traits> {
LLVM_OpBase<NVVM_Dialect, mnemonic, traits> {
}
class NVVM_SpecialRegisterOp<string mnemonic,
@ -52,4 +56,4 @@ def NVVM_BlockDimXOp : NVVM_SpecialRegisterOp<"read.ptx.sreg.nctaid.x">;
def NVVM_BlockDimYOp : NVVM_SpecialRegisterOp<"read.ptx.sreg.nctaid.y">;
def NVVM_BlockDimZOp : NVVM_SpecialRegisterOp<"read.ptx.sreg.nctaid.z">;
#endif // NVVMIR_OPS
#endif // NVVMIR_OPS

View File

@ -27,13 +27,16 @@
include "mlir/IR/OpBase.td"
#endif // OP_BASE
def Linalg_Dialect : Dialect {
let name = "linalg";
}
// Whether a type is a ViewType.
def LinalgIsViewTypePred : CPred<"$_self.isa<ViewType>()">;
def View : Type<LinalgIsViewTypePred, "view">;
class ParametricNativeOpTrait<string prop, string parameters> :
NativeOpTrait<prop # parameters>
{}
NativeOpTrait<prop # parameters>;
class ParametricIntNativeOpTrait<string prop, list<int> parameters> :
ParametricNativeOpTrait<
@ -45,35 +48,32 @@ class ParametricIntNativeOpTrait<string prop, list<int> parameters> :
sum,
param,
sum # "," # !cast<string>(param)),
">::Impl")>
{}
">::Impl")>;
// The Linalg `NInputsAndOutputs` trait provides the API for ops that are known
// to have a specified number of inputs and outputs, all passed as operands.
// See Linalg/LinalgTraits.h for implementation details an usage.
class NInputsAndOutputs<int n_ins, int n_outs> :
ParametricIntNativeOpTrait<"NInputsAndOutputs", [n_ins, n_outs]>
{}
ParametricIntNativeOpTrait<"NInputsAndOutputs", [n_ins, n_outs]>;
// The linalg `NLoopTypes` trait provides the API for ops that are known to have
// a specified number of parallel (n_par), reduction (n_red) and window (n_win)
// loops.
// See Linalg/LinalgTraits.h for implementation details an usage.
class NLoopTypes<int n_par, int n_red, int n_win> :
ParametricIntNativeOpTrait<"NLoopTypes", [n_par, n_red, n_win]>
{}
ParametricIntNativeOpTrait<"NLoopTypes", [n_par, n_red, n_win]>;
// The linalg `ViewRanks` trait the API for ops that are known to have a
// specified list of view ranks.
// See Linalg/LinalgTraits.h for implementation details an usage.
class ViewRanks<list<int> ranks> :
ParametricIntNativeOpTrait<"ViewRanks", ranks>
{}
ParametricIntNativeOpTrait<"ViewRanks", ranks>;
// Base Tablegen class for Linalg ops.
class LinalgOp<string mnemonic, list<OpTrait> props> :
Op<!strconcat("linalg.", mnemonic), props> {
let arguments = (ins Variadic<View>); // default variadic builder
Op<Linalg_Dialect, mnemonic, props> {
// The default variadic builder.
let arguments = (ins Variadic<View>);
let parser = [{ return impl::parseLinalgLibraryOp(parser, result); }];
@ -85,12 +85,12 @@ Op<!strconcat("linalg.", mnemonic), props> {
////////////////////////////////////////////////////////////////////////////////
def DotOp : LinalgOp<"dot", [NInputsAndOutputs<2, 1>,
NLoopTypes<0, 1, 0>,
ViewRanks<[1, 1, 0]>]> {}
ViewRanks<[1, 1, 0]>]>;
def MatvecOp : LinalgOp<"matvec", [NInputsAndOutputs<2, 1>,
NLoopTypes<1, 1, 0>,
ViewRanks<[2, 1, 1]>]> {}
ViewRanks<[2, 1, 1]>]>;
def MatmulOp : LinalgOp<"matmul", [NInputsAndOutputs<2, 1>,
NLoopTypes<2, 1, 0>,
ViewRanks<[2, 2, 2]>]> {}
ViewRanks<[2, 2, 2]>]>;
#endif // LINALG_OPS
#endif // LINALG_OPS

View File

@ -28,12 +28,16 @@ include "mlir/IR/OpBase.td"
include "mlir/Quantization/QuantPredicates.td"
#endif // OP_BASE
def quant_Dialect : Dialect {
let name = "quant";
}
//===----------------------------------------------------------------------===//
// Base classes
//===----------------------------------------------------------------------===//
class quant_Op<string mnemonic, list<OpTrait> traits> :
Op<!strconcat("quant.", mnemonic), traits>;
Op<quant_Dialect, mnemonic, traits>;
//===----------------------------------------------------------------------===//
// Quantization casts

View File

@ -28,14 +28,18 @@
include "mlir/IR/OpBase.td"
#endif // OP_BASE
def Standard_Dialect : Dialect {
let name = "std";
}
// Base class for standard arithmetic operations. Requires operands and
// results to be of the same type, but does not constrain them to specific
// types. Individual classes will have `lhs` and `rhs` accessor to operands.
class ArithmeticOp<string mnemonic, list<OpTrait> traits = []> :
Op<mnemonic, !listconcat(traits, [NoSideEffect, SameValueType])>,
Results<(outs AnyType)> {
Op<Standard_Dialect, mnemonic,
!listconcat(traits, [NoSideEffect, SameValueType])> {
let opName = mnemonic;
let results = (outs AnyType);
let parser = [{
return impl::parseBinaryOp(parser, result);
@ -69,84 +73,84 @@ class FloatArithmeticOp<string mnemonic, list<OpTrait> traits = []> :
ArithmeticOp<mnemonic, traits>,
Arguments<(ins FloatLike:$lhs, FloatLike:$rhs)>;
def AddFOp : FloatArithmeticOp<"std.addf"> {
def AddFOp : FloatArithmeticOp<"addf"> {
let summary = "floating point addition operation";
let hasConstantFolder = 0b1;
}
def AddIOp : IntArithmeticOp<"std.addi", [Commutative]> {
def AddIOp : IntArithmeticOp<"addi", [Commutative]> {
let summary = "integer addition operation";
let hasFolder = 1;
let hasConstantFolder = 0b1;
}
def AndOp : IntArithmeticOp<"std.and", [Commutative]> {
def AndOp : IntArithmeticOp<"and", [Commutative]> {
let summary = "integer binary and";
let hasConstantFolder = 0b1;
let hasFolder = 1;
}
def DivFOp : FloatArithmeticOp<"std.divf"> {
def DivFOp : FloatArithmeticOp<"divf"> {
let summary = "floating point division operation";
}
def DivISOp : IntArithmeticOp<"std.divis"> {
def DivISOp : IntArithmeticOp<"divis"> {
let summary = "signed integer division operation";
let hasConstantFolder = 0b1;
}
def DivIUOp : IntArithmeticOp<"std.diviu"> {
def DivIUOp : IntArithmeticOp<"diviu"> {
let summary = "unsigned integer division operation";
let hasConstantFolder = 0b1;
}
def MulFOp : FloatArithmeticOp<"std.mulf"> {
def MulFOp : FloatArithmeticOp<"mulf"> {
let summary = "foating point multiplication operation";
let hasConstantFolder = 0b1;
}
def MulIOp : IntArithmeticOp<"std.muli", [Commutative]> {
def MulIOp : IntArithmeticOp<"muli", [Commutative]> {
let summary = "integer multiplication operation";
let hasConstantFolder = 0b1;
let hasFolder = 1;
}
def OrOp : IntArithmeticOp<"std.or", [Commutative]> {
def OrOp : IntArithmeticOp<"or", [Commutative]> {
let summary = "integer binary or";
let hasConstantFolder = 0b1;
let hasFolder = 1;
}
def RemFOp : FloatArithmeticOp<"std.remf"> {
def RemFOp : FloatArithmeticOp<"remf"> {
let summary = "floating point division remainder operation";
}
def RemISOp : IntArithmeticOp<"std.remis"> {
def RemISOp : IntArithmeticOp<"remis"> {
let summary = "signed integer division remainder operation";
let hasConstantFolder = 0b1;
}
def RemIUOp : IntArithmeticOp<"std.remiu"> {
def RemIUOp : IntArithmeticOp<"remiu"> {
let summary = "unsigned integer division remainder operation";
let hasConstantFolder = 0b1;
}
def ShlISOp : IntArithmeticOp<"std.shlis"> {
def ShlISOp : IntArithmeticOp<"shlis"> {
let summary = "signed integer shift left";
}
def SubFOp : FloatArithmeticOp<"std.subf"> {
def SubFOp : FloatArithmeticOp<"subf"> {
let summary = "floating point subtraction operation";
let hasConstantFolder = 0b1;
}
def SubIOp : IntArithmeticOp<"std.subi"> {
def SubIOp : IntArithmeticOp<"subi"> {
let summary = "integer subtraction operation";
let hasConstantFolder = 0b1;
let hasCanonicalizer = 0b1;
}
def XOrOp : IntArithmeticOp<"std.xor", [Commutative]> {
def XOrOp : IntArithmeticOp<"xor", [Commutative]> {
let summary = "integer binary xor";
let hasConstantFolder = 0b1;
let hasCanonicalizer = 0b1;

View File

@ -51,7 +51,7 @@ public:
explicit Operator(const llvm::Record *def) : Operator(*def) {}
// Returns the operation name.
StringRef getOperationName() const;
std::string getOperationName() const;
// Returns this op's dialect name.
StringRef getDialectName() const;

View File

@ -46,8 +46,13 @@ tblgen::Operator::Operator(const llvm::Record &def) : def(def) {
populateOpStructure();
}
StringRef tblgen::Operator::getOperationName() const {
return def.getValueAsString("opName");
std::string tblgen::Operator::getOperationName() const {
auto *dialect = def.getValueAsDef("opDialect");
assert(dialect && "op defined without dialect");
auto prefix = dialect->getValueAsString("name");
if (prefix.empty())
return def.getValueAsString("opName");
return llvm::formatv("{0}.{1}", prefix, def.getValueAsString("opName"));
}
StringRef tblgen::Operator::getDialectName() const { return dialectName; }

View File

@ -28,6 +28,7 @@
using namespace mlir;
using llvm::formatv;
using mlir::tblgen::Operator;
bool tblgen::DagLeaf::isUnspecified() const {

View File

@ -9,7 +9,13 @@ def NS_SomeEnum_B : EnumAttrCase<"B">;
def NS_SomeEnum : EnumAttr<
"SomeEnum", "some enum", [NS_SomeEnum_A, NS_SomeEnum_B]>;
def NS_OpA : Op<"op_a_with_enum_attr", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def NS_OpA : NS_Op<"op_a_with_enum_attr", []> {
let arguments = (ins NS_SomeEnum:$attr);
let results = (outs I32:$result);
}
@ -26,7 +32,7 @@ def NS_OpA : Op<"op_a_with_enum_attr", []> {
// DEF: if (!(((tblgen_attr.isa<StringAttr>())) && (((tblgen_attr.cast<StringAttr>().getValue() == "A")) || ((tblgen_attr.cast<StringAttr>().getValue() == "B")))))
// DEF-SAME: return emitOpError("attribute 'attr' failed to satisfy constraint: some enum");
def NS_OpB : Op<"op_b_with_enum_attr", []> {
def NS_OpB : NS_Op<"op_b_with_enum_attr", []> {
let arguments = (ins NS_SomeEnum:$attr);
let results = (outs I32:$result);
}
@ -50,7 +56,7 @@ def : Pat<(NS_OpA NS_SomeEnum_A:$attr), (NS_OpB NS_SomeEnum_B)>;
def NS_SomeEnum_Array : TypedArrayAttrBase<NS_SomeEnum, "SomeEnum array">;
def NS_OpC : Op<"op_b_with_enum_array_attr", []> {
def NS_OpC : NS_Op<"op_b_with_enum_array_attr", []> {
let arguments = (ins NS_SomeEnum_Array:$attr);
let results = (outs I32:$result);
}

View File

@ -2,12 +2,18 @@
include "mlir/IR/OpBase.td"
def ThreeResultOp : Op<"three_result_op", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def ThreeResultOp : NS_Op<"three_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1, I32:$r2, I32:$r3);
}
def OneResultOp : Op<"one_result_op", []> {
def OneResultOp : NS_Op<"one_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1);
}

View File

@ -2,6 +2,12 @@
include "mlir/IR/OpBase.td"
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def SomeAttr : Attr<CPred<"some-condition">, "some attribute kind"> {
let storageType = "some-attr-kind";
let returnType = "some-return-type";
@ -12,7 +18,7 @@ def SomeAttr : Attr<CPred<"some-condition">, "some attribute kind"> {
// Test required, optional, default-valued attributes
// ---
def AOp : Op<"a_op", []> {
def AOp : NS_Op<"a_op", []> {
let arguments = (ins
SomeAttr:$aAttr,
DefaultValuedAttr<SomeAttr, "4.2">:$bAttr,
@ -72,7 +78,7 @@ def SomeTypeAttr : TypeAttrBase<"SomeType", "some type attribute">;
// Test common attribute kinds' constraints
// ---
def BOp : Op<"b_op", []> {
def BOp : NS_Op<"b_op", []> {
let arguments = (ins
AnyAttr:$any_attr,
BoolAttr:$bool_attr,
@ -106,7 +112,7 @@ def BOp : Op<"b_op", []> {
// Test building constant values for array attribute kinds
// ---
def COp : Op<"c_op", []> {
def COp : NS_Op<"c_op", []> {
let arguments = (ins
DefaultValuedAttr<I32ArrayAttr, "{1, 2}">:$i32_array_attr,
DefaultValuedAttr<I64ArrayAttr, "{3, 4}">:$i64_array_attr,
@ -126,7 +132,7 @@ def COp : Op<"c_op", []> {
// Test mixing operands and attributes in arbitrary order
// ---
def MixOperandsAndAttrs : Op<"mix_operands_and_attrs", []> {
def MixOperandsAndAttrs : NS_Op<"mix_operands_and_attrs", []> {
let arguments = (ins F32Attr:$attr, F32:$operand, F32Attr:$otherAttr, F32:$otherArg);
}

View File

@ -2,7 +2,13 @@
include "mlir/IR/OpBase.td"
def NS_AOp : Op<"a_op", [NoSideEffect]> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def NS_AOp : NS_Op<"a_op", [NoSideEffect]> {
let arguments = (ins
I32:$a,
Variadic<F32>:$b,
@ -51,21 +57,21 @@ def NS_AOp : Op<"a_op", [NoSideEffect]> {
// Check op trait for different number of operands
// ---
def NS_BOp : Op<"op_with_no_operand", []> {
def NS_BOp : NS_Op<"op_with_no_operand", []> {
let arguments = (ins);
}
// CHECK-LABEL: NS::BOp declarations
// CHECK: OpTrait::NOperands<0>::Impl
def NS_COp : Op<"op_with_one_operand", []> {
def NS_COp : NS_Op<"op_with_one_operand", []> {
let arguments = (ins I32:$operand);
}
// CHECK-LABEL: NS::COp declarations
// CHECK: OpTrait::NOperands<1>::Impl
def NS_DOp : Op<"op_with_two_operands", []> {
def NS_DOp : NS_Op<"op_with_two_operands", []> {
let arguments = (ins I32:$input1, I32:$input2);
}
@ -75,12 +81,12 @@ def NS_DOp : Op<"op_with_two_operands", []> {
// Check leading underscore in op name
// ---
def NS__AOp : Op<"_op_with_leading_underscore", []>;
def NS__AOp : NS_Op<"_op_with_leading_underscore", []>;
// CHECK-LABEL: NS::_AOp declarations
// CHECK: class _AOp : public Op<_AOp
def _BOp : Op<"_op_with_leading_underscore_and_no_namespace", []>;
def _BOp : NS_Op<"_op_with_leading_underscore_and_no_namespace", []>;
// CHECK-LABEL: _BOp declarations
// CHECK: class _BOp : public Op<_BOp

View File

@ -2,7 +2,13 @@
include "mlir/IR/OpBase.td"
def OpA : Op<"one_normal_operand_op", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def OpA : NS_Op<"one_normal_operand_op", []> {
let arguments = (ins I32:$input);
}
@ -21,7 +27,7 @@ def OpA : Op<"one_normal_operand_op", []> {
// CHECK: if (!((this->getOperation()->getOperand(0)->getType().isInteger(32))))
// CHECK-NEXT: return emitOpError("operand #0 must be 32-bit integer");
def OpB : Op<"one_variadic_operand_op", []> {
def OpB : NS_Op<"one_variadic_operand_op", []> {
let arguments = (ins Variadic<I32>:$input);
}
@ -30,7 +36,7 @@ def OpB : Op<"one_variadic_operand_op", []> {
// CHECK-NOT: assert
// CHECK: tblgen_state->addOperands(input);
def OpC : Op<"all_variadic_inputs_op", [SameVariadicOperandSize]> {
def OpC : NS_Op<"all_variadic_inputs_op", [SameVariadicOperandSize]> {
let arguments = (ins Variadic<Tensor>:$input1, Variadic<Tensor>:$input2);
}
@ -48,7 +54,7 @@ def OpC : Op<"all_variadic_inputs_op", [SameVariadicOperandSize]> {
// CHECK-NEXT: tblgen_state->addOperands(input1);
// CHECK-NEXT: tblgen_state->addOperands(input2);
def OpD : Op<"mix_variadic_and_normal_inputs_op", [SameVariadicOperandSize]> {
def OpD : NS_Op<"mix_variadic_and_normal_inputs_op", [SameVariadicOperandSize]> {
let arguments = (ins Variadic<Tensor>:$input1, Tensor:$input2, Variadic<Tensor>:$input3);
}
@ -72,7 +78,7 @@ def OpD : Op<"mix_variadic_and_normal_inputs_op", [SameVariadicOperandSize]> {
// CHECK-NEXT: tblgen_state->operands.push_back(input2);
// CHECK-NEXT: tblgen_state->addOperands(input3);
def OpE : Op<"one_variadic_among_multi_normal_inputs_op", []> {
def OpE : NS_Op<"one_variadic_among_multi_normal_inputs_op", []> {
let arguments = (ins Tensor:$input1, Tensor:$input2, Variadic<Tensor>:$input3, Tensor:$input4, Tensor:$input5);
}

View File

@ -2,7 +2,13 @@
include "mlir/IR/OpBase.td"
def OpA : Op<"one_normal_result_op", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def OpA : NS_Op<"one_normal_result_op", []> {
let results = (outs I32:$result);
}
@ -18,7 +24,7 @@ def OpA : Op<"one_normal_result_op", []> {
// CHECK: if (!((this->getOperation()->getResult(0)->getType().isInteger(32))))
// CHECK-NEXT: return emitOpError("result #0 must be 32-bit integer");
def OpB : Op<"same_input_output_type_op", [SameValueType]> {
def OpB : NS_Op<"same_input_output_type_op", [SameValueType]> {
let arguments = (ins I32:$x);
let results = (outs I32:$y);
}
@ -29,7 +35,7 @@ def OpB : Op<"same_input_output_type_op", [SameValueType]> {
// CHECK: void OpB::build(Builder *, OperationState *tblgen_state, Value *x)
// CHECK: tblgen_state->addTypes({x->getType()});
def OpC : Op<"three_normal_result_op", []> {
def OpC : NS_Op<"three_normal_result_op", []> {
let results = (outs I32:$x, /*unnamed*/I32, I32:$z);
}
@ -40,7 +46,7 @@ def OpC : Op<"three_normal_result_op", []> {
// CHECK-NEXT: tblgen_state->types.push_back(z)
def IntegerTypeAttr : TypeAttrBase<"IntegerType", "Integer type attribute">;
def OpD : Op<"type_attr_as_result_type", [FirstAttrDerivedResultType]> {
def OpD : NS_Op<"type_attr_as_result_type", [FirstAttrDerivedResultType]> {
let arguments = (ins I32:$x, IntegerTypeAttr:$attr, F32Attr:$f32);
let results = (outs Tensor:$y);
}
@ -49,7 +55,7 @@ def OpD : Op<"type_attr_as_result_type", [FirstAttrDerivedResultType]> {
// CHECK: void OpD::build(Builder *, OperationState *tblgen_state, Value *x, TypeAttr attr, FloatAttr f32)
// CHECK: tblgen_state->addTypes({attr.getValue()});
def OpE : Op<"value_attr_as_result_type", [FirstAttrDerivedResultType]> {
def OpE : NS_Op<"value_attr_as_result_type", [FirstAttrDerivedResultType]> {
let arguments = (ins I32:$x, F32Attr:$attr);
let results = (outs Tensor:$y);
}
@ -58,7 +64,7 @@ def OpE : Op<"value_attr_as_result_type", [FirstAttrDerivedResultType]> {
// CHECK: void OpE::build(Builder *, OperationState *tblgen_state, Value *x, FloatAttr attr)
// CHECK: tblgen_state->addTypes({attr.getType()});
def OpF : Op<"one_variadic_result_op", []> {
def OpF : NS_Op<"one_variadic_result_op", []> {
let results = (outs Variadic<I32>:$x);
}
@ -70,7 +76,7 @@ def OpF : Op<"one_variadic_result_op", []> {
// CHECK-NOT: assert
// CHECK: tblgen_state->addTypes(x);
def OpG : Op<"one_normal_and_one_variadic_result_op", []> {
def OpG : NS_Op<"one_normal_and_one_variadic_result_op", []> {
let results = (outs I32:$x, Variadic<I32>:$y);
}
@ -87,7 +93,7 @@ def OpG : Op<"one_normal_and_one_variadic_result_op", []> {
// CHECK-NEXT: tblgen_state->addTypes(resultTypes);
def OpH : Op<"all_variadic_results_op", [SameVariadicResultSize]> {
def OpH : NS_Op<"all_variadic_results_op", [SameVariadicResultSize]> {
let results = (outs Variadic<Tensor>:$output1, Variadic<Tensor>:$output2);
}
@ -106,7 +112,7 @@ def OpH : Op<"all_variadic_results_op", [SameVariadicResultSize]> {
// CHECK-NEXT: tblgen_state->addTypes(output1);
// CHECK-NEXT: tblgen_state->addTypes(output2);
def OpI : Op<"mix_variadic_and_normal_results_op", [SameVariadicResultSize]> {
def OpI : NS_Op<"mix_variadic_and_normal_results_op", [SameVariadicResultSize]> {
let results = (outs Variadic<Tensor>:$output1, Tensor:$output2, Variadic<Tensor>:$output3);
}
@ -130,7 +136,7 @@ def OpI : Op<"mix_variadic_and_normal_results_op", [SameVariadicResultSize]> {
// CHECK-NEXT: tblgen_state->types.push_back(output2);
// CHECK-NEXT: tblgen_state->addTypes(output3);
def OpJ : Op<"one_variadic_among_multi_normal_results_op", []> {
def OpJ : NS_Op<"one_variadic_among_multi_normal_results_op", []> {
let results = (outs Tensor:$output1, Tensor:$output2, Variadic<Tensor>:$output3, Tensor:$output4, Tensor:$output5);
}
@ -152,7 +158,7 @@ def OpJ : Op<"one_variadic_among_multi_normal_results_op", []> {
// Test that if the only operand is variadic, we acess the first value in the
// pack to set result type
// ---
def OpK : Op<"only_input_is_variadic_with_same_value_type_op", [SameValueType]> {
def OpK : NS_Op<"only_input_is_variadic_with_same_value_type_op", [SameValueType]> {
let arguments = (ins Variadic<Tensor>:$input);
let results = (outs Tensor:$result);
}

View File

@ -2,16 +2,22 @@
include "mlir/IR/OpBase.td"
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def CreateOperand : NativeCodeCall<"buildOperand($0, $1)">;
def CreateArrayAttr : NativeCodeCall<"$_builder.getArrayAttr({$0, $1})">;
def CreateOpResult : NativeCodeCall<"buildOp($0, $1)">;
def NS_AOp : Op<"a_op", []> {
def NS_AOp : NS_Op<"a_op", []> {
let arguments = (ins I32:$input1, I32:$input2, I32Attr:$attr);
let results = (outs I32:$output);
}
def NS_BOp : Op<"b_op", []> {
def NS_BOp : NS_Op<"b_op", []> {
let arguments = (ins I32:$input, I32Attr:$attr);
let results = (outs I32:$output);
}

View File

@ -2,6 +2,12 @@
include "mlir/IR/OpBase.td"
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def FirstConstraint : AttrConstraint<CPred<"FirstConstraint">,
"first constraint">;
def SecondConstraint : AttrConstraint<CPred<"SecondConstraint">,
@ -9,7 +15,7 @@ def SecondConstraint : AttrConstraint<CPred<"SecondConstraint">,
def ThirdConstraint : AttrConstraint<CPred<"ThirdConstraint">,
"third constraint">;
def OpA : Op<"op_a", []> {
def OpA : NS_Op<"op_a", []> {
let arguments = (ins
I32Attr:$attr
);

View File

@ -2,9 +2,15 @@
include "mlir/IR/OpBase.td"
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def MoreConstraint : AttrConstraint<CPred<"MoreConstraint">, "more constraint">;
def OpA : Op<"op_a", []> {
def OpA : NS_Op<"op_a", []> {
let arguments = (ins
I32Attr:$required_attr,
OptionalAttr<I32Attr>:$optional_attr,

View File

@ -4,15 +4,21 @@ include "mlir/IR/OpBase.td"
def IfEqual : Constraint<CPred<"<notused>">>;
def Test_Dialect : Dialect {
let name = "x";
}
class NS_Op<string mnemonic, list<OpTrait> traits = []> :
Op<Test_Dialect, mnemonic, traits>;
// Define ops to rewrite.
def U: Type<CPred<"true">, "U">;
def X_AddOp : Op<"x.add"> {
def X_AddOp : NS_Op<"add"> {
let arguments = (ins U, U);
}
def Y_AddOp : Op<"y.add"> {
def Y_AddOp : NS_Op<"add"> {
let arguments = (ins U, U, U);
}
def Z_AddOp : Op<"z.add"> {
def Z_AddOp : NS_Op<"add"> {
let arguments = (ins U);
}
@ -25,4 +31,4 @@ def : Pat<(X_AddOp (X_AddOp $lhs, $rhs), $rhs), (Y_AddOp $lhs, $rhs, $rhs)>;
def : Pat<(X_AddOp $lhs, $rhs), (Z_AddOp $lhs), [(IfEqual $lhs, $rhs)], (addBenefit 100)>;
// CHECK-LABEL: struct GeneratedConvert1
// CHECK: GeneratedConvert1(MLIRContext *context) : RewritePattern("x.add", 101, context) {}
// CHECK: GeneratedConvert1(MLIRContext *context) : RewritePattern("x.add", 101, context) {}

View File

@ -2,22 +2,28 @@
include "mlir/IR/OpBase.td"
def OpA : Op<"op_a", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def OpA : NS_Op<"op_a", []> {
let arguments = (ins I32:$operand, I32Attr:$attr);
let results = (outs I32:$result);
}
def OpB : Op<"op_b", []> {
def OpB : NS_Op<"op_b", []> {
let arguments = (ins I32:$operand);
let results = (outs I32:$result);
}
def OpC : Op<"op_c", []> {
def OpC : NS_Op<"op_c", []> {
let arguments = (ins I32:$operand);
let results = (outs I32:$result);
}
def OpD : Op<"op_d", []> {
def OpD : NS_Op<"op_d", []> {
let arguments = (ins I32:$input1, I32:$input2, I32:$input3, I32Attr:$attr);
let results = (outs I32:$result);
}

View File

@ -2,17 +2,23 @@
include "mlir/IR/OpBase.td"
def ThreeResultOp : Op<"three_result_op", []> {
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def ThreeResultOp : NS_Op<"three_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1, I32:$r2, I32:$r3);
}
def TwoResultOp : Op<"two_result_op", []> {
def TwoResultOp : NS_Op<"two_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1, I32:$r2);
}
def OneResultOp : Op<"one_result_op", []> {
def OneResultOp : NS_Op<"one_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1);
}
@ -54,7 +60,7 @@ def : Pattern<(ThreeResultOp $input), [
// Test more result patterns than needed for replacement
// ---
def AdditionalOp : Op<"additional_one_result_op", []> {
def AdditionalOp : NS_Op<"additional_one_result_op", []> {
let arguments = (ins I32:$input);
let results = (outs I32:$r1);
}

View File

@ -2,12 +2,18 @@
include "mlir/IR/OpBase.td"
def OpA : Op<"op_a", []> {
def Test_Dialect : Dialect {
let name = "";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def OpA : NS_Op<"op_a", []> {
let arguments = (ins I32:$operand, I32Attr:$attr);
let results = (outs I32:$result);
}
def OpB : Op<"op_b", []> {
def OpB : NS_Op<"op_b", []> {
let arguments = (ins I32:$operand, I32Attr:$attr);
let results = (outs I32:$result);
}

View File

@ -2,17 +2,23 @@
include "mlir/IR/OpBase.td"
def Test_Dialect : Dialect {
let name = "test";
}
class NS_Op<string mnemonic, list<OpTrait> traits> :
Op<Test_Dialect, mnemonic, traits>;
def I32OrF32 : Type<CPred<"$_self.isInteger(32) || $_self.isF32()">,
"32-bit integer or floating-point type">;
def OpA : Op<"op_for_CPred_containing_multiple_same_placeholder", []> {
def OpA : NS_Op<"op_for_CPred_containing_multiple_same_placeholder", []> {
let arguments = (ins I32OrF32:$x);
}
// CHECK-LABEL: OpA::verify
// CHECK: if (!((this->getOperation()->getOperand(0)->getType().isInteger(32) || this->getOperation()->getOperand(0)->getType().isF32())))
def OpB : Op<"op_for_AllOf_PredOpTrait", [
def OpB : NS_Op<"op_for_AllOf_PredOpTrait", [
PredOpTrait<"both first and second holds",
AllOf<[CPred<"first">, CPred<"second">]>>]> {
}
@ -20,7 +26,7 @@ def OpB : Op<"op_for_AllOf_PredOpTrait", [
// CHECK-LABEL: OpB::verify
// CHECK: if (!(((first)) && ((second))))
def OpC : Op<"op_for_TCopVTEtIs", [
def OpC : NS_Op<"op_for_TCopVTEtIs", [
PredOpTrait<"first operand has i32 element type",
TCopVTEtIs<0, I32>>]> {
let arguments = (ins Tensor:$x);
@ -30,7 +36,7 @@ def OpC : Op<"op_for_TCopVTEtIs", [
// CHECK: if (!((((*this->getOperation()).getNumOperands() > 0)) && (((*this->getOperation()).getOperand(0)->getType().isa<VectorOrTensorType>())) && (((*this->getOperation()).getOperand(0)->getType().cast<VectorOrTensorType>().getElementType().isInteger(32)))))
def OpD : Op<"op_for_TCOpVTEtIsSameAs", [
def OpD : NS_Op<"op_for_TCOpVTEtIsSameAs", [
PredOpTrait<"first operand is a vector or tensor with the same "
"elemental type as itself",
TCopVTEtIsSameAs<0, 0>>]> {
@ -42,7 +48,7 @@ def OpD : Op<"op_for_TCOpVTEtIsSameAs", [
// CHECK-NEXT: return emitOpError("failed to verify that first operand is a vector or tensor with the same elemental type as itself");
def OpE : Op<"op_for_TCresVTEtIsSameAsOp", [
def OpE : NS_Op<"op_for_TCresVTEtIsSameAsOp", [
PredOpTrait<"first operand is a vector or tensor with the same "
"elemental type as first result",
TCresVTEtIsSameAsOp<0, 0>>]> {
@ -55,7 +61,7 @@ def OpE : Op<"op_for_TCresVTEtIsSameAsOp", [
// CHECK-NEXT: return emitOpError("failed to verify that first operand is a vector or tensor with the same elemental type as first result");
def OpF : Op<"op_for_int_min_val", []> {
def OpF : NS_Op<"op_for_int_min_val", []> {
let arguments = (ins Confined<I32Attr, [IntMinValue<10>]>:$attr);
}
@ -63,7 +69,7 @@ def OpF : Op<"op_for_int_min_val", []> {
// CHECK: (tblgen_attr.cast<IntegerAttr>().getInt() >= 10)
// CHECK-SAME: return emitOpError("attribute 'attr' failed to satisfy constraint: 32-bit integer attribute whose minimal value is 10");
def OpG : Op<"op_for_arr_min_count", []> {
def OpG : NS_Op<"op_for_arr_min_count", []> {
let arguments = (ins Confined<ArrayAttr, [ArrayMinCount<8>]>:$attr);
}
@ -71,7 +77,7 @@ def OpG : Op<"op_for_arr_min_count", []> {
// CHECK: (tblgen_attr.cast<ArrayAttr>().size() >= 8)
// CHECK-SAME: return emitOpError("attribute 'attr' failed to satisfy constraint: array attribute with at least 8 elements");
def OpH : Op<"op_for_arr_value_at_index", []> {
def OpH : NS_Op<"op_for_arr_value_at_index", []> {
let arguments = (ins Confined<ArrayAttr, [IntArrayNthElemEq<0, 8>]>:$attr);
}
@ -79,7 +85,7 @@ def OpH : Op<"op_for_arr_value_at_index", []> {
// CHECK: (((tblgen_attr.cast<ArrayAttr>().size() > 0)) && ((tblgen_attr.cast<ArrayAttr>().getValue()[0].cast<IntegerAttr>().getInt() == 8)))))
// CHECK-SAME: return emitOpError("attribute 'attr' failed to satisfy constraint: array attribute whose 0-th element must be 8");
def OpI: Op<"op_for_arr_min_value_at_index", []> {
def OpI: NS_Op<"op_for_arr_min_value_at_index", []> {
let arguments = (ins Confined<ArrayAttr, [IntArrayNthElemMinValue<0, 8>]>:$attr);
}

View File

@ -5,7 +5,13 @@
include "mlir/IR/OpBase.td"
#endif // OP_BASE
def X_AddOp : Op<"x.add">,
def X_Dialect : Dialect {
let name = "x";
}
class X_Op<string mnemonic, list<OpTrait> traits = []> :
Op<X_Dialect, mnemonic, traits>;
def X_AddOp : X_Op<"add">,
Arguments<(ins Tensor:$A, Tensor:$B)>,
Results<(outs Tensor: $C)> {
// TODO: extract referenceImplementation to Op.