@@ -23,9 +23,6 @@ include "mlir/Interfaces/InferTypeOpInterface.td"
 include "mlir/Interfaces/SideEffectInterfaces.td"
 include "mlir/IR/OpAsmInterface.td"
 
-class Linalgx_Op<string mnemonic, list<Trait> traits = []> :
-    Op<LinalgxDialect, mnemonic, traits>;
-
 // Base Tablegen class for Linalg ops.
 // Linalg ops that correspond to library calls operate on ShapedType as their
 // first operands. These may be optionally followed by non-view operands
@@ -315,27 +312,4 @@ def Linalgx_MultiBatchMatmulOp : LinalgxStructuredBase_Op<"multi_batch_matmul",
   }];
 }
 
-def Linalgx_ScaledDotProductAttentionOp
-    : Linalgx_Op<"scaled_dot_product_attention",
-          [AttrSizedOperandSegments,
-           DeclareOpInterfaceMethods<AggregatedOpInterface, ["decomposeOperation"]>]> {
-  let summary = "Attention structure.";
-  let description = [{
-    Q, K, V, attention_mask.
-    Output = SoftMax(Q @ K.transpose(-2, -1) + attention_mask) @ V.
-  }];
-  let arguments = (ins
-    Variadic<TensorOrMemref>:$inputs,
-    Variadic<TensorOrMemref>:$outputs);
-  let results = (outs Variadic<TensorOrMemref>:$results);
-
-  let hasVerifier = 1;
-  let assemblyFormat = [{
-    attr-dict
-    `ins` `(` $inputs `:` type($inputs) `)`
-    `outs` `(` $outputs `:` type($outputs) `)`
-    (`->` type($results)^)?
-  }];
-}
-
 #endif // LINALGX_STRUCTURED_OPS
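
Note: the removed scaled_dot_product_attention op documented its semantics as
Output = SoftMax(Q @ K.transpose(-2, -1) + attention_mask) @ V. For reference, a minimal
NumPy sketch of that formula follows; the function name and signature are illustrative
only and not part of the dialect, and no extra 1/sqrt(d) scaling is applied beyond what
the op's description stated.

import numpy as np

def scaled_dot_product_attention(q, k, v, attention_mask):
    # Attention scores: Q @ K.transpose(-2, -1) + attention_mask,
    # matching the formula in the removed op's description.
    scores = q @ np.swapaxes(k, -2, -1) + attention_mask
    # Numerically stable softmax over the last axis.
    scores = scores - scores.max(axis=-1, keepdims=True)
    weights = np.exp(scores)
    weights = weights / weights.sum(axis=-1, keepdims=True)
    # Weighted sum of the values.
    return weights @ v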