diff --git a/runtime/onert/core/include/ir/operation/Attention.h b/runtime/onert/core/include/ir/operation/Attention.h
index df5feb09b14..2ef519441f9 100644
--- a/runtime/onert/core/include/ir/operation/Attention.h
+++ b/runtime/onert/core/include/ir/operation/Attention.h
@@ -41,24 +41,10 @@ class Attention : public Operation
     POS = 10,
   };
 
-  struct Param
-  {
-    int layer_idx;
-  };
-
-public:
-  Attention(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
-            const Param &param);
+  Attention(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
 
-public:
   void accept(OperationVisitor &v) const override;
   OpCode opcode() const final { return OpCode::Attention; }
-
-public:
-  const Param &param() const { return _param; }
-
-private:
-  Param _param;
 };
 
 } // namespace onert::ir::operation
diff --git a/runtime/onert/core/src/ir/operation/Attention.cc b/runtime/onert/core/src/ir/operation/Attention.cc
index c77f38c29de..d33599ca1ea 100644
--- a/runtime/onert/core/src/ir/operation/Attention.cc
+++ b/runtime/onert/core/src/ir/operation/Attention.cc
@@ -22,9 +22,8 @@ namespace onert::ir::operation
 
 void Attention::accept(OperationVisitor &v) const { v.visit(*this); }
 
-Attention::Attention(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
-                     const Param &param)
-  : Operation{OperandConstraint::createExact(11u), inputs, outputs}, _param{param}
+Attention::Attention(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+  : Operation{OperandConstraint::createExact(11u), inputs, outputs}
 {
 }
 
diff --git a/runtime/onert/core/src/loader/CircleLoader.cc b/runtime/onert/core/src/loader/CircleLoader.cc
index c613003b027..f212826c0e9 100644
--- a/runtime/onert/core/src/loader/CircleLoader.cc
+++ b/runtime/onert/core/src/loader/CircleLoader.cc
@@ -436,7 +436,7 @@ void CircleLoader::loadAttention(const Operator *op, ir::Graph &subg)
 
   loadOperationIO(op, inputs, outputs);
 
-  std::unique_ptr<ir::operation::Attention> new_op(new ir::operation::Attention(inputs, outputs, {}));
+  std::unique_ptr<ir::operation::Attention> new_op(new ir::operation::Attention(inputs, outputs));
   subg.addOperation(std::move(new_op));
 }