@@ -3,6 +3,7 @@
 
 #include "NeuraDialect/Mapping/Mapping.h"
 #include "NeuraDialect/Mapping/MappingState.h"
+#include "NeuraDialect/NeuraAttributes.h"
 #include <climits>
 #include <set>
 
@@ -21,14 +22,14 @@ class HeuristicMapping : public Mapping {
 
   std::string getName() const override {
     if (max_location_to_try == 1 && max_backtrack_depth == 1) {
-      return "simple";
+      return attr::val::kSimple.str();
     } else if (max_location_to_try == INT_MAX && max_backtrack_depth == 1) {
-      return "greedy";
+      return attr::val::kGreedy.str();
     } else if (max_location_to_try == INT_MAX &&
                max_backtrack_depth == INT_MAX) {
-      return "exhaustive";
+      return attr::val::kExhaustive.str();
     } else {
-      return "customized";
+      return attr::val::kCustomized.str();
     }
   }
 
62 changes: 62 additions & 0 deletions include/NeuraDialect/NeuraAttributes.h
@@ -0,0 +1,62 @@
+#pragma once
+
+#include "llvm/ADT/StringRef.h"
+
+namespace mlir {
+namespace neura {
+
+namespace attr {
+
+// Attribute Keys
+
+// Specifies the dataflow representation mode, as opposed to control-flow.
+constexpr llvm::StringLiteral kDataflowMode = "dataflow_mode";
+
+// Specifies the mapping strategy mode, can be either 'spatial-only' or
+// 'spatial-temporal'.
+constexpr llvm::StringLiteral kMappingMode = "mapping_mode";
+
+constexpr llvm::StringLiteral kMappingStrategy = "mapping_strategy";
+constexpr llvm::StringLiteral kBacktrackConfig = "backtrack_config";
+constexpr llvm::StringLiteral kDumpMappingTable = "dump_mapping_table";
+
+// Identification & Results
+constexpr llvm::StringLiteral kDfgId = "dfg_id";
+constexpr llvm::StringLiteral kMappingInfo = "mapping_info";
+constexpr llvm::StringLiteral kXTiles = "x_tiles";
+constexpr llvm::StringLiteral kYTiles = "y_tiles";
+constexpr llvm::StringLiteral kCompiledII = "compiled_ii";
+constexpr llvm::StringLiteral kRecMII = "rec_mii";
+constexpr llvm::StringLiteral kResMII = "res_mii";
+
+// Values & Constants Keys
+constexpr llvm::StringLiteral kValue = "value";
+constexpr llvm::StringLiteral kConstantValue = "constant_value";
+constexpr llvm::StringLiteral kRhsValue = "rhs_value";
+constexpr llvm::StringLiteral kLhsValue = "lhs_value";
+
+// Attribute Values & Constants
+namespace val {
+// Strategy & Mode
+constexpr llvm::StringLiteral kSpatialOnly = "spatial-only";
+constexpr llvm::StringLiteral kSpatialTemporal = "spatial-temporal";
+constexpr llvm::StringLiteral kTemporal = "temporal";
+constexpr llvm::StringLiteral kHeuristic = "heuristic";
+constexpr llvm::StringLiteral kCustomized = "customized";
+constexpr llvm::StringLiteral kSimple = "simple";
+constexpr llvm::StringLiteral kGreedy = "greedy";
+constexpr llvm::StringLiteral kExhaustive = "exhaustive";
+
+// Identifiers
+constexpr llvm::StringLiteral kModeSteering = "steering";
+constexpr llvm::StringLiteral kModePredicate = "predicate";
+
+// Operation Logic
+constexpr llvm::StringLiteral kOpFused = "fused_op";
+constexpr llvm::StringLiteral kNeuraFusedOp = "neura.fused_op";
+
+} // namespace val
+
+} // namespace attr
+} // namespace neura
+} // namespace mlir
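
For reference, a minimal usage sketch, not part of this diff: it shows how the new key/value constants are meant to replace raw string literals when querying function attributes. The idea of a kMappingStrategy attribute holding a kHeuristic value is an assumption for illustration; only the constant names come from this header.

#include "NeuraDialect/NeuraAttributes.h"
#include "mlir/Dialect/Func/IR/FuncOps.h"

using namespace mlir;
using namespace mlir::neura;

// Sketch only: checks whether a function requests the heuristic mapping
// strategy, using the constants above instead of hard-coded strings.
static bool usesHeuristicMapping(func::FuncOp func) {
  auto strategy = func->getAttrOfType<StringAttr>(attr::kMappingStrategy);
  return strategy && strategy.getValue() == attr::val::kHeuristic;
}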
11 changes: 7 additions & 4 deletions lib/NeuraDialect/Transforms/CanonicalizeCastPass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "mlir/Dialect/Func/IR/FuncOps.h"
 #include "mlir/Dialect/LLVMIR/LLVMDialect.h"
@@ -113,14 +114,16 @@ struct CanonicalizeCastPass
     module_op.walk([&](Operation *op) {
       Region *region = nullptr;
       if (auto func_op = dyn_cast<func::FuncOp>(op)) {
-        auto accel_attr = func_op->getAttrOfType<StringAttr>("accelerator");
-        if (!accel_attr || accel_attr.getValue() != "neura") {
+        auto accel_attr =
+            func_op->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+        if (!accel_attr || accel_attr.getValue() != accel::kNeuraTarget) {
           return;
         }
         region = &func_op.getBody();
       } else if (auto llvm_func = dyn_cast<LLVM::LLVMFuncOp>(op)) {
-        auto accel_attr = llvm_func->getAttrOfType<StringAttr>("accelerator");
-        if (!accel_attr || accel_attr.getValue() != "neura") {
+        auto accel_attr =
+            llvm_func->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+        if (!accel_attr || accel_attr.getValue() != accel::kNeuraTarget) {
           return;
         }
         region = &llvm_func.getBody();
11 changes: 7 additions & 4 deletions lib/NeuraDialect/Transforms/CanonicalizeLiveInPass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraDialect.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "mlir/Dialect/Func/IR/FuncOps.h"
@@ -783,14 +784,16 @@ struct CanonicalizeLiveInPass
     module_op.walk([&](Operation *op) {
       Region *region = nullptr;
       if (auto func_op = dyn_cast<func::FuncOp>(op)) {
-        auto accel_attr = func_op->getAttrOfType<StringAttr>("accelerator");
-        if (!accel_attr || accel_attr.getValue() != "neura") {
+        auto accel_attr =
+            func_op->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+        if (!accel_attr || accel_attr.getValue() != accel::kNeuraTarget) {
           return;
         }
         region = &func_op.getBody();
       } else if (auto llvm_func = dyn_cast<LLVM::LLVMFuncOp>(op)) {
-        auto accel_attr = llvm_func->getAttrOfType<StringAttr>("accelerator");
-        if (!accel_attr || accel_attr.getValue() != "neura") {
+        auto accel_attr =
+            llvm_func->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+        if (!accel_attr || accel_attr.getValue() != accel::kNeuraTarget) {
           return;
         }
         region = &llvm_func.getBody();
4 changes: 3 additions & 1 deletion lib/NeuraDialect/Transforms/CanonicalizeReturnPass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraDialect.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "NeuraDialect/NeuraPasses.h"
@@ -191,7 +192,8 @@ struct CanonicalizeReturnPass
   void runOnOperation() override {
     func::FuncOp func_op = getOperation();
     // Checks for neura accelerator attribute.
-    auto accel_attr = func_op->getAttrOfType<StringAttr>("accelerator");
+    auto accel_attr =
+        func_op->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
     if (!accel_attr) {
       return;
     }
28 changes: 15 additions & 13 deletions lib/NeuraDialect/Transforms/GenerateCodePass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "mlir/Pass/Pass.h"
 #include "mlir/IR/BuiltinOps.h"
 #include "mlir/IR/Operation.h"
@@ -23,6 +24,7 @@
 
 #include "NeuraDialect/Architecture/Architecture.h"
 #include "NeuraDialect/NeuraOps.h"
+#include "NeuraDialect/NeuraAttributes.h"
 
 using namespace mlir;
 using namespace neura;
@@ -204,21 +206,21 @@ static std::string extractConstantLiteralFromAttr(Attribute attr) {
 // Literals for CONSTANT operations, e.g. "#10" / "#0" / "#3.0".
 static std::string getConstantLiteral(Operation *op) {
   if (isConstant(op)) {
-    if (auto value_attr = op->getAttr("value")) {
+    if (auto value_attr = op->getAttr(attr::kValue)) {
       std::string result = extractConstantLiteralFromAttr(value_attr);
       if (!result.empty()) return result;
     }
     return "#0";
   }
 
   // Checks for constant_value attribute in non-CONSTANT operations.
-  if (auto constant_value_attr = op->getAttr("constant_value")) {
+  if (auto constant_value_attr = op->getAttr(attr::kConstantValue)) {
     std::string result = extractConstantLiteralFromAttr(constant_value_attr);
     if (!result.empty()) return result;
   }
 
   // Checks for rhs_value attribute (for binary operations with constant RHS).
-  if (auto rhs_value_attr = op->getAttr("rhs_value")) {
+  if (auto rhs_value_attr = op->getAttr(attr::kRhsValue)) {
     std::string result = extractConstantLiteralFromAttr(rhs_value_attr);
     if (!result.empty()) return result;
   }
@@ -410,16 +412,16 @@ struct GenerateCodePass
 
   std::pair<int, int> getArrayDimensions(func::FuncOp function) {
     int columns = 4, rows = 4; // default 4x4 CGRA.
-    if (auto mapping_info = function->getAttrOfType<DictionaryAttr>("mapping_info")) {
-      if (auto x_tiles = dyn_cast_or_null<IntegerAttr>(mapping_info.get("x_tiles"))) columns = x_tiles.getInt();
-      if (auto y_tiles = dyn_cast_or_null<IntegerAttr>(mapping_info.get("y_tiles"))) rows = y_tiles.getInt();
+    if (auto mapping_info = function->getAttrOfType<DictionaryAttr>(attr::kMappingInfo)) {
+      if (auto x_tiles = dyn_cast_or_null<IntegerAttr>(mapping_info.get(attr::kXTiles))) columns = x_tiles.getInt();
+      if (auto y_tiles = dyn_cast_or_null<IntegerAttr>(mapping_info.get(attr::kYTiles))) rows = y_tiles.getInt();
     }
     return {columns, rows};
   }
 
   int getCompiledII(func::FuncOp function) {
-    if (auto mapping_info = function->getAttrOfType<DictionaryAttr>("mapping_info")) {
-      if (auto compiled_ii = dyn_cast_or_null<IntegerAttr>(mapping_info.get("compiled_ii"))) {
+    if (auto mapping_info = function->getAttrOfType<DictionaryAttr>(attr::kMappingInfo)) {
+      if (auto compiled_ii = dyn_cast_or_null<IntegerAttr>(mapping_info.get(attr::kCompiledII))) {
         return compiled_ii.getInt();
       }
     }
@@ -510,7 +512,7 @@ struct GenerateCodePass
 
       if (isConstant(op)) {
         inst.src_operands.emplace_back(getConstantLiteral(op), "RED");
-      } else if (op->getAttr("constant_value")) {
+      } else if (op->getAttr(attr::kConstantValue)) {
         // Checks if operation has constant_value attribute (for non-CONSTANT operations).
         inst.src_operands.emplace_back(getConstantLiteral(op), "RED");
       } else {
@@ -524,7 +526,7 @@
       }
 
       // Handles cases where binary operations have the RHS constant stored as an attribute.
-      if (auto rhs_value_attr = op->getAttr("rhs_value")) {
+      if (auto rhs_value_attr = op->getAttr(attr::kRhsValue)) {
        std::string rhs_literal = extractConstantLiteralFromAttr(rhs_value_attr);
        if (!rhs_literal.empty()) {
          inst.src_operands.emplace_back(rhs_literal, "RED");
@@ -933,7 +935,7 @@ struct GenerateCodePass
 
   // Helper to extract dfg_id from operation.
   static int getDfgId(Operation *op) {
-    if (auto id_attr = op->getAttrOfType<IntegerAttr>("dfg_id")) {
+    if (auto id_attr = op->getAttrOfType<IntegerAttr>(attr::kDfgId)) {
       return id_attr.getInt();
     }
     return -1;
@@ -1664,8 +1666,8 @@ struct GenerateCodePass
     ModuleOp module = getOperation();
 
     for (auto func : module.getOps<func::FuncOp>()) {
-      auto accel = func->getAttrOfType<StringAttr>("accelerator");
-      if (!accel || accel.getValue() != "neura") continue;
+      auto accel = func->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+      if (!accel || accel.getValue() != accel::kNeuraTarget) continue;
 
       auto [columns, rows] = getArrayDimensions(func);
       Topology topo = getTopologyFromArchitecture(columns, rows);
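For context, a hypothetical sketch, not taken from this PR, of how a pass might attach the mapping_info dictionary that getArrayDimensions and getCompiledII read above. Only the key names (kMappingInfo, kXTiles, kYTiles, kCompiledII) come from NeuraAttributes.h; the helper name, builder code, and values are illustrative assumptions.

#include "NeuraDialect/NeuraAttributes.h"
#include "mlir/Dialect/Func/IR/FuncOps.h"
#include "mlir/IR/Builders.h"

using namespace mlir;
using namespace mlir::neura;

// Sketch only: attaches a mapping_info dictionary using the new keys.
static void attachMappingInfo(func::FuncOp func, int x_tiles, int y_tiles,
                              int compiled_ii) {
  OpBuilder builder(func.getContext());
  SmallVector<NamedAttribute> entries = {
      builder.getNamedAttr(attr::kXTiles, builder.getI32IntegerAttr(x_tiles)),
      builder.getNamedAttr(attr::kYTiles, builder.getI32IntegerAttr(y_tiles)),
      builder.getNamedAttr(attr::kCompiledII,
                           builder.getI32IntegerAttr(compiled_ii))};
  func->setAttr(attr::kMappingInfo, builder.getDictionaryAttr(entries));
}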
6 changes: 4 additions & 2 deletions lib/NeuraDialect/Transforms/GraphMining/GraMi.cpp
@@ -1,3 +1,5 @@
+#include "Common/AcceleratorAttrs.h"
+#include "NeuraDialect/NeuraAttributes.h"
 #include "NeuraDialect/Transforms/GraphMining/GraMi.h"
 #include "NeuraDialect/Mapping/mapping_util.h"
 #include "mlir/Dialect/Func/IR/FuncOps.h"
@@ -204,13 +206,13 @@ std::vector<PatternWithSelectedInstances> GraMi::mineFrequentSubgraphs() {
   auto derive_label = [](mlir::Operation* op, const std::string& fallback_label) -> std::string {
     if (!op) return fallback_label;
     auto name = op->getName().getStringRef();
-    if (name.ends_with("fused_op") || name.contains("neura.fused_op")) {
+    if (name.ends_with(attr::val::kOpFused) || name.contains(attr::val::kNeuraFusedOp)) {
       if (auto attr = op->getAttr("pattern_name")) {
         if (auto str_attr = mlir::dyn_cast<mlir::StringAttr>(attr)) {
           return std::string("fused_op:") + str_attr.getValue().str();
         }
       }
-      return std::string("fused_op");
+      return std::string(attr::val::kOpFused);
     }
     return fallback_label;
   };
3 changes: 2 additions & 1 deletion lib/NeuraDialect/Transforms/InsertCtrlMovPass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraDialect.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "NeuraDialect/NeuraPasses.h"
@@ -20,7 +21,7 @@ struct InsertCtrlMovForNeuraOps : public RewritePattern {
 
   LogicalResult matchAndRewrite(Operation *op,
                                 PatternRewriter &rewriter) const override {
-    if (op->getDialect()->getNamespace() != "neura" ||
+    if (op->getDialect()->getNamespace() != accel::kNeuraTarget ||
         isa<neura::CtrlMovOp>(op)) {
       return failure();
     }
3 changes: 2 additions & 1 deletion lib/NeuraDialect/Transforms/InsertDataMovPass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraDialect.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "NeuraDialect/NeuraPasses.h"
@@ -22,7 +23,7 @@ struct InsertDataMovForNeuraOps : public RewritePattern {
 
   LogicalResult matchAndRewrite(Operation *op,
                                 PatternRewriter &rewriter) const override {
-    if (op->getDialect()->getNamespace() != "neura" ||
+    if (op->getDialect()->getNamespace() != accel::kNeuraTarget ||
         isa<neura::DataMovOp>(op)) {
       return failure();
     }
8 changes: 5 additions & 3 deletions lib/NeuraDialect/Transforms/LeveragePredicatedValuePass.cpp
@@ -1,3 +1,4 @@
+#include "Common/AcceleratorAttrs.h"
 #include "NeuraDialect/NeuraDialect.h"
 #include "NeuraDialect/NeuraOps.h"
 #include "NeuraDialect/NeuraPasses.h"
@@ -33,8 +34,9 @@ struct LeveragePredicatedValuePass
 
     // Processes each function.
     module.walk([&](FunctionOpInterface func) {
-      auto accel_attr = func->getAttrOfType<StringAttr>("accelerator");
-      if (!accel_attr || accel_attr.getValue() != "neura") {
+      auto accel_attr =
+          func->getAttrOfType<StringAttr>(accel::kAcceleratorAttr);
+      if (!accel_attr || accel_attr.getValue() != accel::kNeuraTarget) {
         return;
       }
       // Converts block argument types to predicated values.
@@ -107,7 +109,7 @@ struct LeveragePredicatedValuePass
   // Converts a single operation to use predicated values.
   LogicalResult applyPredicatedDataType(Operation *op) {
     // Skips if not a Neura op.
-    if (op->getDialect()->getNamespace() != "neura") {
+    if (op->getDialect()->getNamespace() != accel::kNeuraTarget) {
       return success();
     }
 