Bump to LLVM 17

xerpi committed Mar 29, 2023
1 parent 1f24e21 commit 1bc3d40

Showing 13 changed files with 100 additions and 42 deletions.
9 changes: 9 additions & 0 deletions include/scalehls/Dialect/HLS/Utils.h
@@ -424,6 +424,15 @@ using ReverseOpIteratorsMap =
using OpIteratorsMap =
DenseMap<PtrLikeMemRefAccess, SmallVector<Operation **, 16>>;

//===----------------------------------------------------------------------===//
// Printing
//===----------------------------------------------------------------------===//

/// Prints dimension and symbol list.
void printDimAndSymbolList(Operation::operand_iterator begin,
Operation::operand_iterator end, unsigned numDims,
OpAsmPrinter &printer);

} // namespace scalehls
} // namespace mlir

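
For orientation, a rough sketch of how a custom assembly printer might call this new helper; the op, its getAffineMap accessor, and the elided "map" attribute are hypothetical and not part of this commit:

void ExampleAffineOp::print(OpAsmPrinter &p) {
  // Print dimension operands in parentheses followed by symbol operands in
  // square brackets, e.g. "(%i, %j)[%N]".
  p << " ";
  printDimAndSymbolList((*this)->operand_begin(), (*this)->operand_end(),
                        getAffineMap().getNumDims(), p);
  p.printOptionalAttrDict((*this)->getAttrs(), /*elidedAttrs=*/{"map"});
}
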
4 changes: 4 additions & 0 deletions lib/Dialect/HLS/CMakeLists.txt
@@ -8,4 +8,8 @@ add_mlir_dialect_library(MLIRHLS
MLIRHLSEnumsIncGen
MLIRHLSAttributesIncGen
MLIRHLSInterfacesIncGen

LINK_LIBS PUBLIC
MLIRAffineAnalysis
MLIRAnalysis
)
23 changes: 14 additions & 9 deletions lib/Dialect/HLS/HLS.cpp
@@ -53,12 +53,15 @@ struct SimplifyDispatchOrTaskOutputs : public OpRewritePattern<OpType> {

// Identify output values that are used.
SmallVector<Value, 4> usedOutputs;
SmallVector<Type, 4> usedOutputTypes;
SmallVector<Value, 4> usedResults;
for (auto result : op.getResults())
if (result.use_empty()) {
hasUnusedPort = true;
} else {
usedOutputs.push_back(yield.getOperand(result.getResultNumber()));
auto out = yield.getOperand(result.getResultNumber());
usedOutputs.push_back(out);
usedOutputTypes.push_back(out.getType());
usedResults.push_back(result);
}

@@ -68,8 +71,8 @@ struct SimplifyDispatchOrTaskOutputs : public OpRewritePattern<OpType> {
rewriter.replaceOpWithNewOp<YieldOp>(yield, usedOutputs);

rewriter.setInsertionPoint(op);
auto newTask =
rewriter.create<OpType>(op.getLoc(), ValueRange(usedOutputs));
auto newTask = rewriter.create<OpType>(
op.getLoc(), TypeRange(usedOutputTypes), ValueRange(usedOutputs));
rewriter.inlineRegionBefore(op.getBody(), newTask.getBody(),
newTask.getBody().end());
for (auto t : llvm::zip(usedResults, newTask.getResults()))
@@ -186,7 +189,7 @@ LogicalResult ToStreamOp::verify() {
return success();
}

OpFoldResult ToStreamOp::fold(ArrayRef<Attribute>) {
OpFoldResult ToStreamOp::fold(FoldAdaptor adaptor) {
if (auto toValue = getValue().getDefiningOp<ToValueOp>())
if (toValue.getStream().getType() == getType())
return toValue.getStream();
@@ -200,7 +203,7 @@ LogicalResult ToValueOp::verify() {
return success();
}

OpFoldResult ToValueOp::fold(ArrayRef<Attribute>) {
OpFoldResult ToValueOp::fold(FoldAdaptor adaptor) {
if (auto toStream = getStream().getDefiningOp<ToStreamOp>())
if (toStream.getValue().getType() == getType())
return toStream.getValue();
@@ -831,7 +834,7 @@ LogicalResult BufferDevectorizeOp::verify() {
getInputType());
}

OpFoldResult BufferVectorizeOp::fold(ArrayRef<Attribute>) {
OpFoldResult BufferVectorizeOp::fold(FoldAdaptor adaptor) {
if (auto devectorize = getInput().getDefiningOp<BufferDevectorizeOp>())
if (devectorize.getInputType() == getType())
return devectorize.getInput();
@@ -1017,10 +1020,12 @@ void AffineSelectOp::getCanonicalizationPatterns(RewritePatternSet &results,
}

/// Canonicalize an affine if op's conditional (integer set + operands).
OpFoldResult AffineSelectOp::fold(ArrayRef<Attribute>) {
OpFoldResult AffineSelectOp::fold(FoldAdaptor adaptor) {
auto set = getIntegerSet();
SmallVector<Value, 4> operands(getArgs());
composeSetAndOperands(set, operands);
auto map = AffineMap::get(set.getNumDims(), set.getNumSymbols(),
set.getConstraints(), set.getContext());
fullyComposeAffineMapAndOperands(&map, &operands);
canonicalizeSetAndOperands(&set, &operands);
return {};
}
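
The fold hooks in this file switch from ArrayRef<Attribute> to the generated FoldAdaptor. As a rough illustration of the new signature (the op and operand names below are hypothetical, not taken from this commit):

OpFoldResult ExampleAddOp::fold(FoldAdaptor adaptor) {
  // Constant operands are reached through the adaptor's named getters
  // instead of by index into an ArrayRef<Attribute>.
  if (auto rhs = dyn_cast_or_null<IntegerAttr>(adaptor.getRhs()))
    if (rhs.getValue().isZero())
      return getLhs();
  return {};
}
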
@@ -1206,7 +1211,7 @@ PartitionLayoutAttr::verify(function_ref<InFlightDiagnostic()> emitError,
/// given array shape.
SmallVector<int64_t>
PartitionLayoutAttr::getActualFactors(ArrayRef<int64_t> shape) {
SmallVector<int64_t, 4> actualFactors;
SmallVector<int64_t, 6> actualFactors;
for (auto [size, kind, factor] : llvm::zip(shape, getKinds(), getFactors())) {
if (kind == PartitionKind::BLOCK)
actualFactors.push_back((size + factor - 1) / factor);
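
Much of the churn in this file follows one pattern: result types are now collected alongside the yielded values and handed to the builder explicitly. A minimal sketch of the SimplifyDispatchOrTaskOutputs change above, wrapped in a free function invented for illustration:

template <typename OpType>
static OpType recreateWithExplicitTypes(PatternRewriter &rewriter, OpType op,
                                        ArrayRef<Value> usedOutputs) {
  SmallVector<Type, 4> usedOutputTypes;
  for (Value output : usedOutputs)
    usedOutputTypes.push_back(output.getType());
  // Result types are passed explicitly instead of being inferred from the
  // yielded values.
  return rewriter.create<OpType>(op.getLoc(), TypeRange(usedOutputTypes),
                                 ValueRange(usedOutputs));
}
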
54 changes: 40 additions & 14 deletions lib/Dialect/HLS/Utils.cpp
@@ -84,8 +84,9 @@ DispatchOp scalehls::dispatchBlock(Block *block) {

OpBuilder builder(block, block->begin());
ValueRange returnValues(block->getTerminator()->getOperands());
TypeRange returnTypes(block->getTerminator()->getOperandTypes());
auto loc = builder.getUnknownLoc();
auto dispatch = builder.create<DispatchOp>(loc, returnValues);
auto dispatch = builder.create<DispatchOp>(loc, returnTypes);

auto &dispatchBlock = dispatch.getBody().emplaceBlock();
builder.setInsertionPointToEnd(&dispatchBlock);
@@ -112,20 +113,24 @@ TaskOp scalehls::fuseOpsIntoTask(ArrayRef<Operation *> ops,
// Collect output values. This is not sufficient and may lead to empty-used
// outputs, which will be removed during canonicalization.
llvm::SetVector<Value> outputValues;
for (auto op : ops)
for (auto result : op->getResults())
llvm::SetVector<Type> outputTypes;
for (auto op : ops) {
for (auto result : op->getResults()) {
if (llvm::any_of(result.getUsers(),
[&](Operation *user) { return !opsSet.count(user); }))
[&](Operation *user) { return !opsSet.count(user); })) {
outputValues.insert(result);
outputTypes.insert(result.getType());
}
}
}

// Create new graph task with all inputs and outputs.
auto loc = rewriter.getUnknownLoc();
if (!insertToLastOp)
rewriter.setInsertionPoint(ops.front());
else
rewriter.setInsertionPoint(ops.back());
auto task =
rewriter.create<TaskOp>(loc, ValueRange(outputValues.getArrayRef()));
auto task = rewriter.create<TaskOp>(loc, outputTypes.getArrayRef());
auto taskBlock = rewriter.createBlock(&task.getBody());

// Move each targeted op into the new graph task.
@@ -159,26 +164,36 @@ NodeOp scalehls::fuseNodeOps(ArrayRef<NodeOp> nodes,

// Collect inputs, outputs, and params of the new node.
llvm::SetVector<Value> inputs;
llvm::SmallVector<Type> inputTypes;
llvm::SmallVector<unsigned, 8> inputTaps;
llvm::SmallVector<Location, 8> inputLocs;
llvm::SetVector<Value> outputs;
llvm::SmallVector<Type> outputTypes;
llvm::SmallVector<Location, 8> outputLocs;
llvm::SetVector<Value> params;
llvm::SmallVector<Type> paramTypes;
llvm::SmallVector<Location, 8> paramLocs;

for (auto node : nodes) {
for (auto output : node.getOutputs())
if (outputs.insert(output))
for (auto output : node.getOutputs()) {
if (outputs.insert(output)) {
outputTypes.push_back(output.getType());
outputLocs.push_back(output.getLoc());
for (auto param : node.getParams())
if (params.insert(param))
}
}
for (auto param : node.getParams()) {
if (params.insert(param)) {
paramTypes.push_back(param.getType());
paramLocs.push_back(param.getLoc());
}
}
}
for (auto node : nodes)
for (auto input : llvm::enumerate(node.getInputs())) {
if (outputs.count(input.value()))
continue;
if (inputs.insert(input.value())) {
inputTypes.push_back(input.value().getType());
inputLocs.push_back(input.value().getLoc());
inputTaps.push_back(node.getInputTap(input.index()));
}
@@ -190,9 +205,9 @@ NodeOp scalehls::fuseNodeOps(ArrayRef<NodeOp> nodes,
rewriter.getUnknownLoc(), inputs.getArrayRef(), outputs.getArrayRef(),
params.getArrayRef(), inputTaps);
auto block = rewriter.createBlock(&newNode.getBody());
block->addArguments(ValueRange(inputs.getArrayRef()), inputLocs);
block->addArguments(ValueRange(outputs.getArrayRef()), outputLocs);
block->addArguments(ValueRange(params.getArrayRef()), paramLocs);
block->addArguments(inputTypes, inputLocs);
block->addArguments(outputTypes, outputLocs);
block->addArguments(paramTypes, paramLocs);

// Inline all nodes into the new node.
for (auto node : nodes) {
@@ -644,7 +659,9 @@ std::pair<bool, bool> scalehls::ifAlwaysTrueOrFalse(mlir::AffineIfOp ifOp) {
while (llvm::any_of(operands, [](Value v) {
return isa_and_nonnull<AffineApplyOp>(v.getDefiningOp());
})) {
composeSetAndOperands(set, operands);
auto map = AffineMap::get(set.getNumDims(), set.getNumSymbols(),
set.getConstraints(), set.getContext());
fullyComposeAffineMapAndOperands(&map, &operands);
}

// Replace the original integer set and operands with the composed integer
@@ -1192,3 +1209,12 @@ bool PtrLikeMemRefAccess::operator==(const PtrLikeMemRefAccess &rhs) const {
return llvm::all_of(diff.getAffineMap().getResults(),
[](AffineExpr e) { return e == 0; });
}

void scalehls::printDimAndSymbolList(Operation::operand_iterator begin,
Operation::operand_iterator end,
unsigned numDims, OpAsmPrinter &printer) {
OperandRange operands(begin, end);
printer << '(' << operands.take_front(numDims) << ')';
if (operands.size() > numDims)
printer << '[' << operands.drop_front(numDims) << ']';
}
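
The block-argument updates in dispatchBlock, fuseOpsIntoTask, and fuseNodeOps all reduce to the same move: Block::addArguments is given a TypeRange plus matching locations, so types and locations are gathered next to the values they came from. A compact sketch of that bookkeeping, with a helper invented for illustration:

static void addBlockArgsFor(Block *block, ArrayRef<Value> values) {
  SmallVector<Type> types;
  SmallVector<Location> locs;
  for (Value value : values) {
    types.push_back(value.getType());
    locs.push_back(value.getLoc());
  }
  // One block argument per collected value, with its type and location
  // stated explicitly.
  block->addArguments(types, locs);
}
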
4 changes: 3 additions & 1 deletion lib/Transforms/Dataflow/EliminateMultiConsumer.cpp
@@ -34,6 +34,7 @@ struct InsertForkNode : public OpRewritePattern<NodeOp> {
hasChanged = true;
rewriter.setInsertionPointAfter(node);
SmallVector<Value> buffers;
SmallVector<Type> bufferTypes;
SmallVector<Location> bufferLocs;

// Insert a buffer for each consumer.
@@ -42,14 +43,15 @@ struct InsertForkNode : public OpRewritePattern<NodeOp> {
output.replaceUsesWithIf(
buffer, [&](OpOperand &use) { return use.getOwner() == consumer; });
buffers.push_back(buffer);
bufferTypes.push_back(buffer.getType());
bufferLocs.push_back(loc);
}

// Create a new fork node.
auto fork = rewriter.create<NodeOp>(loc, output, buffers);
auto block = rewriter.createBlock(&fork.getBody());
auto outputArg = block->addArgument(output.getType(), output.getLoc());
auto bufferArgs = block->addArguments(ValueRange(buffers), bufferLocs);
auto bufferArgs = block->addArguments(bufferTypes, bufferLocs);

// Create explicit copy from the original output to the buffers.
rewriter.setInsertionPointToStart(block);
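
A condensed view of the fork-node construction above, with the new type bookkeeping in one place; the helper signature is invented, while the NodeOp builder call mirrors the one in the patch:

static NodeOp createForkNode(PatternRewriter &rewriter, Location loc,
                             Value output, ArrayRef<Value> buffers) {
  SmallVector<Type> bufferTypes;
  SmallVector<Location> bufferLocs;
  for (Value buffer : buffers) {
    bufferTypes.push_back(buffer.getType());
    bufferLocs.push_back(loc);
  }
  // The fork node reads `output` and writes one buffer per consumer; its
  // entry block mirrors that interface with explicitly typed arguments.
  auto fork = rewriter.create<NodeOp>(loc, output, buffers);
  auto block = rewriter.createBlock(&fork.getBody());
  block->addArgument(output.getType(), output.getLoc());
  block->addArguments(bufferTypes, bufferLocs);
  return fork;
}
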
2 changes: 1 addition & 1 deletion lib/Transforms/Dataflow/LegalizeDataflow.cpp
@@ -199,7 +199,7 @@ struct LegalizeDataflow : public LegalizeDataflowBase<LegalizeDataflow> {
auto frozenPatterns = FrozenRewritePatternSet(std::move(patterns));

func.walk([&](ScheduleOp schedule) {
(void)applyOpPatternsAndFold(schedule, frozenPatterns);
(void)applyOpPatternsAndFold(schedule.getOperation(), frozenPatterns);

if (llvm::all_of(schedule.getOps<NodeOp>(),
[](NodeOp node) { return node.getLevel(); }))
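
The only change here is how the op reaches the greedy driver: the wrapper class is unwrapped to its underlying Operation* first. In isolation, assuming the frozen pattern set from the surrounding code:

func.walk([&](ScheduleOp schedule) {
  // Hand the raw Operation* to the driver rather than the ScheduleOp wrapper.
  (void)applyOpPatternsAndFold(schedule.getOperation(), frozenPatterns);
});
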
17 changes: 13 additions & 4 deletions lib/Transforms/Dataflow/LowerDataflow.cpp
@@ -28,13 +28,15 @@ struct LowerDispatchToSchedule : public OpRewritePattern<DispatchOp> {
};

SmallVector<Value, 8> inputs;
SmallVector<Type, 8> inputTypes;
SmallVector<Location, 8> inputLocs;

auto liveins = Liveness(dispatch).getLiveIn(&dispatch.getBody().front());
for (auto livein : liveins) {
if (dispatch.getBody().isAncestor(livein.getParentRegion()))
continue;
inputs.push_back(livein);
inputTypes.push_back(livein.getType());
inputLocs.push_back(livein.getLoc());
}

@@ -43,7 +45,8 @@ struct LowerDispatchToSchedule : public OpRewritePattern<DispatchOp> {
rewriter.create<ScheduleOp>(rewriter.getUnknownLoc(), inputs);
auto scheduleBlock = rewriter.createBlock(&schedule.getBody());

auto inputArgs = scheduleBlock->addArguments(ValueRange(inputs), inputLocs);
auto inputArgs =
scheduleBlock->addArguments(TypeRange(inputTypes), inputLocs);
for (auto t : llvm::zip(inputs, inputArgs))
std::get<0>(t).replaceUsesWithIf(std::get<1>(t), isInDispatch);

@@ -72,10 +75,13 @@ struct LowerTaskToNode : public OpRewritePattern<TaskOp> {
};

SmallVector<Value, 8> inputs;
SmallVector<Type, 8> inputTypes;
SmallVector<Location, 8> inputLocs;
SmallVector<Value, 8> outputs;
SmallVector<Type, 8> outputTypes;
SmallVector<Location, 8> outputLocs;
SmallVector<Value, 8> params;
SmallVector<Type, 8> paramTypes;
SmallVector<Location, 8> paramLocs;

auto liveins = Liveness(task).getLiveIn(&task.getBody().front());
@@ -87,13 +93,16 @@ struct LowerTaskToNode : public OpRewritePattern<TaskOp> {
auto uses = llvm::make_filter_range(livein.getUses(), isInTask);
if (llvm::any_of(uses, [](OpOperand &use) { return isWritten(use); })) {
outputs.push_back(livein);
outputTypes.push_back(livein.getType());
outputLocs.push_back(livein.getLoc());
} else {
inputs.push_back(livein);
inputTypes.push_back(livein.getType());
inputLocs.push_back(livein.getLoc());
}
} else {
params.push_back(livein);
paramTypes.push_back(livein.getType());
paramLocs.push_back(livein.getLoc());
}
}
@@ -103,16 +112,16 @@ struct LowerTaskToNode : public OpRewritePattern<TaskOp> {
outputs, params);
auto nodeBlock = rewriter.createBlock(&node.getBody());

auto inputArgs = nodeBlock->addArguments(ValueRange(inputs), inputLocs);
auto inputArgs = nodeBlock->addArguments(TypeRange(inputTypes), inputLocs);
for (auto t : llvm::zip(inputs, inputArgs))
std::get<0>(t).replaceUsesWithIf(std::get<1>(t), isInTask);

auto outputArgs =
node.getBody().addArguments(ValueRange(outputs), outputLocs);
node.getBody().addArguments(TypeRange(outputTypes), outputLocs);
for (auto t : llvm::zip(outputs, outputArgs))
std::get<0>(t).replaceUsesWithIf(std::get<1>(t), isInTask);

auto paramArgs = nodeBlock->addArguments(ValueRange(params), paramLocs);
auto paramArgs = nodeBlock->addArguments(TypeRange(paramTypes), paramLocs);
for (auto t : llvm::zip(params, paramArgs))
std::get<0>(t).replaceUsesWithIf(std::get<1>(t), isInTask);

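
The three value/type/location triples above are kept in lockstep by hand. A small helper (not part of the patch) makes the intended invariant explicit; the classification condition itself is collapsed in the diff, so the usage comments below are indicative only:

// Record a live-in value together with the type and location needed later
// for the node's block arguments.
static void record(Value value, SmallVectorImpl<Value> &values,
                   SmallVectorImpl<Type> &types,
                   SmallVectorImpl<Location> &locs) {
  values.push_back(value);
  types.push_back(value.getType());
  locs.push_back(value.getLoc());
}
// Indicative usage inside LowerTaskToNode:
//   record(livein, outputs, outputTypes, outputLocs); // written live-ins
//   record(livein, inputs, inputTypes, inputLocs);    // read-only live-ins
//   record(livein, params, paramTypes, paramLocs);    // remaining live-ins
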
6 changes: 1 addition & 5 deletions lib/Transforms/Dataflow/PlaceDataflowBuffer.cpp
@@ -111,13 +111,9 @@ struct PlaceDataflowBuffer
auto func = getOperation();
auto context = func.getContext();

llvm::outs() << "1\n";

mlir::RewritePatternSet patterns(context);
patterns.add<PlaceBuffer>(context, threshold, placeExternalBuffer);
(void)applyOpPatternsAndFold(func, std::move(patterns));

llvm::outs() << "2\n";
(void)applyOpPatternsAndFold(func.getOperation(), std::move(patterns));

patterns.clear();
patterns.add<HoistDramBuffer>(context);
2 changes: 1 addition & 1 deletion lib/Transforms/Directive/CreateAxiInterface.cpp
@@ -57,7 +57,7 @@ struct CreateAxiInterface : public CreateAxiInterfaceBase<CreateAxiInterface> {
auto vectorize = cast<BufferVectorizeOp>(*buffer.user_begin());
vectorize->remove();
builder.insert(vectorize);
return vectorize.getResult();
return cast<Value>(vectorize.getResult());
}
return buffer;
};
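
The single-line change in this file spells out the OpResult-to-Value conversion so that both return paths of the enclosing lambda agree on Value. A sketch of the surrounding shape; the guard condition is assumed, since the lambda is only partially visible above:

auto passThroughOrVectorize = [&](Value buffer) -> Value {
  if (buffer.hasOneUse() && isa<BufferVectorizeOp>(*buffer.user_begin())) {
    auto vectorize = cast<BufferVectorizeOp>(*buffer.user_begin());
    vectorize->remove();
    builder.insert(vectorize);
    // Make the OpResult -> Value conversion explicit.
    return cast<Value>(vectorize.getResult());
  }
  return buffer;
};
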
9 changes: 7 additions & 2 deletions lib/Transforms/FuncDuplication.cpp
@@ -25,6 +25,7 @@ struct SubViewSinkPattern : public OpRewritePattern<func::CallOp> {
assert(func && "function definition not found");

SmallVector<Value, 16> newInputs;
SmallVector<Type, 16> newInputTypes;
bool hasChanged = false;
for (auto operand : call->getOperands()) {
if (auto subview = operand.getDefiningOp<memref::SubViewOp>()) {
@@ -46,13 +47,17 @@ }
}

newInputs.append(subview.operand_begin(), subview.operand_end());
newInputTypes.append(subview.getOperandTypes().begin(),
subview.getOperandTypes().end());
hasChanged = true;
} else
} else {
newInputs.push_back(operand);
newInputTypes.push_back(operand.getType());
}
}

if (hasChanged) {
func.setType(rewriter.getFunctionType(ValueRange(newInputs),
func.setType(rewriter.getFunctionType(TypeRange(newInputTypes),
func.getResultTypes()));
rewriter.setInsertionPoint(call);
rewriter.replaceOpWithNewOp<func::CallOp>(call, func, newInputs);
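
As elsewhere in the commit, the callee's signature is rebuilt from collected operand types rather than from the operand values themselves. A condensed restatement of the update step, assuming newInputs was filled as shown above:

SmallVector<Type, 16> newInputTypes;
for (Value input : newInputs)
  newInputTypes.push_back(input.getType());
// Keep the callee signature and the rewritten call site consistent: the
// function type is built from the collected types, the call from the values.
func.setType(rewriter.getFunctionType(TypeRange(newInputTypes),
                                      func.getResultTypes()));
rewriter.replaceOpWithNewOp<func::CallOp>(call, func, newInputs);
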
(The remaining changed files are not rendered on this page.)
