Mirror of https://github.com/ldc-developers/ldc.git
irfunction/ScopeStack: Unify style, add comments (NFC)

Commit: d25dee7f89
Parent: 6215acc15a
2 changed files with 134 additions and 78 deletions
@@ -17,8 +17,8 @@
 #include <sstream>
 
 namespace {
-void executeCleanup(IRState *irs, CleanupScope& scope,
-    llvm::BasicBlock *sourceBlock, llvm::BasicBlock* continueWith
+void executeCleanup(IRState* irs, CleanupScope& scope,
+    llvm::BasicBlock* sourceBlock, llvm::BasicBlock* continueWith
 ) {
     if (scope.exitTargets.empty() || (
         scope.exitTargets.size() == 1 &&
@@ -59,7 +59,7 @@ void executeCleanup(IRState *irs, CleanupScope& scope,
         // And convert the BranchInst to the existing branch target to a
         // SelectInst so we can append the other cases to it.
         scope.endBlock->getTerminator()->eraseFromParent();
-        llvm::Value *sel = new llvm::LoadInst(scope.branchSelector, "",
+        llvm::Value* sel = new llvm::LoadInst(scope.branchSelector, "",
             scope.endBlock);
         llvm::SwitchInst::Create(
             sel,
@@ -90,7 +90,7 @@ void executeCleanup(IRState *irs, CleanupScope& scope,
     }
 
     // We don't know this branch target yet, so add it to the SwitchInst...
-    llvm::ConstantInt * const selectorVal = DtoConstUint(scope.exitTargets.size());
+    llvm::ConstantInt* const selectorVal = DtoConstUint(scope.exitTargets.size());
     llvm::cast<llvm::SwitchInst>(scope.endBlock->getTerminator())->addCase(
         selectorVal, continueWith);
 
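The hunks above adjust the machinery that lets one shared cleanup block continue to several different places: each predecessor stores a small integer (the branch selector) before branching into the cleanup, and the end of the cleanup dispatches on that integer, with the selector value being the index into exitTargets. The following self-contained sketch models only that control-flow shape in plain C++; it is an illustration, not LDC code, and the names (CleanupModel, enterWithSelector, addExitTarget) are invented for the example.

#include <cstdio>
#include <functional>
#include <vector>

// Model of one shared cleanup block: the body runs exactly once per entry,
// and a "branch selector" chooses which continuation runs afterwards.
// This mirrors the exitTargets vector: the selector value is the index.
struct CleanupModel {
    std::function<void()> body;                       // the cleanup code itself
    std::vector<std::function<void()>> exitTargets;   // continuations, indexed by selector

    // Corresponds to a predecessor storing its selector value and branching
    // to beginBlock: run the cleanup once, then dispatch at the end block.
    void enterWithSelector(size_t selector) {
        body();                        // shared cleanup body (emitted once)
        exitTargets.at(selector)();    // switch (selector) at the end block
    }

    // Adding a new way of leaving the scope appends a continuation and returns
    // the selector value predecessors must store, analogous to
    // DtoConstUint(scope.exitTargets.size()) followed by SwitchInst::addCase.
    size_t addExitTarget(std::function<void()> target) {
        exitTargets.push_back(std::move(target));
        return exitTargets.size() - 1;
    }
};

int main() {
    CleanupModel scopeCleanup;
    scopeCleanup.body = [] { std::puts("running destructor / finally body"); };

    // A normal fall-through exit and an early-return exit share the same body.
    size_t normalExit = scopeCleanup.addExitTarget([] { std::puts("continue after scope"); });
    size_t returnExit = scopeCleanup.addExitTarget([] { std::puts("branch to function epilogue"); });

    scopeCleanup.enterWithSelector(normalExit);
    scopeCleanup.enterWithSelector(returnExit);
}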
@@ -106,6 +106,9 @@ void executeCleanup(IRState *irs, CleanupScope& scope,
 }
 
 ScopeStack::~ScopeStack() {
+    // If there are still unresolved gotos left, it means that they were either
+    // down or "sideways" (i.e. down another branch) of the tree of all
+    // cleanup scopes, both of which are not allowed in D.
     if (!topLevelUnresolvedGotos.empty()) {
         for (std::vector<GotoJump>::iterator it = topLevelUnresolvedGotos.begin(),
                                              end = topLevelUnresolvedGotos.end();
@@ -130,7 +133,7 @@ void ScopeStack::runCleanups(
 
     if (targetScope == sourceScope) {
         // No cleanups to run, just branch to the next block.
-        llvm::BranchInst::Create(continueWith, irs->scopebb());
+        irs->ir->CreateBr(continueWith);
         return;
     }
 
@@ -140,7 +143,7 @@ void ScopeStack::runCleanups(
     // Update all the control flow in the cleanups to make sure we end up where
     // we want.
     for (CleanupCursor i = sourceScope; i-- > targetScope;) {
-        llvm::BasicBlock *nextBlock = (i > targetScope) ?
+        llvm::BasicBlock* nextBlock = (i > targetScope) ?
             cleanupScopes[i - 1].beginBlock : continueWith;
         executeCleanup(irs, cleanupScopes[i], irs->scopebb(), nextBlock);
     }
@@ -151,15 +154,18 @@ void ScopeStack::runAllCleanups(llvm::BasicBlock* continueWith) {
 }
 
 void ScopeStack::popCleanups(CleanupCursor targetScope) {
+    assert(targetScope <= currentCleanupScope());
     if (targetScope == currentCleanupScope()) return;
 
     for (CleanupCursor i = currentCleanupScope(); i-- > targetScope;) {
+        // Any gotos that are still unresolved necessarily leave this scope.
+        // Thus, the cleanup needs to be executed.
        for (std::vector<GotoJump>::iterator it = currentUnresolvedGotos().begin(),
                                             end = currentUnresolvedGotos().end();
             it != end; ++it
        ) {
            // Make the source resp. last cleanup branch to this one.
-           llvm::BasicBlock *tentative = it->tentativeTarget;
+           llvm::BasicBlock* tentative = it->tentativeTarget;
            tentative->replaceAllUsesWith(cleanupScopes[i].beginBlock);
 
            // And continue execution with the tentative target (we simply reuse
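The goto handling touched here works in two phases: a goto whose label has not been emitted yet branches to a placeholder block ("goto.unresolved") and is recorded in currentUnresolvedGotos(); when the label is reached via addLabelTarget(), or when a cleanup scope is popped, the matching placeholders are redirected to their real target. A rough stand-alone model of that bookkeeping follows; the types and names (LabelResolver, PendingJump, defineLabel) are invented, and integer ids stand in for basic blocks.

#include <cassert>
#include <cstdio>
#include <map>
#include <string>
#include <vector>

// Placeholder for a branch whose destination is not known yet; in the real
// code this is the "goto.unresolved" basic block created in jumpToLabel().
struct PendingJump {
    std::string label;     // targetLabel
    int placeholderId;     // stands in for the tentative BasicBlock*
};

struct LabelResolver {
    std::map<std::string, int> definedLabels;   // labelTargets
    std::vector<PendingJump> unresolved;        // currentUnresolvedGotos()
    int nextId = 0;

    // A goto seen before its label: remember it and hand out a placeholder.
    int jumpToLabel(const std::string& label) {
        auto it = definedLabels.find(label);
        if (it != definedLabels.end())
            return it->second;                  // backward goto, resolve immediately
        unresolved.push_back({label, nextId});
        return nextId++;
    }

    // Label emitted: resolve every pending goto that targets it
    // (cf. addLabelTarget scanning currentUnresolvedGotos()).
    void defineLabel(const std::string& label, int realTargetId) {
        definedLabels[label] = realTargetId;
        for (size_t i = 0; i < unresolved.size();) {
            if (unresolved[i].label == label) {
                std::printf("redirect placeholder %d -> block %d\n",
                            unresolved[i].placeholderId, realTargetId);
                unresolved.erase(unresolved.begin() + i);
            } else {
                ++i;
            }
        }
    }
};

int main() {
    LabelResolver r;
    r.jumpToLabel("done");       // forward goto, placeholder handed out
    r.defineLabel("done", 42);   // label reached, placeholder redirected
    assert(r.unresolved.empty());
}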
@@ -219,6 +225,8 @@ void ScopeStack::addLabelTarget(Identifier* labelName,
 ) {
     labelTargets[labelName] = {targetBlock, currentCleanupScope(), 0};
 
+    // See whether any of the unresolved gotos target this label, and resolve
+    // those that do.
     std::vector<GotoJump>& unresolved = currentUnresolvedGotos();
     size_t i = 0;
     while (i < unresolved.size()) {
@@ -242,7 +250,7 @@ void ScopeStack::jumpToLabel(Loc loc, Identifier* labelName) {
         return;
     }
 
-    llvm::BasicBlock *target =
+    llvm::BasicBlock* target =
         llvm::BasicBlock::Create(irs->context(), "goto.unresolved", irs->topfunc());
     irs->ir->CreateBr(target);
     currentUnresolvedGotos().push_back({loc, irs->scopebb(), target, labelName});
@@ -283,7 +291,7 @@ std::vector<llvm::BasicBlock*>& ScopeStack::currentLandingPads() {
 }
 
 namespace {
-llvm::LandingPadInst* createLandingPadInst(IRState *irs) {
+llvm::LandingPadInst* createLandingPadInst(IRState* irs) {
     LLType* retType = LLStructType::get(LLType::getInt8PtrTy(irs->context()),
                                         LLType::getInt32Ty(irs->context()),
                                         NULL);
@@ -305,23 +313,20 @@ llvm::BasicBlock* ScopeStack::emitLandingPad() {
     // save and rewrite scope
     IRScope savedIRScope = irs->scope();
 
-    llvm::BasicBlock *beginBB = llvm::BasicBlock::Create(irs->context(),
+    llvm::BasicBlock* beginBB = llvm::BasicBlock::Create(irs->context(),
         "landingPad", irs->topfunc());
     irs->scope() = IRScope(beginBB);
 
-    llvm::LandingPadInst *landingPad = createLandingPadInst(irs);
+    llvm::LandingPadInst* landingPad = createLandingPadInst(irs);
 
     // Stash away the exception object pointer and selector value into their
     // stack slots.
     llvm::Value* ehPtr = DtoExtractValue(landingPad, 0);
     if (!irs->func()->resumeUnwindBlock) {
         irs->func()->resumeUnwindBlock = llvm::BasicBlock::Create(
-            irs->context(),
-            "unwind.resume",
-            irs->topfunc()
-        );
+            irs->context(), "unwind.resume", irs->topfunc());
 
-        llvm::BasicBlock *oldBB = irs->scopebb();
+        llvm::BasicBlock* oldBB = irs->scopebb();
         irs->scope() = IRScope(irs->func()->resumeUnwindBlock);
 
         llvm::Function* resumeFn = LLVM_D_GetRuntimeFunction(Loc(),
@@ -364,14 +369,14 @@ llvm::BasicBlock* ScopeStack::emitLandingPad() {
         // emitted to the EH tables.
         landingPad->addClause(it->classInfoPtr);
 
-        llvm::BasicBlock *mismatchBB = llvm::BasicBlock::Create(
+        llvm::BasicBlock* mismatchBB = llvm::BasicBlock::Create(
             irs->context(),
             beginBB->getName() + llvm::Twine(".mismatch"),
             irs->topfunc()
         );
 
         // "Call" llvm.eh.typeid.for, which gives us the eh selector value to compare with
-        llvm::Value *ehTypeId = irs->ir->CreateCall(GET_INTRINSIC_DECL(eh_typeid_for),
+        llvm::Value* ehTypeId = irs->ir->CreateCall(GET_INTRINSIC_DECL(eh_typeid_for),
             DtoBitCast(it->classInfoPtr, getVoidPtrType()));
 
         // Compare the selector value from the unwinder against the expected
@@ -402,8 +407,7 @@ llvm::BasicBlock* ScopeStack::emitLandingPad() {
     return beginBB;
 }
 
-IrFunction::IrFunction(FuncDeclaration* fd)
-{
+IrFunction::IrFunction(FuncDeclaration* fd) {
     decl = fd;
 
     Type* t = fd->type->toBasetype();
@@ -435,8 +439,7 @@ IrFunction::IrFunction(FuncDeclaration* fd)
     ehSelectorSlot = NULL;
 }
 
-void IrFunction::setNeverInline()
-{
+void IrFunction::setNeverInline() {
 #if LDC_LLVM_VER >= 303
     assert(!func->getAttributes().hasAttribute(llvm::AttributeSet::FunctionIndex, llvm::Attribute::AlwaysInline) && "function can't be never- and always-inline at the same time");
     func->addFnAttr(llvm::Attribute::NoInline);
@@ -449,8 +452,7 @@ void IrFunction::setNeverInline()
 #endif
 }
 
-void IrFunction::setAlwaysInline()
-{
+void IrFunction::setAlwaysInline() {
 #if LDC_LLVM_VER >= 303
     assert(!func->getAttributes().hasAttribute(llvm::AttributeSet::FunctionIndex, llvm::Attribute::NoInline) && "function can't be never- and always-inline at the same time");
     func->addFnAttr(llvm::Attribute::AlwaysInline);
@@ -470,10 +472,9 @@ llvm::AllocaInst* IrFunction::getOrCreateEhPtrSlot() {
     return ehPtrSlot;
 }
 
-IrFunction *getIrFunc(FuncDeclaration *decl, bool create)
+IrFunction* getIrFunc(FuncDeclaration* decl, bool create)
 {
-    if (!isIrFuncCreated(decl) && create)
-    {
+    if (!isIrFuncCreated(decl) && create) {
         assert(decl->ir.irFunc == NULL);
         decl->ir.irFunc = new IrFunction(decl);
         decl->ir.m_type = IrDsymbol::FuncType;
@@ -482,8 +483,7 @@ IrFunction *getIrFunc(FuncDeclaration *decl, bool create)
     return decl->ir.irFunc;
 }
 
-bool isIrFuncCreated(FuncDeclaration *decl)
-{
+bool isIrFuncCreated(FuncDeclaration* decl) {
     int t = decl->ir.type();
     assert(t == IrDsymbol::FuncType || t == IrDsymbol::NotSet);
     return t == IrDsymbol::FuncType;
ir/irfunction.h (154 changed lines)

@@ -7,7 +7,7 @@
 //
 //===----------------------------------------------------------------------===//
 //
-// Represents the status of a D function/method/... on its way through the
+// Represents the state of a D function/method/... on its way through the
 // codegen process.
 //
 //===----------------------------------------------------------------------===//
@@ -31,16 +31,17 @@ class Statement;
 /// Represents a position on the stack of currently active cleanup scopes.
 ///
 /// Since we always need to run a contiguous part of the stack (or all) in
-/// order, this is enough to uniquely identify the location of a given target.
+/// order, two cursors (one of which is usually the currently top of the stack)
+/// are enough to identify a sequence of cleanups to run.
 typedef size_t CleanupCursor;
 
-/// Stores information needed to correctly jump to a given label or loop
-/// statement (break/continue).
+/// Stores information needed to correctly jump to a given label or loop/switch
+/// statement (break/continue can be labeled, but are not necessarily).
 struct JumpTarget {
     /// The basic block to ultimately branch to.
     llvm::BasicBlock* targetBlock;
 
-    /// The index of the label target in the stack of active cleanup scopes.
+    /// The index of the target in the stack of active cleanup scopes.
     ///
     /// When generating code for a jump to this label, the cleanups between
     /// the current depth and that of the level will be emitted. Note that
@@ -55,10 +56,12 @@ struct JumpTarget {
     Statement* targetStatement;
 };
 
-/// Defines source and target label of a goto (used if we cannot immediately
-/// figure out the target basic block).
+/// Keeps track of source and target label of a goto.
+///
+/// Used if we cannot immediately emit all the code for a jump because we have
+/// not generated code for the target yet.
 struct GotoJump {
-    // The location of the jump instruction, for error reporting.
+    // The location of the goto instruction, for error reporting.
     Loc sourceLoc;
 
     /// The basic block which contains the goto as its terminator.
@@ -73,8 +76,11 @@ struct GotoJump {
     Identifier* targetLabel;
 };
 
-/// Describes a particular way to leave a certain scope and continue execution
-/// at another one (return, break/continue, exception handling, etc.).
+/// Describes a particular way to leave a cleanup scope and continue execution
+/// with another one.
+///
+/// In general, there can be multiple ones (normal exit, early returns,
+/// breaks/continues, exceptions, and so on).
 struct CleanupExitTarget {
     explicit CleanupExitTarget(llvm::BasicBlock* t) : branchTarget(t) {}
 
@@ -98,7 +104,8 @@ struct CleanupExitTarget {
 ///
 /// Our goal is to only emit each cleanup once such as to avoid generating an
 /// exponential number of basic blocks/landing pads for handling all the
-/// different ways of exiting a deeply nested scope (exception/no exception/...).
+/// different ways of exiting a deeply nested scope (consider e.g. ten
+/// local variables with destructors, each of which might throw itself).
 class CleanupScope {
 public:
     CleanupScope(llvm::BasicBlock* beginBlock, llvm::BasicBlock* endBlock) :
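The "exponential number of basic blocks" remark can be made concrete: with n nested scopes that each carry a cleanup and can be left in two ways (normal completion or unwinding), duplicating every cleanup into every distinct exit path copies cleanup bodies on the order of 2^n times, whereas the shared-block scheme emits each body once plus a small dispatch. A throwaway calculation under that deliberately simplified cost model (illustrative only, not a measurement of LDC output):

#include <cstdio>

int main() {
    // n nested scopes, each with a cleanup and two ways to leave it
    // (fall through or unwind), as in "ten local variables with destructors".
    const int n = 10;

    long long duplicated = 0;   // cleanup bodies if each exit path gets its own copy
    long long paths = 1;
    for (int depth = 1; depth <= n; ++depth) {
        paths *= 2;             // every level doubles the number of distinct exit paths
        duplicated += paths;    // each path through this level re-emits its cleanup
    }

    long long shared = n;       // one block per cleanup, reached via branch selectors

    std::printf("naive per-path duplication: %lld cleanup bodies\n", duplicated);
    std::printf("shared cleanup blocks:      %lld cleanup bodies\n", shared);
}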
@@ -107,7 +114,7 @@ public:
     /// The basic block to branch to for running the cleanup.
     llvm::BasicBlock* beginBlock;
 
-    /// The basic block that contains the end of the cleanuip code (is different
+    /// The basic block that contains the end of the cleanup code (is different
     /// from beginBlock if the cleanup contains control flow).
     llvm::BasicBlock* endBlock;
 
@@ -117,20 +124,30 @@ public:
     /// Stores all possible targets blocks after running this cleanup, along
     /// with what predecessors want to continue at that target. The index in
     /// the vector corresponds to the branch selector value for that target.
+    // Note: This is of course a bad choice of data structure for many targets
+    // complexity-wise. However, situations where this matters should be
+    // exceedingly rare in both hand-written as well as generated code.
     std::vector<CleanupExitTarget> exitTargets;
 
-    /// Keeps track of all the gotos somewhere inside this scope for which we
-    /// have not found the label yet (because it occurs lexically later in the
-    /// function).
+    /// Keeps track of all the gotos originating from somewhere inside this
+    /// scope for which we have not found the label yet (because it occurs
+    /// lexically later in the function).
+    // Note: Should also be a dense map from source block to the rest of the
+    // data if we expect many gotos.
     std::vector<GotoJump> unresolvedGotos;
 
-    /// Caches landing pads generated for catches at this cleanup scope level
-    /// (null if not yet emitted, one element is pushed to/popped from the back
-    /// on entering/leaving a catch block).
+    /// Caches landing pads generated for catches at this cleanup scope level.
+    ///
+    /// One element is pushed to the back on each time a catch block is entered,
+    /// and popped again once it is left. If the corresponding landing pad has
+    /// not been generated yet (this is done lazily), the pointer is null.
     std::vector<llvm::BasicBlock*> landingPads;
 };
 
+/// Stores information to be able to branch to a catch clause if it matches.
 ///
+/// Each catch body is emitted only once, but may be target from many landing
+/// pads (in case of nested catch or cleanup scopes).
 struct CatchScope {
     /// The ClassInfo reference corresponding to the type to match the
     /// exception object against.
@@ -139,34 +156,49 @@ struct CatchScope {
     /// The block to branch to if the exception type matches.
     llvm::BasicBlock* bodyBlock;
 
-    /// The cleanup scope level corresponding to this catch.
+    /// The cleanup scope stack level corresponding to this catch.
     CleanupCursor cleanupScope;
 };
 
-/// Contains transitory information about the current scope, etc. while
-/// traversing the function for codegen purposes.
+/// Keeps track of active (abstract) scopes in a function that influence code
+/// generation of their contents. This includes cleanups (finally blocks,
+/// destructors), try/catch blocks and labels for goto/break/continue.
+///
+/// Note that the entire code generation process, and this class in particular,
+/// depends heavily on the fact that we visit the statement/expression tree in
+/// its natural order, i.e. depth-first and in lexical order. In other words,
+/// the code here expects that after a cleanup/catch/loop/etc. has been pushed,
+/// the contents of the block are generated, and it is then popped again
+/// afterwards. This is also encoded in the fact that none of the methods for
+/// branching/running cleanups take a cursor for describing the "source" scope,
+/// it is always assumed to be the current one.
+///
+/// Handling of break/continue could be moved into a separate layer that uses
+/// the rest of the ScopeStack API, as it (in contrast to goto) never requires
+/// resolving forward references across cleanup scopes.
 class ScopeStack {
 public:
-    ScopeStack(IRState *irs) : irs(irs) {}
+    ScopeStack(IRState* irs) : irs(irs) {}
     ~ScopeStack();
 
     /// Registers a piece of cleanup code to be run.
     ///
-    /// The basic block is expected not to contain a terminator yet. It will be
-    /// added by ScopeStack as needed based on what followup blocks there will be
-    /// registered.
+    /// The end block is expected not to contain a terminator yet. It will be
+    /// added by ScopeStack as needed, based on what follow-up blocks code from
+    /// within this scope will branch to.
     void pushCleanup(llvm::BasicBlock* beginBlock, llvm::BasicBlock* endBlock);
 
-    /// Terminates the current IRScope with a branch to the cleanups needed for
-    /// leaving the current scope and continuing execution at the target scope
-    /// stack level.
+    /// Terminates the current basic block with a branch to the cleanups needed
+    /// for leaving the current scope and continuing execution at the target
+    /// scope stack level.
    ///
    /// After running them, execution will branch to the given basic block.
    void runCleanups(CleanupCursor targetScope, llvm::BasicBlock* continueWith) {
        runCleanups(currentCleanupScope(), targetScope, continueWith);
    }
 
-    /// Like #runCleanups(), but runs all of them.
+    /// Like #runCleanups(), but runs all of them until the top-level scope is
+    /// reached.
     void runAllCleanups(llvm::BasicBlock* continueWith);
 
     /// Pops all the cleanups between the current scope and the target cursor.
@@ -174,26 +206,28 @@ public:
     /// This does not insert any cleanup calls, use #runCleanups() beforehand.
     void popCleanups(CleanupCursor targetScope);
 
-    /// Returns a cursor that identifies the curernt cleanup scope, to be later
-    /// userd with #runCleanups() et al.
+    /// Returns a cursor that identifies the current cleanup scope, to be later
+    /// used with #runCleanups() et al.
     ///
     /// Note that this cursor is only valid as long as the current scope is not
     /// popped.
     CleanupCursor currentCleanupScope() { return cleanupScopes.size(); }
 
+    /// Registers a catch block to be taken into consideration when an exception
+    /// is thrown within the current scope.
     ///
+    /// When a potentially throwing function call is emitted, a landing pad will
+    /// be emitted to compare the dynamic type info of the exception against the
+    /// given ClassInfo constant and to branch to the given body block if it
+    /// matches. The registered catch blocks are maintained on a stack, with the
+    /// top-most (i.e. last pushed, innermost) taking precedence.
     void pushCatch(llvm::Constant* classInfoPtr, llvm::BasicBlock* bodyBlock);
 
-    ///
+    /// Unregisters the last registered catch block.
     void popCatch();
 
-    /// Emits a call or invoke to the given callee, depending on whether there
-    /// are catches/cleanups active or not.
-    template <typename T>
-    llvm::CallSite callOrInvoke(llvm::Value* callee, const T &args,
-        const char* name = "");
-
-    ///
+    /// Registers a loop statement to be used as a target for break/continue
+    /// statements in the current scope.
     void pushLoopTarget(Statement* loopStatement, llvm::BasicBlock* continueTarget,
                         llvm::BasicBlock* breakTarget);
 
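The pushCatch()/popCatch() contract described in this hunk is a stack with innermost-first matching. The sketch below models just that lookup rule; the names are invented, plain strings stand in for ClassInfo pointers and body blocks, and exact string equality replaces the real class-hierarchy match performed via the eh.typeid.for comparison in emitLandingPad().

#include <cstdio>
#include <string>
#include <vector>

// One entry per active catch clause, innermost last; mirrors CatchScope
// minus the LLVM specifics (ClassInfo constant, body basic block).
struct CatchEntry {
    std::string caughtType;   // classInfoPtr stand-in
    std::string handlerName;  // bodyBlock stand-in
};

struct CatchStack {
    std::vector<CatchEntry> entries;

    void pushCatch(std::string type, std::string handler) {
        entries.push_back({std::move(type), std::move(handler)});
    }
    void popCatch() { entries.pop_back(); }

    // Landing-pad dispatch: the top-most (last pushed, innermost) matching
    // clause wins; no match means the exception keeps unwinding.
    const char* dispatch(const std::string& thrownType) const {
        for (auto it = entries.rbegin(); it != entries.rend(); ++it)
            if (it->caughtType == thrownType)
                return it->handlerName.c_str();
        return "resume unwinding";
    }
};

int main() {
    CatchStack scopes;
    scopes.pushCatch("Exception", "outer handler");
    scopes.pushCatch("Exception", "inner handler");      // nested try/catch
    std::printf("%s\n", scopes.dispatch("Exception"));   // inner handler
    scopes.popCatch();
    std::printf("%s\n", scopes.dispatch("Exception"));   // outer handler
    std::printf("%s\n", scopes.dispatch("Error"));       // resume unwinding
}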
@@ -201,34 +235,55 @@ public:
     /// consideration for resolving breaks/continues.
     void popLoopTarget();
 
-    ///
+    /// Registers a statement to be used as a target for break statements in the
+    /// current scope (currently applies only to switch statements).
     void pushBreakTarget(Statement* switchStatement, llvm::BasicBlock* targetBlock);
 
-    ///
+    /// Unregisters the last registered break target.
     void popBreakTarget();
 
+    /// Adds a label to serve as a target for goto statements.
     ///
+    /// Also causes in-flight forward references to this label to be resolved.
     void addLabelTarget(Identifier* labelName, llvm::BasicBlock* targetBlock);
 
+    /// Emits a call or invoke to the given callee, depending on whether there
+    /// are catches/cleanups active or not.
+    template <typename T>
+    llvm::CallSite callOrInvoke(llvm::Value* callee, const T &args,
+        const char* name = "");
+
+    /// Terminates the current basic block with an unconditional branch to the
+    /// given label, along with the cleanups to execute on the way there.
     ///
+    /// Legal forward references (i.e. within the same function, and not into
+    /// a cleanup scope) will be resolved.
     void jumpToLabel(Loc loc, Identifier* labelName);
 
-    ///
+    /// Terminates the current basic block with an unconditional branch to the
+    /// continue target generated by the given loop statement, along with
+    /// the cleanups to execute on the way there.
     void continueWithLoop(Statement* loopStatement) {
         jumpToStatement(continueTargets, loopStatement);
     }
 
-    ///
+    /// Terminates the current basic block with an unconditional branch to the
+    /// closest loop continue target, along with the cleanups to execute on
+    /// the way there.
     void continueWithClosest() {
         jumpToClosest(continueTargets);
     }
 
-    ///
+    /// Terminates the current basic block with an unconditional branch to the
+    /// break target generated by the given loop or switch statement, along with
+    /// the cleanups to execute on the way there.
     void breakToStatement(Statement* loopOrSwitchStatement) {
         jumpToStatement(breakTargets, loopOrSwitchStatement);
     }
 
-    ///
+    /// Terminates the current basic block with an unconditional branch to the
+    /// closest break statement target, along with the cleanups to execute on
+    /// the way there.
     void breakToClosest() {
         jumpToClosest(breakTargets);
     }
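The new class comment spells out the usage protocol for the whole API: push a scope, generate its contents, then run and pop its cleanups before moving on. A hypothetical caller against a stripped-down stand-in shows that nesting; MiniScopeStack and emitScopeWithDtor are invented for the illustration and only mirror the push/run/pop shape of the real ScopeStack methods.

#include <cstdio>
#include <string>
#include <vector>

// Minimal stand-in for ScopeStack: only enough to show the required
// push -> emit contents -> pop discipline for nested scopes.
class MiniScopeStack {
    std::vector<std::string> cleanups;   // cleanupScopes
public:
    size_t currentCleanupScope() const { return cleanups.size(); }

    void pushCleanup(std::string description) {
        cleanups.push_back(std::move(description));
    }

    // Emit all cleanups from the top of the stack down to targetScope,
    // innermost first, like runCleanups(current, target, continueWith).
    void runCleanups(size_t targetScope) const {
        for (size_t i = cleanups.size(); i-- > targetScope;)
            std::printf("  run cleanup: %s\n", cleanups[i].c_str());
    }

    void popCleanups(size_t targetScope) {
        cleanups.resize(targetScope);
    }
};

// Hypothetical visitor for a block statement containing a variable with a
// destructor: the scope is pushed before its contents and popped right after.
void emitScopeWithDtor(MiniScopeStack& stack) {
    const size_t outer = stack.currentCleanupScope();
    stack.pushCleanup("~SomeStruct() for local variable");

    std::printf("  emit statements inside the scope\n");

    // Leaving the scope normally: run, then pop, the cleanups added above.
    stack.runCleanups(outer);
    stack.popCleanups(outer);
}

int main() {
    MiniScopeStack stack;
    std::printf("function body:\n");
    emitScopeWithDtor(stack);
}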
@@ -254,7 +309,7 @@ private:
 
     /// The ambient IRState. For legacy reasons, there is currently a cyclic
     /// dependency between the two.
-    IRState *irs;
+    IRState* irs;
 
     typedef llvm::DenseMap<Identifier*, JumpTarget> LabelTargetMap;
     /// The labels we have encountered in this function so far, accessed by
@@ -344,6 +399,7 @@ struct IrFunction {
     FuncDeclaration* decl;
     TypeFunction* type;
 
+    /// Points to the associated scope stack while emitting code for the function.
     ScopeStack* scopes;
 
     bool queued;
@@ -371,7 +427,7 @@ struct IrFunction {
     /// A stack slot containing the exception object pointer while a landing pad
     /// is active. Need this because the instruction must dominate all uses as a
     /// _d_eh_resume_unwind parameter, but if we take a select at the end on a
-    /// cleanup on the way there, it also must dominate all other precedessors
+    /// cleanup on the way there, it also must dominate all other predecessors
     /// of the cleanup. Thus, we just create an alloca at the start of the
     /// function.
     llvm::AllocaInst* ehPtrSlot;
@@ -398,7 +454,7 @@ struct IrFunction {
     IrFuncTy irFty;
 };
 
-IrFunction *getIrFunc(FuncDeclaration *decl, bool create = false);
-bool isIrFuncCreated(FuncDeclaration *decl);
+IrFunction* getIrFunc(FuncDeclaration* decl, bool create = false);
+bool isIrFuncCreated(FuncDeclaration* decl);
 
 #endif