Merged
Changes from 1 commit
Commits
27 commits
f9ab9d4
[Typed continuations] resume instruction
frank-emrich Oct 3, 2023
2a932d3
Merge remote-tracking branch 'upstream/main' into wasmfx-resume
frank-emrich Nov 14, 2023
e96014a
dummy implementation for TypeGeneralizing pass
frank-emrich Nov 14, 2023
6b8787d
new implementation of operateOnScopeNameUsesAndSentTypes
frank-emrich Nov 14, 2023
a3cc291
OverriddenScopeNameUseVisitor
frank-emrich Nov 14, 2023
dfbb193
pass nullptr to func in operateOnScopeNameUsesAndSentValues
frank-emrich Nov 15, 2023
ef0a683
resume instructions have implicitTrap effect
frank-emrich Nov 15, 2023
53e40d4
move comment on own line
frank-emrich Nov 15, 2023
8f85434
manually remove (tag ...) lines erroneously added by updater script
frank-emrich Nov 15, 2023
b475268
experiment: move SentTypesVisitor outside of lambda
frank-emrich Nov 15, 2023
3c1906e
combine both experiments
frank-emrich Nov 15, 2023
6ab20ad
give up on OverriddenScopeNameUseVisitor
frank-emrich Nov 15, 2023
3556171
add typed_continuations_resume.wast to fuzzer ignore list
frank-emrich Nov 16, 2023
bc86090
remove visitTry, visitRethrow from SentTypesVisitor
frank-emrich Nov 21, 2023
97d54f7
Merge remote-tracking branch 'upstream/main' into wasmfx-resume
frank-emrich Nov 21, 2023
cf17036
dummy implementation of SubtypingDiscoverer::visitResume
frank-emrich Nov 21, 2023
db5b02a
Merge remote-tracking branch 'upstream/main' into wasmfx-resume
frank-emrich Dec 13, 2023
115d0e2
when printing resume instructions, keep tag clauses on same line
frank-emrich Dec 13, 2023
143bb2d
remove SentTypesVisitor in favor of simple loop
frank-emrich Dec 23, 2023
6749e08
printing: move as much into PrintExpressionContents as possible
frank-emrich Jan 7, 2024
17be964
Merge remote-tracking branch 'upstream/main' into wasmfx-resume
frank-emrich Jan 7, 2024
c262d3c
rephrase comment
frank-emrich Jan 7, 2024
fbcb7fb
Make Resume class more consistent with TryTable
frank-emrich Jan 7, 2024
06a26c1
include Resume's sentTypes field in field delegations
frank-emrich Jan 7, 2024
d41256d
address minor nits
frank-emrich Jan 8, 2024
ab6ff8b
move invariant checks to FunctionValidator::visitResume
frank-emrich Jan 8, 2024
179faa4
move & update typed_continuations_resume.wast
frank-emrich Jan 8, 2024
Merge remote-tracking branch 'upstream/main' into wasmfx-resume
frank-emrich committed Nov 21, 2023
commit 97d54f752cbabd2758ba8544cbcb9532ec5a00df
263 changes: 0 additions & 263 deletions src/passes/Unsubtyping.cpp
@@ -335,269 +335,6 @@ struct Unsubtyping
    // Visit the rest of the code that is not in functions.
    walkModuleCode(wasm);
  }

  void visitFunction(Function* func) {
    if (func->body) {
      noteSubtype(func->body->type, func->getResults());
    }
  }
  void visitGlobal(Global* global) {
    if (global->init) {
      noteSubtype(global->init->type, global->type);
    }
  }
  void visitElementSegment(ElementSegment* seg) {
    if (seg->offset) {
      noteSubtype(seg->type, getModule()->getTable(seg->table)->type);
    }
    for (auto init : seg->data) {
      noteSubtype(init->type, seg->type);
    }
  }
  void visitNop(Nop* curr) {}
  void visitBlock(Block* curr) {
    if (!curr->list.empty()) {
      noteSubtype(curr->list.back()->type, curr->type);
    }
  }
  void visitIf(If* curr) {
    if (curr->ifFalse) {
      noteSubtype(curr->ifTrue->type, curr->type);
      noteSubtype(curr->ifFalse->type, curr->type);
    }
  }
  void visitLoop(Loop* curr) { noteSubtype(curr->body->type, curr->type); }
  void visitBreak(Break* curr) {
    if (curr->value) {
      noteSubtype(curr->value->type, findBreakTarget(curr->name)->type);
    }
  }
  void visitSwitch(Switch* curr) {
    if (curr->value) {
      for (auto name : BranchUtils::getUniqueTargets(curr)) {
        noteSubtype(curr->value->type, findBreakTarget(name)->type);
      }
    }
  }
  template<typename T> void handleCall(T* curr, Signature sig) {
    assert(curr->operands.size() == sig.params.size());
    for (size_t i = 0, size = sig.params.size(); i < size; ++i) {
      noteSubtype(curr->operands[i]->type, sig.params[i]);
    }
    if (curr->isReturn) {
      noteSubtype(sig.results, getFunction()->getResults());
    }
  }
  void visitCall(Call* curr) {
    handleCall(curr, getModule()->getFunction(curr->target)->getSig());
  }
  void visitCallIndirect(CallIndirect* curr) {
    handleCall(curr, curr->heapType.getSignature());
    auto* table = getModule()->getTable(curr->table);
    auto tableType = table->type.getHeapType();
    if (HeapType::isSubType(tableType, curr->heapType)) {
      // Unlike other casts, where cast targets are always subtypes of cast
      // sources, call_indirect target types may be supertypes of their source
      // table types. In this case, the cast will always succeed, but only if we
      // keep the types related.
      noteSubtype(tableType, curr->heapType);
    } else if (HeapType::isSubType(curr->heapType, tableType)) {
      noteCast(tableType, curr->heapType);
    } else {
      // The types are unrelated and the cast will fail. We can keep the types
      // unrelated.
    }
  }
  void visitLocalGet(LocalGet* curr) {}
  void visitLocalSet(LocalSet* curr) {
    noteSubtype(curr->value->type, getFunction()->getLocalType(curr->index));
  }
  void visitGlobalGet(GlobalGet* curr) {}
  void visitGlobalSet(GlobalSet* curr) {
    noteSubtype(curr->value->type, getModule()->getGlobal(curr->name)->type);
  }
  void visitLoad(Load* curr) {}
  void visitStore(Store* curr) {}
  void visitAtomicRMW(AtomicRMW* curr) {}
  void visitAtomicCmpxchg(AtomicCmpxchg* curr) {}
  void visitAtomicWait(AtomicWait* curr) {}
  void visitAtomicNotify(AtomicNotify* curr) {}
  void visitAtomicFence(AtomicFence* curr) {}
  void visitSIMDExtract(SIMDExtract* curr) {}
  void visitSIMDReplace(SIMDReplace* curr) {}
  void visitSIMDShuffle(SIMDShuffle* curr) {}
  void visitSIMDTernary(SIMDTernary* curr) {}
  void visitSIMDShift(SIMDShift* curr) {}
  void visitSIMDLoad(SIMDLoad* curr) {}
  void visitSIMDLoadStoreLane(SIMDLoadStoreLane* curr) {}
  void visitMemoryInit(MemoryInit* curr) {}
  void visitDataDrop(DataDrop* curr) {}
  void visitMemoryCopy(MemoryCopy* curr) {}
  void visitMemoryFill(MemoryFill* curr) {}
  void visitConst(Const* curr) {}
  void visitUnary(Unary* curr) {}
  void visitBinary(Binary* curr) {}
  void visitSelect(Select* curr) {
    noteSubtype(curr->ifTrue->type, curr->type);
    noteSubtype(curr->ifFalse->type, curr->type);
  }
  void visitDrop(Drop* curr) {}
  void visitReturn(Return* curr) {
    if (curr->value) {
      noteSubtype(curr->value->type, getFunction()->getResults());
    }
  }
  void visitMemorySize(MemorySize* curr) {}
  void visitMemoryGrow(MemoryGrow* curr) {}
  void visitUnreachable(Unreachable* curr) {}
  void visitPop(Pop* curr) {}
  void visitRefNull(RefNull* curr) {}
  void visitRefIsNull(RefIsNull* curr) {}
  void visitRefFunc(RefFunc* curr) {}
  void visitRefEq(RefEq* curr) {}
  void visitTableGet(TableGet* curr) {}
  void visitTableSet(TableSet* curr) {
    noteSubtype(curr->value->type, getModule()->getTable(curr->table)->type);
  }
  void visitTableSize(TableSize* curr) {}
  void visitTableGrow(TableGrow* curr) {}
  void visitTableFill(TableFill* curr) {
    noteSubtype(curr->value->type, getModule()->getTable(curr->table)->type);
  }
  void visitTableCopy(TableCopy* curr) {
    noteSubtype(getModule()->getTable(curr->sourceTable)->type,
                getModule()->getTable(curr->destTable)->type);
  }
  void visitTry(Try* curr) {
    noteSubtype(curr->body->type, curr->type);
    for (auto* body : curr->catchBodies) {
      noteSubtype(body->type, curr->type);
    }
  }
  void visitThrow(Throw* curr) {
    Type params = getModule()->getTag(curr->tag)->sig.params;
    assert(params.size() == curr->operands.size());
    for (size_t i = 0, size = curr->operands.size(); i < size; ++i) {
      noteSubtype(curr->operands[i]->type, params[i]);
    }
  }
  void visitRethrow(Rethrow* curr) {}
  void visitTupleMake(TupleMake* curr) {}
  void visitTupleExtract(TupleExtract* curr) {}
  void visitRefI31(RefI31* curr) {}
  void visitI31Get(I31Get* curr) {}
  void visitCallRef(CallRef* curr) {
    if (!curr->target->type.isSignature()) {
      return;
    }
    handleCall(curr, curr->target->type.getHeapType().getSignature());
  }
  void visitRefTest(RefTest* curr) {
    noteCast(curr->ref->type, curr->castType);
  }
  void visitRefCast(RefCast* curr) { noteCast(curr->ref->type, curr->type); }
  void visitBrOn(BrOn* curr) {
    if (curr->op == BrOnCast || curr->op == BrOnCastFail) {
      noteCast(curr->ref->type, curr->castType);
    }
    noteSubtype(curr->getSentType(), findBreakTarget(curr->name)->type);
  }
  void visitStructNew(StructNew* curr) {
    if (!curr->type.isStruct() || curr->isWithDefault()) {
      return;
    }
    const auto& fields = curr->type.getHeapType().getStruct().fields;
    assert(fields.size() == curr->operands.size());
    for (size_t i = 0, size = fields.size(); i < size; ++i) {
      noteSubtype(curr->operands[i]->type, fields[i].type);
    }
  }
  void visitStructGet(StructGet* curr) {}
  void visitStructSet(StructSet* curr) {
    if (!curr->ref->type.isStruct()) {
      return;
    }
    const auto& fields = curr->ref->type.getHeapType().getStruct().fields;
    noteSubtype(curr->value->type, fields[curr->index].type);
  }
  void visitArrayNew(ArrayNew* curr) {
    if (!curr->type.isArray() || curr->isWithDefault()) {
      return;
    }
    auto array = curr->type.getHeapType().getArray();
    noteSubtype(curr->init->type, array.element.type);
  }
  void visitArrayNewData(ArrayNewData* curr) {}
  void visitArrayNewElem(ArrayNewElem* curr) {
    if (!curr->type.isArray()) {
      return;
    }
    auto array = curr->type.getHeapType().getArray();
    auto* seg = getModule()->getElementSegment(curr->segment);
    noteSubtype(seg->type, array.element.type);
  }
  void visitArrayNewFixed(ArrayNewFixed* curr) {
    if (!curr->type.isArray()) {
      return;
    }
    auto array = curr->type.getHeapType().getArray();
    for (auto* value : curr->values) {
      noteSubtype(value->type, array.element.type);
    }
  }
  void visitArrayGet(ArrayGet* curr) {}
  void visitArraySet(ArraySet* curr) {
    if (!curr->ref->type.isArray()) {
      return;
    }
    auto array = curr->ref->type.getHeapType().getArray();
    noteSubtype(curr->value->type, array.element.type);
  }
  void visitArrayLen(ArrayLen* curr) {}
  void visitArrayCopy(ArrayCopy* curr) {
    if (!curr->srcRef->type.isArray() || !curr->destRef->type.isArray()) {
      return;
    }
    auto src = curr->srcRef->type.getHeapType().getArray();
    auto dest = curr->destRef->type.getHeapType().getArray();
    noteSubtype(src.element.type, dest.element.type);
  }
  void visitArrayFill(ArrayFill* curr) {
    if (!curr->ref->type.isArray()) {
      return;
    }
    auto array = curr->ref->type.getHeapType().getArray();
    noteSubtype(curr->value->type, array.element.type);
  }
  void visitArrayInitData(ArrayInitData* curr) {}
  void visitArrayInitElem(ArrayInitElem* curr) {
    if (!curr->ref->type.isArray()) {
      return;
    }
    auto array = curr->ref->type.getHeapType().getArray();
    auto* seg = getModule()->getElementSegment(curr->segment);
    noteSubtype(seg->type, array.element.type);
  }
  void visitRefAs(RefAs* curr) {}
  void visitStringNew(StringNew* curr) {}
  void visitStringConst(StringConst* curr) {}
  void visitStringMeasure(StringMeasure* curr) {}
  void visitStringEncode(StringEncode* curr) {}
  void visitStringConcat(StringConcat* curr) {}
  void visitStringEq(StringEq* curr) {}
  void visitStringAs(StringAs* curr) {}
  void visitStringWTF8Advance(StringWTF8Advance* curr) {}
  void visitStringWTF16Get(StringWTF16Get* curr) {}
  void visitStringIterNext(StringIterNext* curr) {}
  void visitStringIterMove(StringIterMove* curr) {}
  void visitStringSliceWTF(StringSliceWTF* curr) {}
  void visitStringSliceIter(StringSliceIter* curr) {}

  void visitResume(Resume* curr) {
    // TODO Implement this by exposing the subtyping relations that the
    // validator expects to hold, once validation of resume is implemented
    WASM_UNREACHABLE("not implemented");
  }
};

} // anonymous namespace