
Assert trees are valid, fill in missing tests #1808

Merged
@@ -137,15 +137,15 @@ public IdlToken next() {
}

CapturedToken peekPastSpaces() {
return peekWhile(1, token -> token == IdlToken.SPACE);
}

CapturedToken peekPastWs() {
return peekWhile(1, token -> token.isWhitespace() || token == IdlToken.DOC_COMMENT);
return peekWhile(0, token -> token == IdlToken.SPACE);
}

CapturedToken peekWhile(int offsetFromPosition, Predicate<IdlToken> predicate) {
int position = cursor + offsetFromPosition;
// If the start position is out of bounds, return the EOF token.
if (position >= tokens.size()) {
return tokens.get(tokens.size() - 1);
}
CapturedToken token = tokens.get(position);
while (token.getIdlToken() != IdlToken.EOF && predicate.test(token.getIdlToken())) {
token = tokens.get(++position);
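
For readers following the tokenizer change: peekWhile starts scanning at cursor + offset and returns the first token that fails the predicate (or EOF), so the reworked peekPastSpaces with offset 0 returns the current token itself whenever it is not a space. Below is a minimal, self-contained sketch of that scanning behavior, using plain strings in place of CapturedToken and IdlToken; all names in it are illustrative, not the library's API.

import java.util.List;
import java.util.function.Predicate;

final class PeekWhileSketch {
    // Stand-in for peekWhile: scan from cursor + offset while the predicate matches,
    // stopping at "EOF", and return the first token that does not match.
    static String peekWhile(List<String> tokens, int cursor, int offset, Predicate<String> predicate) {
        int position = cursor + offset;
        if (position >= tokens.size()) {
            return tokens.get(tokens.size() - 1); // out of bounds: return the trailing EOF token
        }
        String token = tokens.get(position);
        while (!token.equals("EOF") && predicate.test(token)) {
            token = tokens.get(++position);
        }
        return token;
    }

    public static void main(String[] args) {
        List<String> tokens = List.of("IDENTIFIER", "SPACE", "SPACE", "EQUAL", "NUMBER", "EOF");
        // Offset 0 starts at the current token, so a non-space current token is returned as-is.
        System.out.println(peekWhile(tokens, 0, 0, t -> t.equals("SPACE"))); // IDENTIFIER
        // Offset 1 starts one token ahead, skips the run of spaces, and lands on EQUAL.
        System.out.println(peekWhile(tokens, 0, 1, t -> t.equals("SPACE"))); // EQUAL
    }
}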
@@ -152,7 +152,7 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
tokenizer.next();
tokenizer.next(); // Skip "use"
SP.parse(tokenizer);
ABSOLUTE_ROOT_SHAPE_ID.parse(tokenizer);
BR.parse(tokenizer);
@@ -236,22 +236,16 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
SIMPLE_TYPE_NAME.parse(tokenizer);
optionalSpaces(tokenizer);
IDENTIFIER.parse(tokenizer);
optionalSpaces(tokenizer);

if (tokenizer.isCurrentLexeme("with")) {
SHAPE_MIXINS.parse(tokenizer);
}
parseShapeTypeAndName(tokenizer, SIMPLE_TYPE_NAME);
parseOptionalMixins(tokenizer);
});
}
},

SIMPLE_TYPE_NAME {
@Override
void parse(CapturingTokenizer tokenizer) {
// Assumes that the current token is a valid simple type name validated by SHAPE_BODY.
// Assumes that the current token is a valid simple type name validated by SHAPE.
tokenizer.withState(this, tokenizer::next);
}
},
@@ -260,14 +254,8 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
ENUM_TYPE_NAME.parse(tokenizer);
optionalSpaces(tokenizer);
IDENTIFIER.parse(tokenizer);
optionalSpaces(tokenizer);

if (tokenizer.isCurrentLexeme("with")) {
SHAPE_MIXINS.parse(tokenizer);
}
parseShapeTypeAndName(tokenizer, ENUM_TYPE_NAME);
parseOptionalMixins(tokenizer);

optionalWs(tokenizer);
ENUM_SHAPE_MEMBERS.parse(tokenizer);
@@ -278,7 +266,7 @@ void parse(CapturingTokenizer tokenizer) {
ENUM_TYPE_NAME {
@Override
void parse(CapturingTokenizer tokenizer) {
// Assumes that the current token is a valid enum type name validated by SHAPE_BODY.
// Assumes that the current token is a valid enum type name validated by SHAPE.
tokenizer.withState(this, tokenizer::next);
}
},
@@ -308,10 +296,7 @@ void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
TRAIT_STATEMENTS.parse(tokenizer);
IDENTIFIER.parse(tokenizer);
if (tokenizer.hasNext() && (tokenizer.getCurrentToken() == IdlToken.EQUAL
|| tokenizer.peekPastSpaces().getIdlToken() == IdlToken.EQUAL)) {
VALUE_ASSIGNMENT.parse(tokenizer);
}
parseOptionalValueAssignment(tokenizer);
});
}
},
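
For context on parseOptionalValueAssignment: the member name is parsed first, and an assignment is only entered when the next non-space token is "=". The following is a hedged, self-contained illustration of the accepted shape, written as a hypothetical helper over plain strings rather than the PR's code; it matches members such as DIAMOND = "diamond" inside an enum.

final class ValueAssignmentSketch {
    // Illustrative only: returns the assigned node value if the text after a member
    // name is "= <value>" (optionally preceded by spaces), or null when no assignment follows.
    static String optionalValueAssignment(String afterMemberName) {
        String rest = afterMemberName.stripLeading(); // peek past spaces
        if (!rest.startsWith("=")) {
            return null;                              // no "=": nothing to parse
        }
        return rest.substring(1).strip();             // the assigned node value
    }

    public static void main(String[] args) {
        System.out.println(optionalValueAssignment(" = \"diamond\"")); // "diamond"
        System.out.println(optionalValueAssignment(""));               // null
    }
}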
@@ -320,10 +305,7 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
AGGREGATE_TYPE_NAME.parse(tokenizer);
optionalSpaces(tokenizer);
IDENTIFIER.parse(tokenizer);
optionalSpaces(tokenizer);
parseShapeTypeAndName(tokenizer, AGGREGATE_TYPE_NAME);
parseSharedStructureBodyWithinInline(tokenizer);
});
}
@@ -332,16 +314,17 @@ void parse(CapturingTokenizer tokenizer) {
AGGREGATE_TYPE_NAME {
@Override
void parse(CapturingTokenizer tokenizer) {
// Assumes that the current token is a valid simple type name validated by SHAPE_BODY.
// Assumes that the current token is a valid simple type name validated by SHAPE.
tokenizer.withState(this, tokenizer::next);
}
},

AGGREGATE_SHAPE_RESOURCE {
// Don't use this directly. Instead, use parseOptionalForResource
FOR_RESOURCE {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
IDENTIFIER.parse(tokenizer);
tokenizer.next(); // Skip "for"
SP.parse(tokenizer);
SHAPE_ID.parse(tokenizer);
});
@@ -377,11 +360,7 @@ void parse(CapturingTokenizer tokenizer) {
} else {
EXPLICIT_SHAPE_MEMBER.parse(tokenizer);
}

optionalSpaces(tokenizer);
if (tokenizer.getCurrentToken() == IdlToken.EQUAL) {
VALUE_ASSIGNMENT.parse(tokenizer);
}
parseOptionalValueAssignment(tokenizer);
});
}
},
@@ -416,11 +395,9 @@ void parse(CapturingTokenizer tokenizer) {
void parse(CapturingTokenizer tokenizer) {
// Assumes that the shape type is a valid "service" or "resource".
tokenizer.withState(this, () -> {
parseShapeTypeAndName(tokenizer);
parseShapeTypeAndName(tokenizer, ENTITY_TYPE_NAME);

if (tokenizer.isCurrentLexeme("with")) {
SHAPE_MIXINS.parse(tokenizer);
}
parseOptionalMixins(tokenizer);

optionalWs(tokenizer);
tokenizer.expect(IdlToken.LBRACE);
@@ -429,15 +406,21 @@ void parse(CapturingTokenizer tokenizer) {
}
},

ENTITY_TYPE_NAME {
@Override
void parse(CapturingTokenizer tokenizer) {
// Assumes that the current token is a valid entity type name validated by SHAPE.
tokenizer.withState(this, tokenizer::next);
}
},

OPERATION_SHAPE {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
parseShapeTypeAndName(tokenizer);

if (tokenizer.isCurrentLexeme("with")) {
SHAPE_MIXINS.parse(tokenizer);
}
parseOptionalMixins(tokenizer);

optionalWs(tokenizer);
OPERATION_BODY.parse(tokenizer);
@@ -544,11 +527,12 @@ void parse(CapturingTokenizer tokenizer) {

// Mixins =
// [SP] %s"with" [WS] "[" [WS] 1*(ShapeId [WS]) "]"
SHAPE_MIXINS {
// Don't use this directly. Instead, use parseOptionalMixins
MIXINS {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
IDENTIFIER.parse(tokenizer); // 'with'
tokenizer.next(); // Skip "with"
optionalWs(tokenizer);

tokenizer.expect(IdlToken.LBRACKET);
@@ -567,6 +551,7 @@ void parse(CapturingTokenizer tokenizer) {
}
},

// Don't use this directly. Instead, use parseOptionalValueAssignment
VALUE_ASSIGNMENT {
@Override
void parse(CapturingTokenizer tokenizer) {
@@ -634,7 +619,9 @@ void parse(CapturingTokenizer tokenizer) {
case STRING:
case IDENTIFIER:
default:
if (tokenizer.peekPastWs().getIdlToken() == IdlToken.COLON) {
CapturedToken nextPastWs = tokenizer.peekWhile(1, token ->
token.isWhitespace() || token == IdlToken.DOC_COMMENT);
if (nextPastWs.getIdlToken() == IdlToken.COLON) {
TRAIT_STRUCTURE.parse(tokenizer);
} else {
TRAIT_NODE.parse(tokenizer);
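
The colon peek above decides between the two trait body forms: a key followed by a colon starts a TRAIT_STRUCTURE, for example @range(min: 1), while anything else is a single TRAIT_NODE value, for example @tags(["a", "b"]). Here is a small, self-contained sketch of that decision; the string tokens and class name are illustrative, not the real API.

import java.util.List;

final class TraitBodySketch {
    // Given the tokens after a trait's "(", mirror the peekWhile(1, ...) check: skip the
    // first value token, step past whitespace and doc comments, and look for a colon.
    static String classify(List<String> tokensAfterLParen) {
        int i = 1; // start one past the current (STRING or IDENTIFIER) token
        while (i < tokensAfterLParen.size()
                && (tokensAfterLParen.get(i).equals("WS") || tokensAfterLParen.get(i).equals("DOC_COMMENT"))) {
            i++;
        }
        boolean colonNext = i < tokensAfterLParen.size() && tokensAfterLParen.get(i).equals("COLON");
        return colonNext ? "TRAIT_STRUCTURE" : "TRAIT_NODE";
    }

    public static void main(String[] args) {
        System.out.println(classify(List.of("IDENTIFIER", "COLON", "NUMBER", "RPAREN"))); // TRAIT_STRUCTURE
        System.out.println(classify(List.of("STRING", "RPAREN")));                        // TRAIT_NODE
    }
}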
@@ -677,7 +664,7 @@ void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
// Try to see if this is a singular or block apply statement.
IdlToken peek = tokenizer
.peekWhile(1, t -> t != IdlToken.EOF && t != IdlToken.AT && t != IdlToken.LBRACE)
.peekWhile(1, t -> t != IdlToken.AT && t != IdlToken.LBRACE)
.getIdlToken();
if (peek == IdlToken.LBRACE) {
APPLY_STATEMENT_BLOCK.parse(tokenizer);
@@ -692,7 +679,7 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
tokenizer.next();
tokenizer.next(); // Skip "apply"
SP.parse(tokenizer);
SHAPE_ID.parse(tokenizer);
WS.parse(tokenizer);
@@ -705,7 +692,7 @@ void parse(CapturingTokenizer tokenizer) {
@Override
void parse(CapturingTokenizer tokenizer) {
tokenizer.withState(this, () -> {
tokenizer.next();
tokenizer.next(); // Skip "apply"
SP.parse(tokenizer);
SHAPE_ID.parse(tokenizer);
WS.parse(tokenizer);
@@ -1089,27 +1076,46 @@ protected static void optionalSpaces(CapturingTokenizer tokenizer) {
}

protected static void parseShapeTypeAndName(CapturingTokenizer tokenizer) {
tokenizer.expect(IdlToken.IDENTIFIER);
tokenizer.next(); // skip the shape type
parseShapeTypeAndName(tokenizer, null);
}

protected static void parseShapeTypeAndName(CapturingTokenizer tokenizer, TreeType typeName) {
if (typeName == null) {
tokenizer.next();
} else {
typeName.parse(tokenizer); // Skip the shape type
}
optionalSpaces(tokenizer);
IDENTIFIER.parse(tokenizer); // shape name
optionalSpaces(tokenizer);
}

protected static void parseSharedStructureBodyWithinInline(CapturingTokenizer tokenizer) {
optionalSpaces(tokenizer);
parseOptionalForResource(tokenizer);
parseOptionalMixins(tokenizer);

optionalWs(tokenizer);
SHAPE_MEMBERS.parse(tokenizer);
}

protected static void parseOptionalForResource(CapturingTokenizer tokenizer) {
optionalSpaces(tokenizer);
if (tokenizer.isCurrentLexeme("for")) {
AGGREGATE_SHAPE_RESOURCE.parse(tokenizer);
optionalSpaces(tokenizer);
FOR_RESOURCE.parse(tokenizer);
}
}

protected static void parseOptionalMixins(CapturingTokenizer tokenizer) {
optionalSpaces(tokenizer);
if (tokenizer.isCurrentLexeme("with")) {
SHAPE_MIXINS.parse(tokenizer);
MIXINS.parse(tokenizer);
}
}

optionalWs(tokenizer);
SHAPE_MEMBERS.parse(tokenizer);
protected static void parseOptionalValueAssignment(CapturingTokenizer tokenizer) {
if (tokenizer.peekPastSpaces().getIdlToken() == IdlToken.EQUAL) {
VALUE_ASSIGNMENT.parse(tokenizer);
}
}

protected static void operationInputOutputDefinition(CapturingTokenizer tokenizer) {
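
Taken together, the shared helpers factor one parsing pattern out of the shape-parsing branches above: parse the shape type and name, then accept each optional production in order. The sketch below shows that composition over a plain lexeme list; the class, its behavior, and the combination of all four optional steps are illustrative assumptions, not the PR's code.

import java.util.List;

final class ShapeStatementSketch {
    private final List<String> lexemes;
    private int cursor;

    ShapeStatementSketch(List<String> lexemes) {
        this.lexemes = lexemes;
    }

    // Mirrors the helper order: type + name, optional "for <resource>", optional
    // "with [mixins]", optional "= <value>".
    String parse() {
        StringBuilder tree = new StringBuilder();
        tree.append("type=").append(next());   // parseShapeTypeAndName: shape type
        tree.append(" name=").append(next());  // ...then the shape name
        if ("for".equals(current())) {         // parseOptionalForResource
            next();
            tree.append(" for=").append(next());
        }
        if ("with".equals(current())) {        // parseOptionalMixins
            next();
            tree.append(" mixins=").append(next());
        }
        if ("=".equals(current())) {           // parseOptionalValueAssignment
            next();
            tree.append(" value=").append(next());
        }
        return tree.toString();
    }

    private String current() {
        return cursor < lexemes.size() ? lexemes.get(cursor) : "<EOF>";
    }

    private String next() {
        String lexeme = current();
        cursor++;
        return lexeme;
    }

    public static void main(String[] args) {
        ShapeStatementSketch sketch = new ShapeStatementSketch(
                List.of("structure", "UserData", "for", "User", "with", "[UserMixin]"));
        System.out.println(sketch.parse());
        // type=structure name=UserData for=User mixins=[UserMixin]
    }
}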