Columns (all strings; value length min – max):
  comment            16 – 8.84k
  method_body        37 – 239k
  target_code        0 – 242
  method_body_after  29 – 239k
  context_before     14 – 424k
  context_after      14 – 284k
It is not necessary to call analyze for tableRelation separately; analyzing the query will perform the table analysis logic.
public static void analyze(UpdateStmt updateStmt, ConnectContext session) { TableName tableName = updateStmt.getTableName(); MetaUtils.normalizationTableName(session, tableName); MetaUtils.getStarRocks(session, tableName); Table table = MetaUtils.getStarRocksTable(session, tableName); if (!(table instanceof OlapTable && ((OlapTable) table).getKeysType() == KeysType.PRIMARY_KEYS)) { throw unsupportedException("only support updating primary key table"); } if (updateStmt.getWherePredicate() == null) { throw new SemanticException("must specify where clause to prevent full table update"); } List<ColumnAssignment> assignmentList = updateStmt.getAssignments(); Map<String, ColumnAssignment> assignmentByColName = assignmentList.stream().collect( Collectors.toMap(assign -> assign.getColumn().toLowerCase(), a -> a)); SelectList selectList = new SelectList(); for (Column col : table.getBaseSchema()) { SelectListItem item; ColumnAssignment assign = assignmentByColName.get(col.getName().toLowerCase()); if (assign != null) { if (col.isKey()) { throw new SemanticException("primary key column cannot be updated: " + col.getName()); } item = new SelectListItem(new CastExpr(col.getType(), assign.getExpr()), col.getName()); } else { item = new SelectListItem(new SlotRef(tableName, col.getName()), col.getName()); } selectList.addItem(item); } TableRelation tableRelation = new TableRelation(tableName); tableRelation.setTable(table); Scope tableScope = analyzeTable(tableRelation, tableName, table, session); SelectRelation selectRelation = new SelectRelation(selectList, tableRelation, updateStmt.getWherePredicate(), null, null); AnalyzeState analyzeState = new AnalyzeState(); SelectAnalyzer selectAnalyzer = new SelectAnalyzer(session); selectAnalyzer.analyze( analyzeState, selectRelation.getSelectList(), selectRelation.getRelation(), tableScope, selectRelation.getGroupByClause(), selectRelation.getHavingClause(), selectRelation.getWhereClause(), selectRelation.getOrderBy(), selectRelation.getLimit()); selectRelation.fillResolvedAST(analyzeState); updateStmt.setTable(table); updateStmt.setUpdateRelation(selectRelation); }
Scope tableScope = analyzeTable(tableRelation, tableName, table, session);
public static void analyze(UpdateStmt updateStmt, ConnectContext session) { TableName tableName = updateStmt.getTableName(); MetaUtils.normalizationTableName(session, tableName); MetaUtils.getStarRocks(session, tableName); Table table = MetaUtils.getStarRocksTable(session, tableName); if (!(table instanceof OlapTable && ((OlapTable) table).getKeysType() == KeysType.PRIMARY_KEYS)) { throw unsupportedException("only support updating primary key table"); } if (updateStmt.getWherePredicate() == null) { throw new SemanticException("must specify where clause to prevent full table update"); } List<ColumnAssignment> assignmentList = updateStmt.getAssignments(); Map<String, ColumnAssignment> assignmentByColName = assignmentList.stream().collect( Collectors.toMap(assign -> assign.getColumn().toLowerCase(), a -> a)); SelectList selectList = new SelectList(); for (Column col : table.getBaseSchema()) { SelectListItem item; ColumnAssignment assign = assignmentByColName.get(col.getName().toLowerCase()); if (assign != null) { if (col.isKey()) { throw new SemanticException("primary key column cannot be updated: " + col.getName()); } item = new SelectListItem(new CastExpr(col.getType(), assign.getExpr()), col.getName()); } else { item = new SelectListItem(new SlotRef(tableName, col.getName()), col.getName()); } selectList.addItem(item); } TableRelation tableRelation = new TableRelation(tableName); SelectRelation selectRelation = new SelectRelation(selectList, tableRelation, updateStmt.getWherePredicate(), null, null); QueryStatement queryStatement = new QueryStatement(selectRelation); queryStatement.setIsExplain(updateStmt.isExplain(), updateStmt.getExplainLevel()); new QueryAnalyzer(session).analyze(queryStatement); updateStmt.setTable(table); updateStmt.setQueryStatement(queryStatement); }
class UpdateAnalyzer { private static final Logger LOG = LogManager.getLogger(UpdateAnalyzer.class); public static Scope analyzeTable(TableRelation node, TableName tableName, Table table, ConnectContext session) { ImmutableList.Builder<Field> fields = ImmutableList.builder(); ImmutableMap.Builder<Field, Column> columns = ImmutableMap.builder(); for (Column column : table.getFullSchema()) { Field field; if (table.getBaseSchema().contains(column)) { field = new Field(column.getName(), column.getType(), tableName, new SlotRef(tableName, column.getName(), column.getName()), true); } else { field = new Field(column.getName(), column.getType(), tableName, new SlotRef(tableName, column.getName(), column.getName()), false); } columns.put(field, column); fields.add(field); } node.setColumns(columns.build()); session.getDumpInfo().addTable(node.getName().getDb().split(":")[1], table); Scope scope = new Scope(RelationId.of(node), new RelationFields(fields.build())); node.setScope(scope); return scope; } }
class UpdateAnalyzer { }
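The review point in this row is about delegation: rather than pre-analyzing the TableRelation by hand and then invoking SelectAnalyzer directly, the revised code wraps the SelectRelation in a QueryStatement and lets QueryAnalyzer resolve the table as part of analyzing the whole query. A minimal, self-contained sketch of that shape (all class names below are hypothetical stand-ins, not the StarRocks API):

```java
// Hypothetical stand-ins illustrating "analyze the whole query, not the parts".
class TableRelation { final String name; TableRelation(String name) { this.name = name; } }

class SelectRelation {
    final TableRelation from;
    SelectRelation(TableRelation from) { this.from = from; }
}

class QueryStatement {
    final SelectRelation select;
    QueryStatement(SelectRelation select) { this.select = select; }
}

class QueryAnalyzer {
    void analyze(QueryStatement stmt) {
        // The query analyzer resolves the table itself, so callers need not
        // pre-analyze the TableRelation before handing the statement over.
        System.out.println("resolving table: " + stmt.select.from.name);
    }
}

public class DelegationSketch {
    public static void main(String[] args) {
        SelectRelation select = new SelectRelation(new TableRelation("t"));
        // No separate analyzeTable(...) call: one entry point does all of it.
        new QueryAnalyzer().analyze(new QueryStatement(select));
    }
}
```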
We need to check the size of `memTypes` before creating a union type. We shouldn't be creating a union type when there are 0 or 1 members. 0 should probably be neverType or semanticError.
public BType getSafeType(BType type, boolean liftNil, boolean liftError) { if (liftNil) { switch (type.tag) { case TypeTags.JSON: return new BJSONType((BJSONType) type, false); case TypeTags.ANY: return new BAnyType(type.tag, type.tsymbol, false); case TypeTags.ANYDATA: return new BAnydataType((BAnydataType) type, false); case TypeTags.READONLY: if (liftError) { return symTable.anyAndReadonly; } return new BReadonlyType(type.tag, type.tsymbol, false); } } if (type.tag != TypeTags.UNION) { return type; } BUnionType unionType = (BUnionType) type; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes()); BUnionType errorLiftedType = BUnionType.create(null, memTypes); if (liftNil) { errorLiftedType.remove(symTable.nilType); } if (liftError) { memTypes = errorLiftedType.getMemberTypes().stream().filter(t -> t.tag != TypeTags.ERROR) .collect(Collectors.toCollection(LinkedHashSet::new)); errorLiftedType = BUnionType.create(null, memTypes); } if (errorLiftedType.getMemberTypes().size() == 1) { return errorLiftedType.getMemberTypes().toArray(new BType[0])[0]; } return errorLiftedType; }
errorLiftedType = BUnionType.create(null, memTypes);
public BType getSafeType(BType type, boolean liftNil, boolean liftError) { if (liftNil) { switch (type.tag) { case TypeTags.JSON: return new BJSONType((BJSONType) type, false); case TypeTags.ANY: return new BAnyType(type.tag, type.tsymbol, false); case TypeTags.ANYDATA: return new BAnydataType((BAnydataType) type, false); case TypeTags.READONLY: if (liftError) { return symTable.anyAndReadonly; } return new BReadonlyType(type.tag, type.tsymbol, false); } } if (type.tag != TypeTags.UNION) { return type; } BUnionType unionType = (BUnionType) type; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes()); BUnionType errorLiftedType = BUnionType.create(null, memTypes); if (liftNil) { errorLiftedType.remove(symTable.nilType); } if (liftError) { LinkedHashSet<BType> bTypes = new LinkedHashSet<>(); for (BType t : errorLiftedType.getMemberTypes()) { if (t.tag != TypeTags.ERROR) { bTypes.add(t); } } memTypes = bTypes; errorLiftedType = BUnionType.create(null, memTypes); } if (errorLiftedType.getMemberTypes().size() == 1) { return errorLiftedType.getMemberTypes().toArray(new BType[0])[0]; } if (errorLiftedType.getMemberTypes().size() == 0) { return symTable.semanticError; } return errorLiftedType; }
class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType target, BType source) { if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) { return (source == target) || isIntOrStringType(source.tag, target.tag); } if (source.tag == TypeTags.FINITE) { return checkValueSpaceHasSameType(((BFiniteType) source), target); } return isSameOrderedType(target, source, this.unresolvedTypes); } @Override public Boolean visit(BArrayType target, BType source) { if (source.tag != TypeTags.ARRAY) { return false; } BArrayType rhsArrayType = (BArrayType) source; boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes); if (target.state == BArrayState.OPEN) { return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements; } return hasSameOrderedTypeElements; } @Override public Boolean visit(BTupleType target, BType source) { if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) { return false; } BTupleType sourceT = (BTupleType) source; BType sourceRestType = sourceT.restType; BType targetRestType = target.restType; int sourceTupleCount = sourceT.tupleTypes.size(); int targetTupleCount = target.tupleTypes.size(); int len = Math.min(sourceTupleCount, targetTupleCount); for (int i = 0; i < len; i++) { if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceTupleCount == targetTupleCount) { if (sourceRestType == null || targetRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } if (sourceTupleCount > targetTupleCount) { return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType, targetRestType); } return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType, sourceRestType); } private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount, int targetTupleCount, BType sourceRestType, BType targetRestType) { if (targetRestType == null) { return true; } for (int i = targetTupleCount; i < sourceTupleCount; i++) { if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) { return false; } } if (sourceRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BUnionType target, BType source) { if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) { return checkUnionHasSameType(target.getMemberTypes(), source); } BUnionType sUnionType = (BUnionType) source; LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes(); LinkedHashSet<BType> targetTypes = target.getMemberTypes(); if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) && checkUnionHasAllFiniteOrNilMembers(targetTypes)) { if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) { return false; } return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()), sUnionType.getMemberTypes().iterator().next()); } if (sUnionType.getMemberTypes().size() != target.getMemberTypes().size()) { return false; } return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes); } private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes, 
LinkedHashSet<BType> targetTypes) { for (BType sourceT : sourceTypes) { boolean foundSameOrderedType = false; for (BType targetT : targetTypes) { if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) { foundSameOrderedType = true; break; } } if (!foundSameOrderedType) { return false; } } return true; } @Override public Boolean visit(BFiniteType t, BType s) { return checkValueSpaceHasSameType(t, s); } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } @Override public Boolean visit(BBuiltInRefType t, BType s) { return false; } @Override public Boolean visit(BAnyType t, BType s) { return false; } @Override public Boolean visit(BAnydataType t, BType s) { return false; } @Override public Boolean visit(BMapType t, BType s) { return false; } @Override public Boolean visit(BFutureType t, BType s) { return false; } @Override public Boolean visit(BXMLType t, BType s) { return false; } @Override public Boolean visit(BJSONType t, BType s) { return false; } @Override public Boolean visit(BObjectType t, BType s) { return false; } @Override public Boolean visit(BRecordType t, BType s) { return false; } @Override public Boolean visit(BStreamType t, BType s) { return false; } @Override public Boolean visit(BTableType t, BType s) { return false; } @Override public Boolean visit(BInvokableType t, BType s) { return false; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { return false; } @Override public Boolean visit(BErrorType t, BType s) { return false; } @Override public Boolean visit(BTypedescType t, BType s) { return false; } @Override public Boolean visit(BParameterizedType t, BType s) { return false; } }
class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType target, BType source) { if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) { return (source == target) || isIntOrStringType(source.tag, target.tag); } if (source.tag == TypeTags.FINITE) { return checkValueSpaceHasSameType(((BFiniteType) source), target); } return isSameOrderedType(target, source, this.unresolvedTypes); } @Override public Boolean visit(BArrayType target, BType source) { if (source.tag != TypeTags.ARRAY) { return false; } BArrayType rhsArrayType = (BArrayType) source; boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes); if (target.state == BArrayState.OPEN) { return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements; } return hasSameOrderedTypeElements; } @Override public Boolean visit(BTupleType target, BType source) { if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) { return false; } BTupleType sourceT = (BTupleType) source; BType sourceRestType = sourceT.restType; BType targetRestType = target.restType; int sourceTupleCount = sourceT.tupleTypes.size(); int targetTupleCount = target.tupleTypes.size(); int len = Math.min(sourceTupleCount, targetTupleCount); for (int i = 0; i < len; i++) { if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceTupleCount == targetTupleCount) { if (sourceRestType == null || targetRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } if (sourceTupleCount > targetTupleCount) { return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType, targetRestType); } return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType, sourceRestType); } private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount, int targetTupleCount, BType sourceRestType, BType targetRestType) { if (targetRestType == null) { return true; } for (int i = targetTupleCount; i < sourceTupleCount; i++) { if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) { return false; } } if (sourceRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BUnionType target, BType source) { if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) { return checkUnionHasSameType(target.getMemberTypes(), source); } BUnionType sUnionType = (BUnionType) source; LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes(); LinkedHashSet<BType> targetTypes = target.getMemberTypes(); if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) && checkUnionHasAllFiniteOrNilMembers(targetTypes)) { if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) { return false; } return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()), sUnionType.getMemberTypes().iterator().next()); } if (sUnionType.getMemberTypes().size() != target.getMemberTypes().size()) { return false; } return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes); } private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes, 
LinkedHashSet<BType> targetTypes) { for (BType sourceT : sourceTypes) { boolean foundSameOrderedType = false; for (BType targetT : targetTypes) { if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) { foundSameOrderedType = true; break; } } if (!foundSameOrderedType) { return false; } } return true; } @Override public Boolean visit(BFiniteType t, BType s) { return checkValueSpaceHasSameType(t, s); } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } @Override public Boolean visit(BBuiltInRefType t, BType s) { return false; } @Override public Boolean visit(BAnyType t, BType s) { return false; } @Override public Boolean visit(BAnydataType t, BType s) { return false; } @Override public Boolean visit(BMapType t, BType s) { return false; } @Override public Boolean visit(BFutureType t, BType s) { return false; } @Override public Boolean visit(BXMLType t, BType s) { return false; } @Override public Boolean visit(BJSONType t, BType s) { return false; } @Override public Boolean visit(BObjectType t, BType s) { return false; } @Override public Boolean visit(BRecordType t, BType s) { return false; } @Override public Boolean visit(BStreamType t, BType s) { return false; } @Override public Boolean visit(BTableType t, BType s) { return false; } @Override public Boolean visit(BInvokableType t, BType s) { return false; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { return false; } @Override public Boolean visit(BErrorType t, BType s) { return false; } @Override public Boolean visit(BTypedescType t, BType s) { return false; } @Override public Boolean visit(BParameterizedType t, BType s) { return false; } }
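The reviewer's rule generalizes well: a union constructor should never be handed zero or one member. A self-contained sketch of the guard, with `Type`/`UnionType`/`NEVER` as stand-ins for the compiler's BType machinery (the real fix returns `symTable.semanticError` for the empty case):

```java
import java.util.LinkedHashSet;
import java.util.Set;

// Minimal sketch of the size check requested in the review comment.
interface Type {}
record UnionType(Set<Type> members) implements Type {}
enum Singleton implements Type { NEVER, INT, ERROR }

public class UnionGuard {
    static Type unionOf(LinkedHashSet<Type> members) {
        if (members.isEmpty()) {
            return Singleton.NEVER;           // or a semantic-error type
        }
        if (members.size() == 1) {
            return members.iterator().next(); // never build a 1-member union
        }
        return new UnionType(members);
    }

    public static void main(String[] args) {
        LinkedHashSet<Type> one = new LinkedHashSet<>();
        one.add(Singleton.INT);
        System.out.println(unionOf(one)); // INT, not a union
    }
}
```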
```suggestion
        }
        return Relation.NO_RELATION;
```
private Relation getSemTypeRelation(SemType t1, SemType t2) { boolean s1 = Core.isSubtype(ctx, t1, t2); boolean s2 = Core.isSubtype(ctx, t2, t1); if (s1 && s2) { return Relation.EQUAL; } else if (s1) { return Relation.SUBTYPE; } else if (s2) { throw new IllegalStateException("'>' relation found which can be converted to a '<' relation"); } else { return Relation.NO_RELATION; } }
return Relation.NO_RELATION;
private Relation getSemTypeRelation(SemType t1, SemType t2) { boolean s1 = Core.isSubtype(ctx, t1, t2); boolean s2 = Core.isSubtype(ctx, t2, t1); if (s1 && s2) { return Relation.EQUAL; } else if (s1) { return Relation.SUBTYPE; } else if (s2) { throw new IllegalStateException("'>' relation found which can be converted to a '<' relation"); } else { return Relation.NO_RELATION; } }
class CellTypeTest { Context ctx; public enum Relation { EQUAL("="), SUBTYPE("<"), NO_RELATION("<>"); final String value; Relation(String value) { this.value = value; } } @BeforeClass public void beforeClass() { ctx = Context.from(new Env()); } private CellSemType cell(SemType ty, CellAtomicType.CellMutability mut) { return CellSubtype.cellContaining(ctx.env, ty, mut); } private SemType tuple(SemType ty) { return SemTypes.tuple(ctx.env, new SemType[]{ty}); } private void assertSemTypeRelation(SemType t1, SemType t2, Relation relation) { Relation actual = getSemTypeRelation(t1, t2); Assert.assertEquals(actual, relation); } @Test(description = "Test T and cell(T) having no relation", dataProvider = "typeCellDisparityDataProvider") public void testTypeCellDisparity(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "typeCellDisparityDataProvider") public Object[][] createTypeCellDisparityTestData() { return new Object[][]{ {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_NONE), Relation.NO_RELATION}, {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_LIMITED), Relation.NO_RELATION}, {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.NO_RELATION}, }; } @Test(description = "Test basic cell subtyping", dataProvider = "basicCellSubtypingDataProvider") public void testBasicCellSubtyping(SemType t1, SemType t2, Relation[] relations) { assert relations.length == 3; Relation[] actual = new Relation[3]; CellAtomicType.CellMutability[] values = CellAtomicType.CellMutability.values(); for (int i = 0; i < values.length; i++) { CellAtomicType.CellMutability mut = values[i]; CellSemType c1 = cell(t1, mut); CellSemType c2 = cell(t2, mut); actual[i] = getSemTypeRelation(c1, c2); } Assert.assertEquals(actual, relations); } @DataProvider(name = "basicCellSubtypingDataProvider") public Object[][] createBasicCellSubtypingTestData() { return new Object[][]{ { PredefinedType.INT, PredefinedType.INT, new Relation[]{ Relation.EQUAL, Relation.EQUAL, Relation.EQUAL } }, { PredefinedType.BOOLEAN, PredefinedType.BOOLEAN, new Relation[]{ Relation.EQUAL, Relation.EQUAL, Relation.EQUAL } }, { PredefinedType.BYTE, PredefinedType.INT, new Relation[]{ Relation.SUBTYPE, Relation.SUBTYPE, Relation.SUBTYPE } }, { PredefinedType.BOOLEAN, PredefinedType.INT, new Relation[]{ Relation.NO_RELATION, Relation.NO_RELATION, Relation.NO_RELATION } }, { PredefinedType.BOOLEAN, Core.union(PredefinedType.INT, PredefinedType.BOOLEAN), new Relation[]{ Relation.SUBTYPE, Relation.SUBTYPE, Relation.SUBTYPE } } }; } @Test(dataProvider = "cellSubtypeDataProvider1") public void testCellSubtyping1(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "cellSubtypeDataProvider1") public Object[][] createCellSubtypeData1() { return new Object[][]{ { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_NONE) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_UNLIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.union( 
cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_NONE), cell(PredefinedType.STRING, CELL_MUT_NONE) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_UNLIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_UNLIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_NONE), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_NONE) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_LIMITED), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_LIMITED) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_UNLIMITED), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_UNLIMITED) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_UNLIMITED), Relation.EQUAL }, }; } @Test(dataProvider = "cellSubtypeDataProvider2") public void testCellSubtyping2(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "cellSubtypeDataProvider2") public Object[][] createCellSubtypeData2() { return new Object[][]{ { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_UNLIMITED ), Relation.SUBTYPE }, { cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_NONE ), SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_LIMITED ), Relation.NO_RELATION }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_NONE), 
Relation.EQUAL }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BYTE, CELL_MUT_LIMITED) ), cell(PredefinedType.BYTE, CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), SemTypes.union( cell(PredefinedType.BYTE, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED) ) ), cell(PredefinedType.BYTE, CELL_MUT_NONE), Relation.EQUAL }, }; } @AfterClass public void afterClass() { ctx = null; } }
class CellTypeTest { Context ctx; public enum Relation { EQUAL("="), SUBTYPE("<"), NO_RELATION("<>"); final String value; Relation(String value) { this.value = value; } } @BeforeClass public void beforeClass() { ctx = Context.from(new Env()); } private CellSemType cell(SemType ty, CellAtomicType.CellMutability mut) { return CellSubtype.cellContaining(ctx.env, ty, mut); } private SemType tuple(SemType ty) { return SemTypes.tuple(ctx.env, new SemType[]{ty}); } private void assertSemTypeRelation(SemType t1, SemType t2, Relation relation) { Relation actual = getSemTypeRelation(t1, t2); Assert.assertEquals(actual, relation); } @Test(description = "Test T and cell(T) having no relation", dataProvider = "typeCellDisparityDataProvider") public void testTypeCellDisparity(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "typeCellDisparityDataProvider") public Object[][] createTypeCellDisparityTestData() { return new Object[][]{ {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_NONE), Relation.NO_RELATION}, {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_LIMITED), Relation.NO_RELATION}, {PredefinedType.INT, cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.NO_RELATION}, }; } @Test(description = "Test basic cell subtyping", dataProvider = "basicCellSubtypingDataProvider") public void testBasicCellSubtyping(SemType t1, SemType t2, Relation[] relations) { assert relations.length == 3; Relation[] actual = new Relation[3]; CellAtomicType.CellMutability[] values = CellAtomicType.CellMutability.values(); for (int i = 0; i < values.length; i++) { CellAtomicType.CellMutability mut = values[i]; CellSemType c1 = cell(t1, mut); CellSemType c2 = cell(t2, mut); actual[i] = getSemTypeRelation(c1, c2); } Assert.assertEquals(actual, relations); } @DataProvider(name = "basicCellSubtypingDataProvider") public Object[][] createBasicCellSubtypingTestData() { return new Object[][]{ { PredefinedType.INT, PredefinedType.INT, new Relation[]{ Relation.EQUAL, Relation.EQUAL, Relation.EQUAL } }, { PredefinedType.BOOLEAN, PredefinedType.BOOLEAN, new Relation[]{ Relation.EQUAL, Relation.EQUAL, Relation.EQUAL } }, { PredefinedType.BYTE, PredefinedType.INT, new Relation[]{ Relation.SUBTYPE, Relation.SUBTYPE, Relation.SUBTYPE } }, { PredefinedType.BOOLEAN, PredefinedType.INT, new Relation[]{ Relation.NO_RELATION, Relation.NO_RELATION, Relation.NO_RELATION } }, { PredefinedType.BOOLEAN, Core.union(PredefinedType.INT, PredefinedType.BOOLEAN), new Relation[]{ Relation.SUBTYPE, Relation.SUBTYPE, Relation.SUBTYPE } } }; } @Test(dataProvider = "cellSubtypeDataProvider1") public void testCellSubtyping1(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "cellSubtypeDataProvider1") public Object[][] createCellSubtypeData1() { return new Object[][]{ { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_NONE) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_UNLIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN), CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.union( 
cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_NONE), cell(PredefinedType.STRING, CELL_MUT_NONE) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_UNLIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_UNLIMITED) ), cell(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_NONE), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_NONE) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_NONE), Relation.EQUAL }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_LIMITED), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_LIMITED) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.union( cell(tuple(PredefinedType.INT), CELL_MUT_UNLIMITED), cell(tuple(PredefinedType.BOOLEAN), CELL_MUT_UNLIMITED) ), cell(tuple(SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN)), CELL_MUT_UNLIMITED), Relation.EQUAL }, }; } @Test(dataProvider = "cellSubtypeDataProvider2") public void testCellSubtyping2(SemType t1, SemType t2, Relation relation) { assertSemTypeRelation(t1, t2, relation); } @DataProvider(name = "cellSubtypeDataProvider2") public Object[][] createCellSubtypeData2() { return new Object[][]{ { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_UNLIMITED ), Relation.SUBTYPE }, { cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_NONE ), SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), Relation.SUBTYPE }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.BOOLEAN, CELL_MUT_UNLIMITED), cell(PredefinedType.STRING, CELL_MUT_LIMITED) ), cell( SemTypes.union(PredefinedType.INT, PredefinedType.BOOLEAN, PredefinedType.STRING), CELL_MUT_LIMITED ), Relation.NO_RELATION }, { SemTypes.union( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.EQUAL }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_UNLIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.INT, CELL_MUT_UNLIMITED) ), cell(PredefinedType.INT, CELL_MUT_NONE), 
Relation.EQUAL }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), cell(PredefinedType.INT, CELL_MUT_LIMITED), cell(PredefinedType.BYTE, CELL_MUT_LIMITED) ), cell(PredefinedType.BYTE, CELL_MUT_LIMITED), Relation.SUBTYPE }, { SemTypes.intersect( cell(PredefinedType.INT, CELL_MUT_NONE), SemTypes.union( cell(PredefinedType.BYTE, CELL_MUT_LIMITED), cell(PredefinedType.BOOLEAN, CELL_MUT_LIMITED) ) ), cell(PredefinedType.BYTE, CELL_MUT_NONE), Relation.EQUAL }, }; } @AfterClass public void afterClass() { ctx = null; } }
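The helper under review derives a relation from two directed subtype checks. A tiny self-contained model of the same decision table, using `Set.containsAll` in place of `Core.isSubtype` (the names here are illustrative only, not the semtype test API):

```java
import java.util.Set;

public class RelationSketch {
    enum Relation { EQUAL, SUBTYPE, NO_RELATION }

    // Models Core.isSubtype(ctx, t1, t2) with plain set inclusion.
    static Relation relate(Set<Integer> t1, Set<Integer> t2) {
        boolean s1 = t2.containsAll(t1); // t1 <: t2
        boolean s2 = t1.containsAll(t2); // t2 <: t1
        if (s1 && s2) {
            return Relation.EQUAL;
        }
        if (s1) {
            return Relation.SUBTYPE;
        }
        if (s2) {
            // The test helper treats a '>' result as a caller error: the
            // arguments should be flipped so it is reported as '<' instead.
            throw new IllegalStateException("swap arguments: '>' found");
        }
        return Relation.NO_RELATION;
    }

    public static void main(String[] args) {
        System.out.println(relate(Set.of(1), Set.of(1, 2))); // SUBTYPE
        System.out.println(relate(Set.of(1), Set.of(2)));    // NO_RELATION
    }
}
```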
Can't we return this as `return new ArrayValueImpl(cacheMap.keySet().toArray(new String[0]));` ?
public static BArray externKeys(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); String[] keySets = cacheMap.keySet().toArray(new String[0]); HandleValue[] handleValues = new HandleValue[keySets.length]; for (int i = 0; i < keySets.length; i++) { handleValues[i] = new HandleValue(keySets[i]); } return BValueCreator.createArrayValue(handleValues, new BArrayType(BTypes.typeHandle)); }
String[] keySets = cacheMap.keySet().toArray(new String[0]);
public static BArray externKeys(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); String[] keySets = cacheMap.keySet().toArray(new String[0]); HandleValue[] handleValues = new HandleValue[keySets.length]; for (int i = 0; i < keySets.length; i++) { handleValues[i] = new HandleValue(keySets[i]); } return BValueCreator.createArrayValue(handleValues, new BArrayType(BTypes.typeHandle)); }
class Cache { private static ConcurrentLinkedHashMap<String, BMap<String, Object>> cacheMap; private static final String MAX_CAPACITY = "capacity"; private static final String EVICTION_FACTOR = "evictionFactor"; private static final String EXPIRE_TIME = "expTime"; private static final String CACHE = "CACHE"; private static final String ID = "ID"; private Cache() {} public static void externInit(ObjectValue cache) { int capacity = (int) cache.getIntValue(MAX_CAPACITY); cacheMap = new ConcurrentLinkedHashMap<>(capacity); cache.addNativeData(CACHE, cacheMap); } @SuppressWarnings("unchecked") public static void externPut(ObjectValue cache, String key, BMap<String, Object> value) { int capacity = (int) cache.getIntValue(MAX_CAPACITY); float evictionFactor = (float) cache.getFloatValue(EVICTION_FACTOR); cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); if (cacheMap.size() >= capacity) { int evictionKeysCount = (int) Math.ceil(capacity * evictionFactor); cacheMap.setCapacity((capacity - evictionKeysCount)); cacheMap.setCapacity(capacity); } cacheMap.put(key, value); } @SuppressWarnings("unchecked") public static Object externGet(ObjectValue cache, String key, Long currentTime) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); BMap<String, Object> value = cacheMap.get(key); Long time = (Long) value.get(EXPIRE_TIME); if (time != -1 && time <= currentTime) { cacheMap.remove(key); return null; } return value; } @SuppressWarnings("unchecked") public static void externRemove(ObjectValue cache, String key) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); cacheMap.remove(key); } @SuppressWarnings("unchecked") public static void externRemoveAll(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); cacheMap.clear(); } @SuppressWarnings("unchecked") public static boolean externHasKey(ObjectValue cache, String key) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); return cacheMap.containsKey(key); } @SuppressWarnings("unchecked") @SuppressWarnings("unchecked") public static int externSize(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); return cacheMap.size(); } @SuppressWarnings("unchecked") public static void externCleanUp(ObjectValue cache, Long currentTime) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); for (Map.Entry<String, BMap<String, Object>> entry : cacheMap.entrySet()) { BMap<String, Object> value = entry.getValue(); Long time = (Long) value.get(EXPIRE_TIME); if (time != -1 && time <= currentTime) { cacheMap.remove(entry.getKey()); } } } }
class Cache { private static ConcurrentLinkedHashMap<String, BMap<String, Object>> cacheMap; private static final String MAX_CAPACITY = "capacity"; private static final String EVICTION_FACTOR = "evictionFactor"; private static final String EXPIRE_TIME = "expTime"; private static final String CACHE = "CACHE"; private static final String ID = "ID"; private Cache() {} public static void externInit(ObjectValue cache) { int capacity = (int) cache.getIntValue(MAX_CAPACITY); cacheMap = new ConcurrentLinkedHashMap<>(capacity); cache.addNativeData(CACHE, cacheMap); } @SuppressWarnings("unchecked") public static void externPut(ObjectValue cache, String key, BMap<String, Object> value) { int capacity = (int) cache.getIntValue(MAX_CAPACITY); float evictionFactor = (float) cache.getFloatValue(EVICTION_FACTOR); cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); if (cacheMap.size() >= capacity) { int evictionKeysCount = (int) Math.ceil(capacity * evictionFactor); cacheMap.setCapacity((capacity - evictionKeysCount)); cacheMap.setCapacity(capacity); } cacheMap.put(key, value); } @SuppressWarnings("unchecked") public static Object externGet(ObjectValue cache, String key, Long currentTime) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); BMap<String, Object> value = cacheMap.get(key); Long time = (Long) value.get(EXPIRE_TIME); if (time != -1 && time <= currentTime) { cacheMap.remove(key); return null; } return value; } @SuppressWarnings("unchecked") public static void externRemove(ObjectValue cache, String key) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); cacheMap.remove(key); } @SuppressWarnings("unchecked") public static void externRemoveAll(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); cacheMap.clear(); } @SuppressWarnings("unchecked") public static boolean externHasKey(ObjectValue cache, String key) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); return cacheMap.containsKey(key); } @SuppressWarnings("unchecked") @SuppressWarnings("unchecked") public static int externSize(ObjectValue cache) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); return cacheMap.size(); } @SuppressWarnings("unchecked") public static void externCleanUp(ObjectValue cache, Long currentTime) { cacheMap = (ConcurrentLinkedHashMap<String, BMap<String, Object>>) cache.getNativeData(CACHE); for (Map.Entry<String, BMap<String, Object>> entry : cacheMap.entrySet()) { BMap<String, Object> value = entry.getValue(); Long time = (Long) value.get(EXPIRE_TIME); if (time != -1 && time <= currentTime) { cacheMap.remove(entry.getKey()); } } } }
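The reviewer's point is that `keySet().toArray(new String[0])` already yields the array to hand to the runtime's array constructor, so wrapping each key in a `HandleValue` first is unnecessary. A minimal sketch of the simplification (`ArrayValueImpl` in the comment is the Ballerina runtime class the reviewer names; the map below is an ordinary stand-in for the cache):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class KeysSketch {
    // One call produces the String[] directly; no per-element copy loop.
    static String[] keys(Map<String, Object> cache) {
        return cache.keySet().toArray(new String[0]);
    }

    public static void main(String[] args) {
        Map<String, Object> cache = new ConcurrentHashMap<>();
        cache.put("a", 1);
        cache.put("b", 2);
        System.out.println(String.join(",", keys(cache)));
    }
}
```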
Hm, the `renderTemplate()` method is called from the `handle()` above...
protected void renderTemplate(RoutingContext event, TemplateInstance template) { for (Map.Entry<String, Object> global : globalData.entrySet()) { template.data(global.getKey(), global.getValue()); } template.renderAsync().handle(new BiFunction<String, Throwable, Object>() { @Override public Object apply(String s, Throwable throwable) { if (throwable != null) { event.fail(throwable); } else { event.response().putHeader(HttpHeaderNames.CONTENT_TYPE, HTML_CONTENT_TYPE).end(s); } return null; } }); }
event.response().putHeader(HttpHeaderNames.CONTENT_TYPE, HTML_CONTENT_TYPE).end(s);
protected void renderTemplate(RoutingContext event, TemplateInstance template) { for (Map.Entry<String, Object> global : globalData.entrySet()) { template.data(global.getKey(), global.getValue()); } template.renderAsync().handle(new BiFunction<String, Throwable, Object>() { @Override public Object apply(String s, Throwable throwable) { if (throwable != null) { event.fail(throwable); } else { event.response().end(s); } return null; } }); }
class DevConsole implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger(DevConsole.class); private static final String HTML_CONTENT_TYPE = "text/html; charset=UTF-8"; static final ThreadLocal<String> currentExtension = new ThreadLocal<>(); private static final Comparator<Map<String, Object>> EXTENSION_COMPARATOR = Comparator .comparing(m -> ((String) m.get("name"))); final Engine engine; final Map<String, Map<String, Object>> extensions = new HashMap<>(); final Map<String, Object> globalData = new HashMap<>(); final Config config = ConfigProvider.getConfig(); final String devRootAppend; DevConsole(Engine engine, String httpRootPath, String frameworkRootPath) { this.engine = engine; this.globalData.put("httpRootPath", httpRootPath); this.globalData.put("frameworkRootPath", frameworkRootPath); this.devRootAppend = frameworkRootPath + "dev"; this.globalData.put("devRootAppend", devRootAppend); this.globalData.put("quarkusVersion", Version.getVersion()); this.globalData.put("applicationName", config.getOptionalValue("quarkus.application.name", String.class).orElse("")); this.globalData.put("applicationVersion", config.getOptionalValue("quarkus.application.version", String.class).orElse("")); } private void initLazyState() { if (extensions.isEmpty()) { synchronized (extensions) { if (extensions.isEmpty()) { try { final Yaml yaml = new Yaml(new SafeConstructor()); ClassPathUtils.consumeAsPaths("/META-INF/quarkus-extension.yaml", p -> { try { final String desc; try (Scanner scanner = new Scanner(Files.newBufferedReader(p, StandardCharsets.UTF_8))) { scanner.useDelimiter("\\A"); desc = scanner.hasNext() ? scanner.next() : null; } if (desc == null) { return; } final Map<String, Object> metadata = yaml.load(desc); extensions.put(getExtensionNamespace(metadata), metadata); } catch (IOException | RuntimeException e) { log.error("Failed to process extension descriptor " + p.toUri(), e); } }); this.globalData.put("configKeyMap", getConfigKeyMap()); } catch (IOException x) { throw new RuntimeException(x); } } } } } @Override public void handle(RoutingContext ctx) { initLazyState(); if (ctx.normalizedPath().length() == devRootAppend.length()) { ctx.response().setStatusCode(302); ctx.response().headers().set(HttpHeaders.LOCATION, devRootAppend + "/"); ctx.response().end(); return; } String path = ctx.normalizedPath().substring(ctx.mountPoint().length() + 1); if (path.isEmpty() || path.equals("/")) { sendMainPage(ctx); } else { int nsIndex = path.indexOf("/"); if (nsIndex == -1) { ctx.response().setStatusCode(404).end(); return; } String namespace = path.substring(0, nsIndex); currentExtension.set(namespace); Template devTemplate = engine.getTemplate(path); if (devTemplate != null) { String extName = getExtensionName(namespace); ctx.response().setStatusCode(200).headers().set(HttpHeaderNames.CONTENT_TYPE, HTML_CONTENT_TYPE); TemplateInstance devTemplateInstance = devTemplate .data("currentExtensionName", extName) .data("query-string", ctx.request().query()) .data("flash", FlashScopeUtil.getFlash(ctx)) .data("currentRequest", ctx.request()); renderTemplate(ctx, devTemplateInstance); } else { ctx.next(); } } } private Map<String, List<String>> getConfigKeyMap() { Map<String, List<String>> ckm = new TreeMap<>(); Collection<Map<String, Object>> values = this.extensions.values(); for (Map<String, Object> extension : values) { if (extension.containsKey("metadata")) { Map<String, Object> metadata = (Map<String, Object>) extension.get("metadata"); if (metadata.containsKey("config")) 
{ List<String> configKeys = (List<String>) metadata.get("config"); String name = (String) extension.get("name"); ckm.put(name, configKeys); } } } return ckm; } private String getExtensionName(String namespace) { Map<String, Object> map = extensions.get(namespace); if (map == null) return null; return (String) map.get("name"); } private void sendMainPage(RoutingContext event) { Template devTemplate = engine.getTemplate("index"); List<Map<String, Object>> actionableExtensions = new ArrayList<>(); List<Map<String, Object>> nonActionableExtensions = new ArrayList<>(); for (Entry<String, Map<String, Object>> entry : this.extensions.entrySet()) { final String namespace = entry.getKey(); final Map<String, Object> loaded = entry.getValue(); @SuppressWarnings("unchecked") final Map<String, Object> metadata = (Map<String, Object>) loaded.get("metadata"); currentExtension.set(namespace); Template simpleTemplate = engine.getTemplate(namespace + "/embedded.html"); boolean hasConsoleEntry = simpleTemplate != null; boolean hasGuide = metadata.containsKey("guide"); boolean hasConfig = metadata.containsKey("config"); boolean isUnlisted = metadata.containsKey("unlisted") && (metadata.get("unlisted").equals(true) || metadata.get("unlisted").equals("true")); loaded.put("hasConsoleEntry", hasConsoleEntry); loaded.put("hasGuide", hasGuide); if (!isUnlisted || hasConsoleEntry || hasGuide || hasConfig) { if (hasConsoleEntry) { Map<String, Object> data = new HashMap<>(); data.putAll(globalData); data.put("urlbase", namespace); String result = simpleTemplate.render(data); loaded.put("_dev", result); actionableExtensions.add(loaded); } else { nonActionableExtensions.add(loaded); } } } actionableExtensions.sort(EXTENSION_COMPARATOR); nonActionableExtensions.sort(EXTENSION_COMPARATOR); TemplateInstance instance = devTemplate.data("actionableExtensions", actionableExtensions) .data("nonActionableExtensions", nonActionableExtensions).data("flash", FlashScopeUtil.getFlash(event)); renderTemplate(event, instance); } private static String getExtensionNamespace(Map<String, Object> metadata) { final String groupId; final String artifactId; final String artifact = (String) metadata.get("artifact"); if (artifact == null) { groupId = (String) metadata.get("group-id"); artifactId = (String) metadata.get("artifact-id"); if (artifactId == null || groupId == null) { throw new RuntimeException( "Failed to locate 'artifact' or 'group-id' and 'artifact-id' among metadata keys " + metadata.keySet()); } } else { final GACTV coords = GACTV.fromString(artifact); groupId = coords.getGroupId(); artifactId = coords.getArtifactId(); } return groupId + "." + artifactId; } }
class DevConsole implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger(DevConsole.class); private static final String HTML_CONTENT_TYPE = "text/html; charset=UTF-8"; static final ThreadLocal<String> currentExtension = new ThreadLocal<>(); private static final Comparator<Map<String, Object>> EXTENSION_COMPARATOR = Comparator .comparing(m -> ((String) m.get("name"))); final Engine engine; final Map<String, Map<String, Object>> extensions = new HashMap<>(); final Map<String, Object> globalData = new HashMap<>(); final Config config = ConfigProvider.getConfig(); final String devRootAppend; DevConsole(Engine engine, String httpRootPath, String frameworkRootPath) { this.engine = engine; this.globalData.put("httpRootPath", httpRootPath); this.globalData.put("frameworkRootPath", frameworkRootPath); this.devRootAppend = frameworkRootPath + "dev"; this.globalData.put("devRootAppend", devRootAppend); this.globalData.put("quarkusVersion", Version.getVersion()); this.globalData.put("applicationName", config.getOptionalValue("quarkus.application.name", String.class).orElse("")); this.globalData.put("applicationVersion", config.getOptionalValue("quarkus.application.version", String.class).orElse("")); } private void initLazyState() { if (extensions.isEmpty()) { synchronized (extensions) { if (extensions.isEmpty()) { try { final Yaml yaml = new Yaml(new SafeConstructor()); ClassPathUtils.consumeAsPaths("/META-INF/quarkus-extension.yaml", p -> { try { final String desc; try (Scanner scanner = new Scanner(Files.newBufferedReader(p, StandardCharsets.UTF_8))) { scanner.useDelimiter("\\A"); desc = scanner.hasNext() ? scanner.next() : null; } if (desc == null) { return; } final Map<String, Object> metadata = yaml.load(desc); extensions.put(getExtensionNamespace(metadata), metadata); } catch (IOException | RuntimeException e) { log.error("Failed to process extension descriptor " + p.toUri(), e); } }); this.globalData.put("configKeyMap", getConfigKeyMap()); } catch (IOException x) { throw new RuntimeException(x); } } } } } @Override public void handle(RoutingContext ctx) { initLazyState(); if (ctx.normalizedPath().length() == devRootAppend.length()) { ctx.response().setStatusCode(302); ctx.response().headers().set(HttpHeaders.LOCATION, devRootAppend + "/"); ctx.response().end(); return; } String path = ctx.normalizedPath().substring(ctx.mountPoint().length() + 1); if (path.isEmpty() || path.equals("/")) { sendMainPage(ctx); } else { int nsIndex = path.indexOf("/"); if (nsIndex == -1) { ctx.response().setStatusCode(404).end(); return; } String namespace = path.substring(0, nsIndex); currentExtension.set(namespace); Template devTemplate = engine.getTemplate(path); if (devTemplate != null) { String extName = getExtensionName(namespace); ctx.response().setStatusCode(200).headers().set(HttpHeaderNames.CONTENT_TYPE, HTML_CONTENT_TYPE); TemplateInstance devTemplateInstance = devTemplate .data("currentExtensionName", extName) .data("query-string", ctx.request().query()) .data("flash", FlashScopeUtil.getFlash(ctx)) .data("currentRequest", ctx.request()); renderTemplate(ctx, devTemplateInstance); } else { ctx.next(); } } } private Map<String, List<String>> getConfigKeyMap() { Map<String, List<String>> ckm = new TreeMap<>(); Collection<Map<String, Object>> values = this.extensions.values(); for (Map<String, Object> extension : values) { if (extension.containsKey("metadata")) { Map<String, Object> metadata = (Map<String, Object>) extension.get("metadata"); if (metadata.containsKey("config")) 
{ List<String> configKeys = (List<String>) metadata.get("config"); String name = (String) extension.get("name"); ckm.put(name, configKeys); } } } return ckm; } private String getExtensionName(String namespace) { Map<String, Object> map = extensions.get(namespace); if (map == null) return null; return (String) map.get("name"); } private void sendMainPage(RoutingContext event) { Template devTemplate = engine.getTemplate("index"); List<Map<String, Object>> actionableExtensions = new ArrayList<>(); List<Map<String, Object>> nonActionableExtensions = new ArrayList<>(); for (Entry<String, Map<String, Object>> entry : this.extensions.entrySet()) { final String namespace = entry.getKey(); final Map<String, Object> loaded = entry.getValue(); @SuppressWarnings("unchecked") final Map<String, Object> metadata = (Map<String, Object>) loaded.get("metadata"); currentExtension.set(namespace); Template simpleTemplate = engine.getTemplate(namespace + "/embedded.html"); boolean hasConsoleEntry = simpleTemplate != null; boolean hasGuide = metadata.containsKey("guide"); boolean hasConfig = metadata.containsKey("config"); boolean isUnlisted = metadata.containsKey("unlisted") && (metadata.get("unlisted").equals(true) || metadata.get("unlisted").equals("true")); loaded.put("hasConsoleEntry", hasConsoleEntry); loaded.put("hasGuide", hasGuide); if (!isUnlisted || hasConsoleEntry || hasGuide || hasConfig) { if (hasConsoleEntry) { Map<String, Object> data = new HashMap<>(); data.putAll(globalData); data.put("urlbase", namespace); String result = simpleTemplate.render(data); loaded.put("_dev", result); actionableExtensions.add(loaded); } else { nonActionableExtensions.add(loaded); } } } actionableExtensions.sort(EXTENSION_COMPARATOR); nonActionableExtensions.sort(EXTENSION_COMPARATOR); TemplateInstance instance = devTemplate.data("actionableExtensions", actionableExtensions) .data("nonActionableExtensions", nonActionableExtensions).data("flash", FlashScopeUtil.getFlash(event)); event.response().setStatusCode(200).headers().set(HttpHeaderNames.CONTENT_TYPE, HTML_CONTENT_TYPE); renderTemplate(event, instance); } private static String getExtensionNamespace(Map<String, Object> metadata) { final String groupId; final String artifactId; final String artifact = (String) metadata.get("artifact"); if (artifact == null) { groupId = (String) metadata.get("group-id"); artifactId = (String) metadata.get("artifact-id"); if (artifactId == null || groupId == null) { throw new RuntimeException( "Failed to locate 'artifact' or 'group-id' and 'artifact-id' among metadata keys " + metadata.keySet()); } } else { final GACTV coords = GACTV.fromString(artifact); groupId = coords.getGroupId(); artifactId = coords.getArtifactId(); } return groupId + "." + artifactId; } }
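The change in this row moves the Content-Type header out of the shared `renderTemplate()` helper and into its callers (`handle()` already set it; `sendMainPage()` now does too), so the header is set exactly once per response path. A self-contained sketch of that shape with a stand-in response object (not the Vert.x API):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

public class HeaderOnceSketch {
    // Stand-in for an HTTP response: headers must be set before end().
    static class Response {
        final Map<String, String> headers = new HashMap<>();
        void end(String body) {
            System.out.println(headers + " -> " + body.length() + " bytes");
        }
    }

    // The shared render helper sets no headers itself; each caller has
    // already done so, instead of some paths setting them twice and the
    // main-page path not at all.
    static void render(Response resp, CompletableFuture<String> rendered) {
        rendered.handle((body, err) -> {
            if (err != null) {
                resp.headers.put("status", "500");
                resp.end("error");
            } else {
                resp.end(body); // Content-Type already set by the caller
            }
            return null;
        });
    }

    public static void main(String[] args) {
        Response resp = new Response();
        resp.headers.put("Content-Type", "text/html; charset=UTF-8");
        render(resp, CompletableFuture.completedFuture("<html></html>"));
    }
}
```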
I'm not sure about this. My concern is that the <2.6 test case might eventually not be executed in most cases. Take `testDoesntCallGetContainersFromPreviousAttemptsMethodIfAbsent` as an example: if we make this test case execute only with Hadoop <2.2, then it's practically never executed. Currently we have Hadoop 2.8 for the Travis CI test, 2.4 & 2.8 for the nightly test, and 2.4 (pom default) for local maven verify unless another version is intentionally specified.
public void testCallsGetSchedulerResourceTypesMethodIfPresent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final Optional<Set<String>> schedulerResourceTypeNames = registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(new HasMethod()); assertTrue(schedulerResourceTypeNames.isPresent()); assertThat(schedulerResourceTypeNames.get(), containsInAnyOrder("MEMORY", "CPU")); }
new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class);
public void testCallsGetSchedulerResourceTypesMethodIfPresent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final Optional<Set<String>> schedulerResourceTypeNames = registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(new HasMethod()); assertTrue(schedulerResourceTypeNames.isPresent()); assertThat(schedulerResourceTypeNames.get(), containsInAnyOrder("MEMORY", "CPU")); }
class RegisterApplicationMasterResponseReflectorTest extends TestLogger { private static final Logger LOG = LoggerFactory.getLogger(RegisterApplicationMasterResponseReflectorTest.class); @Mock private Container mockContainer; @Before public void setUp() { MockitoAnnotations.initMocks(this); } @Test public void testCallsGetContainersFromPreviousAttemptsMethodIfPresent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final List<Container> containersFromPreviousAttemptsUnsafe = registerApplicationMasterResponseReflector.getContainersFromPreviousAttemptsUnsafe(new HasMethod()); assertThat(containersFromPreviousAttemptsUnsafe, hasSize(1)); } @Test public void testDoesntCallGetContainersFromPreviousAttemptsMethodIfAbsent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final List<Container> containersFromPreviousAttemptsUnsafe = registerApplicationMasterResponseReflector.getContainersFromPreviousAttemptsUnsafe(new Object()); assertThat(containersFromPreviousAttemptsUnsafe, empty()); } @Test public void testGetContainersFromPreviousAttemptsMethodReflectiveHadoop22() { assumeTrue( "Method getContainersFromPreviousAttempts is not supported by Hadoop: " + VersionInfo.getVersion(), isHadoopVersionGreaterThanOrEquals(2, 2)); final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG); final Method method = registerApplicationMasterResponseReflector.getGetContainersFromPreviousAttemptsMethod(); assertThat(method, notNullValue()); } @Test @Test public void testDoesntCallGetSchedulerResourceTypesMethodIfAbsent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final Optional<Set<String>> schedulerResourceTypeNames = registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(new Object()); assertFalse(schedulerResourceTypeNames.isPresent()); } @Test public void testGetSchedulerResourceTypesMethodReflectiveHadoop26() { assumeTrue( "Method getSchedulerResourceTypes is not supported by Hadoop: " + VersionInfo.getVersion(), isHadoopVersionGreaterThanOrEquals(2, 6)); final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG); final Method method = registerApplicationMasterResponseReflector.getGetSchedulerResourceTypesMethod(); assertThat(method, notNullValue()); } /** * Class which has a method with the same signature as * {@link RegisterApplicationMasterResponse */ private class HasMethod { /** * Called from {@link */ @SuppressWarnings("unused") public List<Container> getContainersFromPreviousAttempts() { return Collections.singletonList(mockContainer); } /** * Called from {@link */ @SuppressWarnings("unused") public EnumSet<MockSchedulerResourceTypes> getSchedulerResourceTypes() { return EnumSet.allOf(MockSchedulerResourceTypes.class); } } @SuppressWarnings("unused") private enum MockSchedulerResourceTypes { MEMORY, CPU } }
class RegisterApplicationMasterResponseReflectorTest extends TestLogger { private static final Logger LOG = LoggerFactory.getLogger(RegisterApplicationMasterResponseReflectorTest.class); @Mock private Container mockContainer; @Before public void setUp() { MockitoAnnotations.initMocks(this); } @Test public void testCallsGetContainersFromPreviousAttemptsMethodIfPresent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final List<Container> containersFromPreviousAttemptsUnsafe = registerApplicationMasterResponseReflector.getContainersFromPreviousAttemptsUnsafe(new HasMethod()); assertThat(containersFromPreviousAttemptsUnsafe, hasSize(1)); } @Test public void testDoesntCallGetContainersFromPreviousAttemptsMethodIfAbsent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final List<Container> containersFromPreviousAttemptsUnsafe = registerApplicationMasterResponseReflector.getContainersFromPreviousAttemptsUnsafe(new Object()); assertThat(containersFromPreviousAttemptsUnsafe, empty()); } @Test public void testGetContainersFromPreviousAttemptsMethodReflectiveHadoop22() { assumeTrue( "Method getContainersFromPreviousAttempts is not supported by Hadoop: " + VersionInfo.getVersion(), isHadoopVersionGreaterThanOrEquals(2, 2)); final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG); assertTrue(registerApplicationMasterResponseReflector.getGetContainersFromPreviousAttemptsMethod().isPresent()); } @Test @Test public void testDoesntCallGetSchedulerResourceTypesMethodIfAbsent() { final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG, HasMethod.class); final Optional<Set<String>> schedulerResourceTypeNames = registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(new Object()); assertFalse(schedulerResourceTypeNames.isPresent()); } @Test public void testGetSchedulerResourceTypesMethodReflectiveHadoop26() { assumeTrue( "Method getSchedulerResourceTypes is not supported by Hadoop: " + VersionInfo.getVersion(), isHadoopVersionGreaterThanOrEquals(2, 6)); final RegisterApplicationMasterResponseReflector registerApplicationMasterResponseReflector = new RegisterApplicationMasterResponseReflector(LOG); assertTrue(registerApplicationMasterResponseReflector.getGetSchedulerResourceTypesMethod().isPresent()); } /** * Class which has a method with the same signature as * {@link RegisterApplicationMasterResponse */ private class HasMethod { /** * Called from {@link */ @SuppressWarnings("unused") public List<Container> getContainersFromPreviousAttempts() { return Collections.singletonList(mockContainer); } /** * Called from {@link */ @SuppressWarnings("unused") public EnumSet<MockSchedulerResourceTypes> getSchedulerResourceTypes() { return EnumSet.allOf(MockSchedulerResourceTypes.class); } } @SuppressWarnings("unused") private enum MockSchedulerResourceTypes { MEMORY, CPU } }
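What the tests in this record exercise is a reflective probe: the reflector resolves `getContainersFromPreviousAttempts` / `getSchedulerResourceTypes` by name and only invokes them when the running Hadoop version provides them. A minimal standalone sketch of that pattern; `MethodProbe` and `invokeIfPresent` are illustrative names, not the Flink implementation:

```java
import java.lang.reflect.Method;
import java.util.Optional;

// Minimal model of the reflective probe the tests above exercise:
// look a no-arg method up by name and call it only if it exists.
final class MethodProbe {
    static Optional<Object> invokeIfPresent(Object target, String methodName) {
        try {
            Method m = target.getClass().getMethod(methodName);
            return Optional.ofNullable(m.invoke(target));
        } catch (ReflectiveOperationException e) {
            return Optional.empty(); // method absent on this Hadoop version
        }
    }

    public static void main(String[] args) {
        // "toString" exists on every object; the Hadoop-specific getter may not.
        System.out.println(invokeIfPresent("x", "toString"));                  // Optional[x]
        System.out.println(invokeIfPresent("x", "getSchedulerResourceTypes")); // Optional.empty
    }
}
```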
Yes, you are right. It is a bug; I will fix it.
public static boolean isAllEqualInnerJoin(OptExpression root) { Operator operator = root.getOp(); if (!(operator instanceof LogicalOperator)) { return false; } if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) operator; boolean isEqualPredicate = isColumnEqualPredicate(joinOperator.getOnPredicate()); if (joinOperator.getJoinType() == JoinOperator.INNER_JOIN && isEqualPredicate) { return true; } return false; } for (OptExpression child : root.getInputs()) { if (!isAllEqualInnerJoin(child)) { return false; } } return true; }
if (joinOperator.getJoinType() == JoinOperator.INNER_JOIN && isEqualPredicate) {
public static boolean isAllEqualInnerJoin(OptExpression root) { Operator operator = root.getOp(); if (!(operator instanceof LogicalOperator)) { return false; } if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) operator; boolean isEqualPredicate = isColumnEqualPredicate(joinOperator.getOnPredicate()); if (joinOperator.getJoinType() != JoinOperator.INNER_JOIN || !isEqualPredicate) { return false; } } for (OptExpression child : root.getInputs()) { if (!isAllEqualInnerJoin(child)) { return false; } } return true; }
class Utils { private static final Logger LOG = LogManager.getLogger(Utils.class); public static List<ScalarOperator> extractConjuncts(ScalarOperator root) { LinkedList<ScalarOperator> list = new LinkedList<>(); if (null == root) { return list; } extractConjunctsImpl(root, list); return list; } public static void extractConjunctsImpl(ScalarOperator root, List<ScalarOperator> result) { if (!OperatorType.COMPOUND.equals(root.getOpType())) { result.add(root); return; } CompoundPredicateOperator cpo = (CompoundPredicateOperator) root; if (!cpo.isAnd()) { result.add(root); return; } extractConjunctsImpl(cpo.getChild(0), result); extractConjunctsImpl(cpo.getChild(1), result); } public static List<ScalarOperator> extractDisjunctive(ScalarOperator root) { LinkedList<ScalarOperator> list = new LinkedList<>(); if (null == root) { return list; } extractDisjunctiveImpl(root, list); return list; } public static void extractDisjunctiveImpl(ScalarOperator root, List<ScalarOperator> result) { if (!OperatorType.COMPOUND.equals(root.getOpType())) { result.add(root); return; } CompoundPredicateOperator cpo = (CompoundPredicateOperator) root; if (!cpo.isOr()) { result.add(root); return; } extractDisjunctiveImpl(cpo.getChild(0), result); extractDisjunctiveImpl(cpo.getChild(1), result); } public static List<ColumnRefOperator> extractColumnRef(ScalarOperator root) { if (null == root || !root.isVariable()) { return new LinkedList<>(); } LinkedList<ColumnRefOperator> list = new LinkedList<>(); if (OperatorType.VARIABLE.equals(root.getOpType())) { list.add((ColumnRefOperator) root); return list; } for (ScalarOperator child : root.getChildren()) { list.addAll(extractColumnRef(child)); } return list; } public static int countColumnRef(ScalarOperator root) { return countColumnRef(root, 0); } private static int countColumnRef(ScalarOperator root, int count) { if (null == root || !root.isVariable()) { return 0; } if (OperatorType.VARIABLE.equals(root.getOpType())) { return 1; } for (ScalarOperator child : root.getChildren()) { count += countColumnRef(child, count); } return count; } public static void extractOlapScanOperator(GroupExpression groupExpression, List<LogicalOlapScanOperator> list) { extractOperator(groupExpression, list, p -> OperatorType.LOGICAL_OLAP_SCAN.equals(p.getOpType())); } private static <E extends Operator> void extractOperator(GroupExpression root, List<E> list, Predicate<Operator> lambda) { if (lambda.test(root.getOp())) { list.add((E) root.getOp()); return; } List<Group> groups = root.getInputs(); for (Group group : groups) { GroupExpression expression = group.getFirstLogicalExpression(); extractOperator(expression, list, lambda); } } public static boolean containAnyColumnRefs(List<ColumnRefOperator> refs, ScalarOperator operator) { if (refs.isEmpty() || null == operator) { return false; } if (operator.isColumnRef()) { return refs.contains(operator); } for (ScalarOperator so : operator.getChildren()) { if (containAnyColumnRefs(refs, so)) { return true; } } return false; } public static boolean containColumnRef(ScalarOperator operator, String column) { if (null == column || null == operator) { return false; } if (operator.isColumnRef()) { return ((ColumnRefOperator) operator).getName().equalsIgnoreCase(column); } for (ScalarOperator so : operator.getChildren()) { if (containColumnRef(so, column)) { return true; } } return false; } public static ScalarOperator compoundOr(Collection<ScalarOperator> nodes) { return createCompound(CompoundPredicateOperator.CompoundType.OR, nodes); } public static 
ScalarOperator compoundOr(ScalarOperator... nodes) { return createCompound(CompoundPredicateOperator.CompoundType.OR, Arrays.asList(nodes)); } public static ScalarOperator compoundAnd(Collection<ScalarOperator> nodes) { return createCompound(CompoundPredicateOperator.CompoundType.AND, nodes); } public static ScalarOperator compoundAnd(ScalarOperator... nodes) { return createCompound(CompoundPredicateOperator.CompoundType.AND, Arrays.asList(nodes)); } public static ScalarOperator createCompound(CompoundPredicateOperator.CompoundType type, Collection<ScalarOperator> nodes) { LinkedList<ScalarOperator> link = nodes.stream().filter(Objects::nonNull).collect(Collectors.toCollection(Lists::newLinkedList)); if (link.size() < 1) { return null; } if (link.size() == 1) { return link.get(0); } while (link.size() > 1) { LinkedList<ScalarOperator> buffer = new LinkedList<>(); while (link.size() >= 2) { buffer.add(new CompoundPredicateOperator(type, link.poll(), link.poll())); } if (!link.isEmpty()) { buffer.add(link.remove()); } link = buffer; } return link.remove(); } public static boolean isInnerOrCrossJoin(Operator operator) { if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) operator; return joinOperator.isInnerOrCrossJoin(); } return false; } public static int countInnerJoinNodeSize(OptExpression root) { int count = 0; Operator operator = root.getOp(); for (OptExpression child : root.getInputs()) { if (isInnerOrCrossJoin(operator) && ((LogicalJoinOperator) operator).getJoinHint().isEmpty()) { count += countInnerJoinNodeSize(child); } else { count = Math.max(count, countInnerJoinNodeSize(child)); } } if (isInnerOrCrossJoin(operator) && ((LogicalJoinOperator) operator).getJoinHint().isEmpty()) { count += 1; } return count; } public static boolean capableSemiReorder(OptExpression root, boolean hasSemi, int joinNum, int maxJoin) { Operator operator = root.getOp(); if (operator instanceof LogicalJoinOperator) { if (((LogicalJoinOperator) operator).getJoinType().isSemiAntiJoin()) { hasSemi = true; } else { joinNum = joinNum + 1; } if (joinNum > maxJoin && hasSemi) { return false; } } for (OptExpression child : root.getInputs()) { if (operator instanceof LogicalJoinOperator) { if (!capableSemiReorder(child, hasSemi, joinNum, maxJoin)) { return false; } } else { if (!capableSemiReorder(child, false, 0, maxJoin)) { return false; } } } return true; } public static boolean hasUnknownColumnsStats(OptExpression root) { Operator operator = root.getOp(); if (operator instanceof LogicalScanOperator) { LogicalScanOperator scanOperator = (LogicalScanOperator) operator; List<String> colNames = scanOperator.getColRefToColumnMetaMap().values().stream().map(Column::getName).collect( Collectors.toList()); if (operator instanceof LogicalOlapScanOperator) { Table table = scanOperator.getTable(); if (table instanceof OlapTable) { if (KeysType.AGG_KEYS.equals(((OlapTable) table).getKeysType())) { List<String> keyColumnNames = scanOperator.getColRefToColumnMetaMap().values().stream().filter(Column::isKey) .map(Column::getName) .collect(Collectors.toList()); List<ColumnStatistic> keyColumnStatisticList = GlobalStateMgr.getCurrentStatisticStorage().getColumnStatistics(table, keyColumnNames); return keyColumnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } } List<ColumnStatistic> columnStatisticList = GlobalStateMgr.getCurrentStatisticStorage().getColumnStatistics(table, colNames); return columnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } else 
if (operator instanceof LogicalHiveScanOperator || operator instanceof LogicalHudiScanOperator) { if (ConnectContext.get().getSessionVariable().enableHiveColumnStats()) { if (operator instanceof LogicalHiveScanOperator) { return ((LogicalHiveScanOperator) operator).hasUnknownColumn(); } else { return ((LogicalHudiScanOperator) operator).hasUnknownColumn(); } } return true; } else if (operator instanceof LogicalIcebergScanOperator) { IcebergTable table = (IcebergTable) scanOperator.getTable(); try { List<ColumnStatistic> columnStatisticList = IcebergTableStatisticCalculator.getColumnStatistics( new ArrayList<>(), table.getIcebergTable(), scanOperator.getColRefToColumnMetaMap()); return columnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } catch (Exception e) { LOG.warn("Iceberg table {} get column failed. error : {}", table.getName(), e); return true; } } else { return true; } } return root.getInputs().stream().anyMatch(Utils::hasUnknownColumnsStats); } public static long getLongFromDateTime(LocalDateTime dateTime) { return dateTime.atZone(ZoneId.systemDefault()).toInstant().getEpochSecond(); } public static LocalDateTime getDatetimeFromLong(long dateTime) { return LocalDateTime.ofInstant(Instant.ofEpochSecond(dateTime), ZoneId.systemDefault()); } public static long convertBitSetToLong(BitSet bitSet, int length) { long gid = 0; for (int b = 0; b < length; ++b) { gid = gid * 2 + (bitSet.get(b) ? 1 : 0); } return gid; } public static ColumnRefOperator findSmallestColumnRef(List<ColumnRefOperator> columnRefOperatorList) { Preconditions.checkState(!columnRefOperatorList.isEmpty()); ColumnRefOperator smallestColumnRef = columnRefOperatorList.get(0); int smallestColumnLength = Integer.MAX_VALUE; for (ColumnRefOperator columnRefOperator : columnRefOperatorList) { Type columnType = columnRefOperator.getType(); if (columnType.isScalarType()) { int columnLength = columnType.getTypeSize(); if (columnLength < smallestColumnLength) { smallestColumnRef = columnRefOperator; smallestColumnLength = columnLength; } } } return smallestColumnRef; } public static boolean canDoReplicatedJoin(OlapTable table, long selectedIndexId, Collection<Long> selectedPartitionId, Collection<Long> selectedTabletId) { ConnectContext ctx = ConnectContext.get(); int backendSize = ctx.getTotalBackendNumber(); int aliveBackendSize = ctx.getAliveBackendNumber(); int schemaHash = table.getSchemaHashByIndexId(selectedIndexId); for (Long partitionId : selectedPartitionId) { Partition partition = table.getPartition(partitionId); if (table.isLakeTable()) { return false; } if (table.getPartitionInfo().getReplicationNum(partitionId) < backendSize) { return false; } long visibleVersion = partition.getVisibleVersion(); MaterializedIndex materializedIndex = partition.getIndex(selectedIndexId); for (Long id : selectedTabletId) { LocalTablet tablet = (LocalTablet) materializedIndex.getTablet(id); if (tablet != null && tablet.getQueryableReplicasSize(visibleVersion, schemaHash) != aliveBackendSize) { return false; } } } return true; } public static boolean isEqualBinaryPredicate(ScalarOperator predicate) { if (predicate instanceof BinaryPredicateOperator) { BinaryPredicateOperator binaryPredicate = (BinaryPredicateOperator) predicate; return binaryPredicate.getBinaryType().isEquivalence(); } if (predicate instanceof CompoundPredicateOperator) { CompoundPredicateOperator compoundPredicate = (CompoundPredicateOperator) predicate; if (compoundPredicate.isAnd()) { return isEqualBinaryPredicate(compoundPredicate.getChild(0)) && 
isEqualBinaryPredicate(compoundPredicate.getChild(1)); } return false; } return false; } /** * Try cast op to descType, return empty if failed */ public static Optional<ScalarOperator> tryCastConstant(ScalarOperator op, Type descType) { if (!op.isConstantRef() || op.getType().matchesType(descType) || Type.FLOAT.equals(op.getType()) || descType.equals(Type.FLOAT)) { return Optional.empty(); } try { if (((ConstantOperator) op).isNull()) { return Optional.of(ConstantOperator.createNull(descType)); } ConstantOperator result = ((ConstantOperator) op).castToStrictly(descType); if (result.toString().equalsIgnoreCase(op.toString())) { return Optional.of(result); } else if (descType.isDate() && (op.getType().isIntegerType() || op.getType().isStringType())) { if (op.toString().equalsIgnoreCase(result.toString().replaceAll("-", ""))) { return Optional.of(result); } } } catch (Exception ignored) { } return Optional.empty(); } public static Optional<ScalarOperator> tryDecimalCastConstant(CastOperator lhs, ConstantOperator rhs) { Type lhsType = lhs.getType(); Type rhsType = rhs.getType(); Type childType = lhs.getChild(0).getType(); if (!lhsType.isExactNumericType() || !rhsType.isExactNumericType() || !childType.isExactNumericType()) { return Optional.empty(); } if (!Type.isAssignable2Decimal((ScalarType) lhsType, (ScalarType) childType) || !Type.isAssignable2Decimal((ScalarType) childType, (ScalarType) rhsType)) { return Optional.empty(); } if (rhs.isNull()) { return Optional.of(ConstantOperator.createNull(childType)); } try { ConstantOperator result = rhs.castTo(childType); return Optional.of(result); } catch (Exception ignored) { } return Optional.empty(); } public static ScalarOperator transTrue2Null(ScalarOperator predicates) { if (ConstantOperator.TRUE.equals(predicates)) { return null; } return predicates; } public static <T extends ScalarOperator> List<T> collect(ScalarOperator root, Class<T> clazz) { List<T> output = Lists.newArrayList(); collect(root, clazz, output); return output; } private static <T extends ScalarOperator> void collect(ScalarOperator root, Class<T> clazz, List<T> output) { if (clazz.isInstance(root)) { output.add(clazz.cast(root)); } root.getChildren().forEach(child -> collect(child, clazz, output)); } public static Set<MaterializedView> getRelatedMvs(int maxLevel, List<Table> tablesToCheck) { Set<MaterializedView> mvs = Sets.newHashSet(); getRelatedMvs(maxLevel, 0, tablesToCheck, mvs); return mvs; } public static void getRelatedMvs(int maxLevel, int currentLevel, List<Table> tablesToCheck, Set<MaterializedView> mvs) { if (currentLevel >= maxLevel) { return; } Set<MvId> newMvIds = Sets.newHashSet(); for (Table table : tablesToCheck) { Set<MvId> mvIds = table.getRelatedMaterializedViews(); if (mvIds != null && !mvIds.isEmpty()) { newMvIds.addAll(mvIds); } } if (newMvIds.isEmpty()) { return; } List<Table> newMvs = Lists.newArrayList(); for (MvId mvId : newMvIds) { Database db = GlobalStateMgr.getCurrentState().getDb(mvId.getDbId()); if (db == null) { continue; } Table table = db.getTable(mvId.getId()); if (table == null) { continue; } newMvs.add(table); mvs.add((MaterializedView) table); } getRelatedMvs(maxLevel, currentLevel + 1, newMvs, mvs); } public static List<Table> getAllTables(OptExpression root) { List<Table> tables = Lists.newArrayList(); getAllTables(root, tables); return tables; } private static void getAllTables(OptExpression root, List<Table> tables) { if (root.getOp() instanceof LogicalScanOperator) { LogicalScanOperator scanOperator = (LogicalScanOperator) 
root.getOp(); tables.add(scanOperator.getTable()); } else { for (OptExpression child : root.getInputs()) { getAllTables(child, tables); } } } public static boolean isValidMVPlan(OptExpression root) { if (root == null) { return false; } return isLogicalSPJ(root) || isLogicalSPJG(root); } public static boolean isLogicalSPJG(OptExpression root) { if (root == null) { return false; } Operator operator = root.getOp(); if (!(operator instanceof LogicalAggregationOperator)) { return false; } LogicalAggregationOperator agg = (LogicalAggregationOperator) operator; if (agg.getType() != AggType.GLOBAL) { return false; } OptExpression child = root.inputAt(0); return isLogicalSPJ(child); } public static boolean isLogicalSPJ(OptExpression root) { if (root == null) { return false; } Operator operator = root.getOp(); if (!(operator instanceof LogicalOperator)) { return false; } if (!(operator instanceof LogicalScanOperator) && !(operator instanceof LogicalProjectOperator) && !(operator instanceof LogicalFilterOperator) && !(operator instanceof LogicalJoinOperator)) { return false; } for (OptExpression child : root.getInputs()) { if (!isLogicalSPJ(child)) { return false; } } return true; } public static Pair<OptExpression, LogicalPlan> getRuleOptimizedLogicalPlan(String sql, ColumnRefFactory columnRefFactory, ConnectContext connectContext) { StatementBase mvStmt; try { List<StatementBase> statementBases = com.starrocks.sql.parser.SqlParser.parse(sql, connectContext.getSessionVariable()); Preconditions.checkState(statementBases.size() == 1); mvStmt = statementBases.get(0); } catch (ParsingException parsingException) { LOG.warn("parse sql:{} failed", sql, parsingException); return null; } Preconditions.checkState(mvStmt instanceof QueryStatement); Analyzer.analyze(mvStmt, connectContext); QueryRelation query = ((QueryStatement) mvStmt).getQueryRelation(); LogicalPlan logicalPlan = new RelationTransformer(columnRefFactory, connectContext).transformWithSelectLimit(query); OptimizerConfig optimizerConfig = new OptimizerConfig(OptimizerConfig.OptimizerAlgorithm.RULE_BASED); optimizerConfig.disableRuleSet(RuleSetType.SINGLE_TABLE_MV_REWRITE); Optimizer optimizer = new Optimizer(optimizerConfig); OptExpression optimizedPlan = optimizer.optimize( connectContext, logicalPlan.getRoot(), new PhysicalPropertySet(), new ColumnRefSet(logicalPlan.getOutputColumn()), columnRefFactory); return Pair.create(optimizedPlan, logicalPlan); } public static List<OptExpression> collectScanExprs(OptExpression expression) { List<OptExpression> scanExprs = Lists.newArrayList(); OptExpressionVisitor scanCollector = new OptExpressionVisitor<Void, Void>() { @Override public Void visit(OptExpression optExpression, Void context) { for (OptExpression input : optExpression.getInputs()) { super.visit(input, context); } return null; } @Override public Void visitLogicalTableScan(OptExpression optExpression, Void context) { scanExprs.add(optExpression); return null; } }; expression.getOp().accept(scanCollector, expression, null); return scanExprs; } public static List<ScalarOperator> getAllPredicates(OptExpression root) { List<ScalarOperator> predicates = Lists.newArrayList(); getAllPredicates(root, predicates); return predicates; } private static void getAllPredicates(OptExpression root, List<ScalarOperator> predicates) { Operator operator = root.getOp(); if (operator.getPredicate() != null) { predicates.add(root.getOp().getPredicate()); } if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) 
operator; if (joinOperator.getOnPredicate() != null) { predicates.add(joinOperator.getOnPredicate()); } } for (OptExpression child : root.getInputs()) { getAllPredicates(child, predicates); } } public static PredicateSplit splitPredicate(ScalarOperator predicate) { if (predicate == null) { return PredicateSplit.of(null, null, null); } List<ScalarOperator> predicateConjuncts = Utils.extractConjuncts(predicate); List<ScalarOperator> columnEqualityPredicates = Lists.newArrayList(); List<ScalarOperator> rangePredicates = Lists.newArrayList(); List<ScalarOperator> residualPredicates = Lists.newArrayList(); for (ScalarOperator scalarOperator : predicateConjuncts) { if (scalarOperator instanceof BinaryPredicateOperator) { BinaryPredicateOperator binary = (BinaryPredicateOperator) scalarOperator; ScalarOperator leftChild = scalarOperator.getChild(0); ScalarOperator rightChild = scalarOperator.getChild(1); if (binary.getBinaryType().isEqual()) { if (leftChild.isColumnRef() && rightChild.isColumnRef()) { columnEqualityPredicates.add(scalarOperator); } else if (leftChild.isColumnRef() && rightChild.isConstantRef()) { rangePredicates.add(scalarOperator); } else { residualPredicates.add(scalarOperator); } } else if (binary.getBinaryType().isRange()) { if (leftChild.isColumnRef() && rightChild.isConstantRef()) { rangePredicates.add(scalarOperator); } else { residualPredicates.add(scalarOperator); } } } else { residualPredicates.add(scalarOperator); } } return PredicateSplit.of(Utils.compoundAnd(columnEqualityPredicates), Utils.compoundAnd(rangePredicates), Utils.compoundAnd(residualPredicates)); } public static ScalarOperator canonizePredicate(ScalarOperator predicate) { if (predicate == null) { return null; } ScalarOperatorRewriter rewrite = new ScalarOperatorRewriter(); return rewrite.rewrite(predicate, ScalarOperatorRewriter.DEFAULT_REWRITE_SCAN_PREDICATE_RULES); } public static ScalarOperator canonizePredicateForRewrite(ScalarOperator predicate) { if (predicate == null) { return null; } ScalarOperatorRewriter rewrite = new ScalarOperatorRewriter(); return rewrite.rewrite(predicate, ScalarOperatorRewriter.MV_SCALAR_REWRITE_RULES); } public static boolean isAlwaysFalse(ScalarOperator predicate) { if (predicate instanceof ConstantOperator) { ConstantOperator constant = (ConstantOperator) predicate; if (constant.getType() == Type.BOOLEAN && constant.getBoolean() == false) { return true; } } else if (predicate instanceof CompoundPredicateOperator) { CompoundPredicateOperator compound = (CompoundPredicateOperator) predicate; if (compound.isAnd()) { return isAlwaysFalse(compound.getChild(0)) || isAlwaysFalse(compound.getChild(1)); } else if (compound.isOr()) { return isAlwaysFalse(compound.getChild(0)) && isAlwaysFalse(compound.getChild(1)); } else if (compound.isNot()) { return isAlwaysTrue(predicate.getChild(0)); } } return false; } public static boolean isAlwaysTrue(ScalarOperator predicate) { if (predicate instanceof ConstantOperator) { ConstantOperator constant = (ConstantOperator) predicate; if (constant.getType() == Type.BOOLEAN && constant.getBoolean() == true) { return true; } } else if (predicate instanceof CompoundPredicateOperator) { CompoundPredicateOperator compound = (CompoundPredicateOperator) predicate; if (compound.isAnd()) { return isAlwaysTrue(compound.getChild(0)) && isAlwaysTrue(compound.getChild(1)); } else if (compound.isOr()) { return isAlwaysTrue(compound.getChild(0)) || isAlwaysTrue(compound.getChild(1)); } else if (compound.isNot()) { return isAlwaysFalse(predicate.getChild(0)); 
} } return false; } public static ScalarOperator splitOr(ScalarOperator src, ScalarOperator target) { List<ScalarOperator> srcItems = Utils.extractDisjunctive(src); List<ScalarOperator> targetItems = Utils.extractDisjunctive(target); int srcLength = srcItems.size(); int targetLength = targetItems.size(); for (ScalarOperator item : srcItems) { removeAll(targetItems, item); } if (targetItems.isEmpty() && srcLength == targetLength) { return ConstantOperator.createBoolean(true); } else if (!targetItems.isEmpty()) { return src; } else { return null; } } public static void removeAll(List<ScalarOperator> scalars, ScalarOperator predicate) { Iterator<ScalarOperator> iter = scalars.iterator(); while (iter.hasNext()) { ScalarOperator current = iter.next(); if (current.equals(predicate)) { iter.remove(); } } } public static boolean isColumnEqualPredicate(ScalarOperator predicate) { if (predicate == null) { return false; } if (predicate instanceof BinaryPredicateOperator) { BinaryPredicateOperator binaryPredicate = (BinaryPredicateOperator) predicate; if (binaryPredicate.getBinaryType().isEqual() && binaryPredicate.getChild(0).isColumnRef() && binaryPredicate.getChild(1).isColumnRef()) { return true; } } return false; } }
class Utils { private static final Logger LOG = LogManager.getLogger(Utils.class); public static List<ScalarOperator> extractConjuncts(ScalarOperator root) { LinkedList<ScalarOperator> list = new LinkedList<>(); if (null == root) { return list; } extractConjunctsImpl(root, list); return list; } public static void extractConjunctsImpl(ScalarOperator root, List<ScalarOperator> result) { if (!OperatorType.COMPOUND.equals(root.getOpType())) { result.add(root); return; } CompoundPredicateOperator cpo = (CompoundPredicateOperator) root; if (!cpo.isAnd()) { result.add(root); return; } extractConjunctsImpl(cpo.getChild(0), result); extractConjunctsImpl(cpo.getChild(1), result); } public static List<ScalarOperator> extractDisjunctive(ScalarOperator root) { LinkedList<ScalarOperator> list = new LinkedList<>(); if (null == root) { return list; } extractDisjunctiveImpl(root, list); return list; } public static void extractDisjunctiveImpl(ScalarOperator root, List<ScalarOperator> result) { if (!OperatorType.COMPOUND.equals(root.getOpType())) { result.add(root); return; } CompoundPredicateOperator cpo = (CompoundPredicateOperator) root; if (!cpo.isOr()) { result.add(root); return; } extractDisjunctiveImpl(cpo.getChild(0), result); extractDisjunctiveImpl(cpo.getChild(1), result); } public static List<ColumnRefOperator> extractColumnRef(ScalarOperator root) { if (null == root || !root.isVariable()) { return new LinkedList<>(); } LinkedList<ColumnRefOperator> list = new LinkedList<>(); if (OperatorType.VARIABLE.equals(root.getOpType())) { list.add((ColumnRefOperator) root); return list; } for (ScalarOperator child : root.getChildren()) { list.addAll(extractColumnRef(child)); } return list; } public static int countColumnRef(ScalarOperator root) { return countColumnRef(root, 0); } private static int countColumnRef(ScalarOperator root, int count) { if (null == root || !root.isVariable()) { return 0; } if (OperatorType.VARIABLE.equals(root.getOpType())) { return 1; } for (ScalarOperator child : root.getChildren()) { count += countColumnRef(child, count); } return count; } public static void extractOlapScanOperator(GroupExpression groupExpression, List<LogicalOlapScanOperator> list) { extractOperator(groupExpression, list, p -> OperatorType.LOGICAL_OLAP_SCAN.equals(p.getOpType())); } private static <E extends Operator> void extractOperator(GroupExpression root, List<E> list, Predicate<Operator> lambda) { if (lambda.test(root.getOp())) { list.add((E) root.getOp()); return; } List<Group> groups = root.getInputs(); for (Group group : groups) { GroupExpression expression = group.getFirstLogicalExpression(); extractOperator(expression, list, lambda); } } public static boolean containAnyColumnRefs(List<ColumnRefOperator> refs, ScalarOperator operator) { if (refs.isEmpty() || null == operator) { return false; } if (operator.isColumnRef()) { return refs.contains(operator); } for (ScalarOperator so : operator.getChildren()) { if (containAnyColumnRefs(refs, so)) { return true; } } return false; } public static boolean containColumnRef(ScalarOperator operator, String column) { if (null == column || null == operator) { return false; } if (operator.isColumnRef()) { return ((ColumnRefOperator) operator).getName().equalsIgnoreCase(column); } for (ScalarOperator so : operator.getChildren()) { if (containColumnRef(so, column)) { return true; } } return false; } public static ScalarOperator compoundOr(Collection<ScalarOperator> nodes) { return createCompound(CompoundPredicateOperator.CompoundType.OR, nodes); } public static 
ScalarOperator compoundOr(ScalarOperator... nodes) { return createCompound(CompoundPredicateOperator.CompoundType.OR, Arrays.asList(nodes)); } public static ScalarOperator compoundAnd(Collection<ScalarOperator> nodes) { return createCompound(CompoundPredicateOperator.CompoundType.AND, nodes); } public static ScalarOperator compoundAnd(ScalarOperator... nodes) { return createCompound(CompoundPredicateOperator.CompoundType.AND, Arrays.asList(nodes)); } public static ScalarOperator createCompound(CompoundPredicateOperator.CompoundType type, Collection<ScalarOperator> nodes) { LinkedList<ScalarOperator> link = nodes.stream().filter(Objects::nonNull).collect(Collectors.toCollection(Lists::newLinkedList)); if (link.size() < 1) { return null; } if (link.size() == 1) { return link.get(0); } while (link.size() > 1) { LinkedList<ScalarOperator> buffer = new LinkedList<>(); while (link.size() >= 2) { buffer.add(new CompoundPredicateOperator(type, link.poll(), link.poll())); } if (!link.isEmpty()) { buffer.add(link.remove()); } link = buffer; } return link.remove(); } public static boolean isInnerOrCrossJoin(Operator operator) { if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) operator; return joinOperator.isInnerOrCrossJoin(); } return false; } public static int countInnerJoinNodeSize(OptExpression root) { int count = 0; Operator operator = root.getOp(); for (OptExpression child : root.getInputs()) { if (isInnerOrCrossJoin(operator) && ((LogicalJoinOperator) operator).getJoinHint().isEmpty()) { count += countInnerJoinNodeSize(child); } else { count = Math.max(count, countInnerJoinNodeSize(child)); } } if (isInnerOrCrossJoin(operator) && ((LogicalJoinOperator) operator).getJoinHint().isEmpty()) { count += 1; } return count; } public static boolean capableSemiReorder(OptExpression root, boolean hasSemi, int joinNum, int maxJoin) { Operator operator = root.getOp(); if (operator instanceof LogicalJoinOperator) { if (((LogicalJoinOperator) operator).getJoinType().isSemiAntiJoin()) { hasSemi = true; } else { joinNum = joinNum + 1; } if (joinNum > maxJoin && hasSemi) { return false; } } for (OptExpression child : root.getInputs()) { if (operator instanceof LogicalJoinOperator) { if (!capableSemiReorder(child, hasSemi, joinNum, maxJoin)) { return false; } } else { if (!capableSemiReorder(child, false, 0, maxJoin)) { return false; } } } return true; } public static boolean hasUnknownColumnsStats(OptExpression root) { Operator operator = root.getOp(); if (operator instanceof LogicalScanOperator) { LogicalScanOperator scanOperator = (LogicalScanOperator) operator; List<String> colNames = scanOperator.getColRefToColumnMetaMap().values().stream().map(Column::getName).collect( Collectors.toList()); if (operator instanceof LogicalOlapScanOperator) { Table table = scanOperator.getTable(); if (table instanceof OlapTable) { if (KeysType.AGG_KEYS.equals(((OlapTable) table).getKeysType())) { List<String> keyColumnNames = scanOperator.getColRefToColumnMetaMap().values().stream().filter(Column::isKey) .map(Column::getName) .collect(Collectors.toList()); List<ColumnStatistic> keyColumnStatisticList = GlobalStateMgr.getCurrentStatisticStorage().getColumnStatistics(table, keyColumnNames); return keyColumnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } } List<ColumnStatistic> columnStatisticList = GlobalStateMgr.getCurrentStatisticStorage().getColumnStatistics(table, colNames); return columnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } else 
if (operator instanceof LogicalHiveScanOperator || operator instanceof LogicalHudiScanOperator) { if (ConnectContext.get().getSessionVariable().enableHiveColumnStats()) { if (operator instanceof LogicalHiveScanOperator) { return ((LogicalHiveScanOperator) operator).hasUnknownColumn(); } else { return ((LogicalHudiScanOperator) operator).hasUnknownColumn(); } } return true; } else if (operator instanceof LogicalIcebergScanOperator) { IcebergTable table = (IcebergTable) scanOperator.getTable(); try { List<ColumnStatistic> columnStatisticList = IcebergTableStatisticCalculator.getColumnStatistics( new ArrayList<>(), table.getIcebergTable(), scanOperator.getColRefToColumnMetaMap()); return columnStatisticList.stream().anyMatch(ColumnStatistic::isUnknown); } catch (Exception e) { LOG.warn("Iceberg table {} get column failed. error : {}", table.getName(), e); return true; } } else { return true; } } return root.getInputs().stream().anyMatch(Utils::hasUnknownColumnsStats); } public static long getLongFromDateTime(LocalDateTime dateTime) { return dateTime.atZone(ZoneId.systemDefault()).toInstant().getEpochSecond(); } public static LocalDateTime getDatetimeFromLong(long dateTime) { return LocalDateTime.ofInstant(Instant.ofEpochSecond(dateTime), ZoneId.systemDefault()); } public static long convertBitSetToLong(BitSet bitSet, int length) { long gid = 0; for (int b = 0; b < length; ++b) { gid = gid * 2 + (bitSet.get(b) ? 1 : 0); } return gid; } public static ColumnRefOperator findSmallestColumnRef(List<ColumnRefOperator> columnRefOperatorList) { Preconditions.checkState(!columnRefOperatorList.isEmpty()); ColumnRefOperator smallestColumnRef = columnRefOperatorList.get(0); int smallestColumnLength = Integer.MAX_VALUE; for (ColumnRefOperator columnRefOperator : columnRefOperatorList) { Type columnType = columnRefOperator.getType(); if (columnType.isScalarType()) { int columnLength = columnType.getTypeSize(); if (columnLength < smallestColumnLength) { smallestColumnRef = columnRefOperator; smallestColumnLength = columnLength; } } } return smallestColumnRef; } public static boolean canDoReplicatedJoin(OlapTable table, long selectedIndexId, Collection<Long> selectedPartitionId, Collection<Long> selectedTabletId) { ConnectContext ctx = ConnectContext.get(); int backendSize = ctx.getTotalBackendNumber(); int aliveBackendSize = ctx.getAliveBackendNumber(); int schemaHash = table.getSchemaHashByIndexId(selectedIndexId); for (Long partitionId : selectedPartitionId) { Partition partition = table.getPartition(partitionId); if (table.isLakeTable()) { return false; } if (table.getPartitionInfo().getReplicationNum(partitionId) < backendSize) { return false; } long visibleVersion = partition.getVisibleVersion(); MaterializedIndex materializedIndex = partition.getIndex(selectedIndexId); for (Long id : selectedTabletId) { LocalTablet tablet = (LocalTablet) materializedIndex.getTablet(id); if (tablet != null && tablet.getQueryableReplicasSize(visibleVersion, schemaHash) != aliveBackendSize) { return false; } } } return true; } public static boolean isEqualBinaryPredicate(ScalarOperator predicate) { if (predicate instanceof BinaryPredicateOperator) { BinaryPredicateOperator binaryPredicate = (BinaryPredicateOperator) predicate; return binaryPredicate.getBinaryType().isEquivalence(); } if (predicate instanceof CompoundPredicateOperator) { CompoundPredicateOperator compoundPredicate = (CompoundPredicateOperator) predicate; if (compoundPredicate.isAnd()) { return isEqualBinaryPredicate(compoundPredicate.getChild(0)) && 
isEqualBinaryPredicate(compoundPredicate.getChild(1)); } return false; } return false; } /** * Try cast op to descType, return empty if failed */ public static Optional<ScalarOperator> tryCastConstant(ScalarOperator op, Type descType) { if (!op.isConstantRef() || op.getType().matchesType(descType) || Type.FLOAT.equals(op.getType()) || descType.equals(Type.FLOAT)) { return Optional.empty(); } try { if (((ConstantOperator) op).isNull()) { return Optional.of(ConstantOperator.createNull(descType)); } ConstantOperator result = ((ConstantOperator) op).castToStrictly(descType); if (result.toString().equalsIgnoreCase(op.toString())) { return Optional.of(result); } else if (descType.isDate() && (op.getType().isIntegerType() || op.getType().isStringType())) { if (op.toString().equalsIgnoreCase(result.toString().replaceAll("-", ""))) { return Optional.of(result); } } } catch (Exception ignored) { } return Optional.empty(); } public static Optional<ScalarOperator> tryDecimalCastConstant(CastOperator lhs, ConstantOperator rhs) { Type lhsType = lhs.getType(); Type rhsType = rhs.getType(); Type childType = lhs.getChild(0).getType(); if (!lhsType.isExactNumericType() || !rhsType.isExactNumericType() || !childType.isExactNumericType()) { return Optional.empty(); } if (!Type.isAssignable2Decimal((ScalarType) lhsType, (ScalarType) childType) || !Type.isAssignable2Decimal((ScalarType) childType, (ScalarType) rhsType)) { return Optional.empty(); } if (rhs.isNull()) { return Optional.of(ConstantOperator.createNull(childType)); } try { ConstantOperator result = rhs.castTo(childType); return Optional.of(result); } catch (Exception ignored) { } return Optional.empty(); } public static ScalarOperator transTrue2Null(ScalarOperator predicates) { if (ConstantOperator.TRUE.equals(predicates)) { return null; } return predicates; } public static <T extends ScalarOperator> List<T> collect(ScalarOperator root, Class<T> clazz) { List<T> output = Lists.newArrayList(); collect(root, clazz, output); return output; } private static <T extends ScalarOperator> void collect(ScalarOperator root, Class<T> clazz, List<T> output) { if (clazz.isInstance(root)) { output.add(clazz.cast(root)); } root.getChildren().forEach(child -> collect(child, clazz, output)); } public static Set<MaterializedView> getRelatedMvs(int maxLevel, List<Table> tablesToCheck) { Set<MaterializedView> mvs = Sets.newHashSet(); getRelatedMvs(maxLevel, 0, tablesToCheck, mvs); return mvs; } public static void getRelatedMvs(int maxLevel, int currentLevel, List<Table> tablesToCheck, Set<MaterializedView> mvs) { if (currentLevel >= maxLevel) { return; } Set<MvId> newMvIds = Sets.newHashSet(); for (Table table : tablesToCheck) { Set<MvId> mvIds = table.getRelatedMaterializedViews(); if (mvIds != null && !mvIds.isEmpty()) { newMvIds.addAll(mvIds); } } if (newMvIds.isEmpty()) { return; } List<Table> newMvs = Lists.newArrayList(); for (MvId mvId : newMvIds) { Database db = GlobalStateMgr.getCurrentState().getDb(mvId.getDbId()); if (db == null) { continue; } Table table = db.getTable(mvId.getId()); if (table == null) { continue; } newMvs.add(table); mvs.add((MaterializedView) table); } getRelatedMvs(maxLevel, currentLevel + 1, newMvs, mvs); } public static List<Table> getAllTables(OptExpression root) { List<Table> tables = Lists.newArrayList(); getAllTables(root, tables); return tables; } private static void getAllTables(OptExpression root, List<Table> tables) { if (root.getOp() instanceof LogicalScanOperator) { LogicalScanOperator scanOperator = (LogicalScanOperator) 
root.getOp(); tables.add(scanOperator.getTable()); } else { for (OptExpression child : root.getInputs()) { getAllTables(child, tables); } } } public static boolean isValidMVPlan(OptExpression root) { if (root == null) { return false; } return isLogicalSPJ(root) || isLogicalSPJG(root); } public static boolean isLogicalSPJG(OptExpression root) { if (root == null) { return false; } Operator operator = root.getOp(); if (!(operator instanceof LogicalAggregationOperator)) { return false; } LogicalAggregationOperator agg = (LogicalAggregationOperator) operator; if (agg.getType() != AggType.GLOBAL) { return false; } OptExpression child = root.inputAt(0); return isLogicalSPJ(child); } public static boolean isLogicalSPJ(OptExpression root) { if (root == null) { return false; } Operator operator = root.getOp(); if (!(operator instanceof LogicalOperator)) { return false; } if (!(operator instanceof LogicalScanOperator) && !(operator instanceof LogicalProjectOperator) && !(operator instanceof LogicalFilterOperator) && !(operator instanceof LogicalJoinOperator)) { return false; } for (OptExpression child : root.getInputs()) { if (!isLogicalSPJ(child)) { return false; } } return true; } public static Pair<OptExpression, LogicalPlan> getRuleOptimizedLogicalPlan(String sql, ColumnRefFactory columnRefFactory, ConnectContext connectContext) { StatementBase mvStmt; try { List<StatementBase> statementBases = com.starrocks.sql.parser.SqlParser.parse(sql, connectContext.getSessionVariable()); Preconditions.checkState(statementBases.size() == 1); mvStmt = statementBases.get(0); } catch (ParsingException parsingException) { LOG.warn("parse sql:{} failed", sql, parsingException); return null; } Preconditions.checkState(mvStmt instanceof QueryStatement); Analyzer.analyze(mvStmt, connectContext); QueryRelation query = ((QueryStatement) mvStmt).getQueryRelation(); LogicalPlan logicalPlan = new RelationTransformer(columnRefFactory, connectContext).transformWithSelectLimit(query); OptimizerConfig optimizerConfig = new OptimizerConfig(OptimizerConfig.OptimizerAlgorithm.RULE_BASED); optimizerConfig.disableRuleSet(RuleSetType.SINGLE_TABLE_MV_REWRITE); Optimizer optimizer = new Optimizer(optimizerConfig); OptExpression optimizedPlan = optimizer.optimize( connectContext, logicalPlan.getRoot(), new PhysicalPropertySet(), new ColumnRefSet(logicalPlan.getOutputColumn()), columnRefFactory); return Pair.create(optimizedPlan, logicalPlan); } public static List<OptExpression> collectScanExprs(OptExpression expression) { List<OptExpression> scanExprs = Lists.newArrayList(); OptExpressionVisitor scanCollector = new OptExpressionVisitor<Void, Void>() { @Override public Void visit(OptExpression optExpression, Void context) { for (OptExpression input : optExpression.getInputs()) { super.visit(input, context); } return null; } @Override public Void visitLogicalTableScan(OptExpression optExpression, Void context) { scanExprs.add(optExpression); return null; } }; expression.getOp().accept(scanCollector, expression, null); return scanExprs; } public static List<ScalarOperator> getAllPredicates(OptExpression root) { List<ScalarOperator> predicates = Lists.newArrayList(); getAllPredicates(root, predicates); return predicates; } private static void getAllPredicates(OptExpression root, List<ScalarOperator> predicates) { Operator operator = root.getOp(); if (operator.getPredicate() != null) { predicates.add(root.getOp().getPredicate()); } if (operator instanceof LogicalJoinOperator) { LogicalJoinOperator joinOperator = (LogicalJoinOperator) 
operator; if (joinOperator.getOnPredicate() != null) { predicates.add(joinOperator.getOnPredicate()); } } for (OptExpression child : root.getInputs()) { getAllPredicates(child, predicates); } } public static ScalarOperator canonizePredicate(ScalarOperator predicate) { if (predicate == null) { return null; } ScalarOperatorRewriter rewrite = new ScalarOperatorRewriter(); return rewrite.rewrite(predicate, ScalarOperatorRewriter.DEFAULT_REWRITE_SCAN_PREDICATE_RULES); } public static ScalarOperator canonizePredicateForRewrite(ScalarOperator predicate) { if (predicate == null) { return null; } ScalarOperatorRewriter rewrite = new ScalarOperatorRewriter(); return rewrite.rewrite(predicate, ScalarOperatorRewriter.MV_SCALAR_REWRITE_RULES); } public static ScalarOperator getCompensationPredicateForDisjunctive(ScalarOperator src, ScalarOperator target) { List<ScalarOperator> srcItems = Utils.extractDisjunctive(src); List<ScalarOperator> targetItems = Utils.extractDisjunctive(target); int srcLength = srcItems.size(); int targetLength = targetItems.size(); targetItems.removeAll(srcItems); if (targetItems.isEmpty() && srcLength == targetLength) { return ConstantOperator.createBoolean(true); } else if (!targetItems.isEmpty()) { return src; } else { return null; } } public static boolean isColumnEqualPredicate(ScalarOperator predicate) { if (predicate == null) { return false; } ScalarOperatorVisitor<Boolean, Void> checkVisitor = new ScalarOperatorVisitor<Boolean, Void>() { @Override public Boolean visit(ScalarOperator scalarOperator, Void context) { return false; } @Override public Boolean visitCompoundPredicate(CompoundPredicateOperator predicate, Void context) { if (!predicate.isAnd()) { return false; } for (ScalarOperator child : predicate.getChildren()) { Boolean ret = child.accept(this, null); if (!Boolean.TRUE.equals(ret)) { return false; } } return true; } @Override public Boolean visitBinaryPredicate(BinaryPredicateOperator predicate, Void context) { return predicate.getBinaryType().isEqual() && predicate.getChild(0).isColumnRef() && predicate.getChild(1).isColumnRef(); } }; return predicate.accept(checkVisitor, null); } public static Map<ColumnRefOperator, ScalarOperator> getColumnRefMap( OptExpression expression, ColumnRefFactory refFactory) { Map<ColumnRefOperator, ScalarOperator> columnRefMap; if (expression.getOp().getProjection() != null) { columnRefMap = expression.getOp().getProjection().getColumnRefMap(); } else { columnRefMap = Maps.newHashMap(); if (expression.getOp() instanceof LogicalAggregationOperator) { LogicalAggregationOperator agg = (LogicalAggregationOperator) expression.getOp(); Map<ColumnRefOperator, ScalarOperator> keyMap = agg.getGroupingKeys().stream().collect(Collectors.toMap( java.util.function.Function.identity(), java.util.function.Function.identity())); columnRefMap.putAll(keyMap); columnRefMap.putAll(agg.getAggregations()); } else { ColumnRefSet refSet = expression.getOutputColumns(); for (int columnId : refSet.getColumnIds()) { ColumnRefOperator columnRef = refFactory.getColumnRef(columnId); columnRefMap.put(columnRef, columnRef); } } } return columnRefMap; } }
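The bug confirmed in this record: the original `isAllEqualInnerJoin` returned true as soon as the root was an inner equal-join, so the inputs of that join were never visited and a non-inner join lower in the plan went undetected; the fix inverts the condition so the method only short-circuits on failure. A schematic, self-contained model of the early return; `JoinCheckDemo` and its `Node`/`Kind` types are stand-ins, not the real optimizer classes:

```java
import java.util.Arrays;
import java.util.List;

// Schematic model of the early-return defect; Node/Kind stand in for
// OptExpression and its operators.
final class JoinCheckDemo {
    enum Kind { INNER_EQUAL_JOIN, OTHER_JOIN, SCAN }

    static final class Node {
        final Kind kind;
        final List<Node> inputs;
        Node(Kind kind, Node... inputs) {
            this.kind = kind;
            this.inputs = Arrays.asList(inputs);
        }
    }

    // Mirrors the buggy shape: "return true" at the first inner equal-join
    // means everything below that join is never inspected.
    static boolean buggyAllEqualInnerJoin(Node n) {
        if (n.kind == Kind.INNER_EQUAL_JOIN) {
            return true; // BUG: the inputs of this join are skipped
        }
        if (n.kind == Kind.OTHER_JOIN) {
            return false;
        }
        for (Node child : n.inputs) {
            if (!buggyAllEqualInnerJoin(child)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        // An inner equal-join sitting on top of a non-inner join:
        Node plan = new Node(Kind.INNER_EQUAL_JOIN,
                new Node(Kind.OTHER_JOIN, new Node(Kind.SCAN), new Node(Kind.SCAN)));
        // Prints true although the plan contains a non-inner join; the fixed
        // version keeps recursing into the join's inputs and returns false.
        System.out.println(buggyAllEqualInnerJoin(plan));
    }
}
```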
Can we explicitly set `this.index` to 0 in the constructor - for readability reasons?
public void setIndex(int index) { if (this.index == 0) { this.index = index; } }
if (this.index == 0) {
public void setIndex(int index) { if (this.index == 0) { this.index = index; } }
class ItemBulkOperation<TInternal, TContext> extends CosmosItemOperationBase implements Comparable<CosmosItemOperation> { private final TInternal item; private final TContext context; private final String id; private final PartitionKey partitionKey; private final CosmosItemOperationType operationType; private final RequestOptions requestOptions; private String partitionKeyJson; private BulkOperationRetryPolicy bulkOperationRetryPolicy; /** index for preserve ordering in Bulk Executor */ private int index; public ItemBulkOperation( CosmosItemOperationType operationType, String id, PartitionKey partitionKey, RequestOptions requestOptions, TInternal item, TContext context) { checkNotNull(operationType, "expected non-null operationType"); this.operationType = operationType; this.partitionKey = partitionKey; this.id = id; this.item = item; this.context = context; this.requestOptions = requestOptions; } /** * Writes a single operation to JsonSerializable. * TODO(rakkuma): Similarly for hybrid row, operation needs to be written in Hybrid row. * Issue: https: * * @return instance of JsonSerializable containing values for a operation. */ @Override JsonSerializable getSerializedOperationInternal() { final JsonSerializable jsonSerializable = new JsonSerializable(); jsonSerializable.set( BatchRequestResponseConstants.FIELD_OPERATION_TYPE, ModelBridgeInternal.getOperationValueForCosmosItemOperationType(this.getOperationType())); if (StringUtils.isNotEmpty(this.getPartitionKeyJson())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_PARTITION_KEY, this.getPartitionKeyJson()); } if (StringUtils.isNotEmpty(this.getId())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_ID, this.getId()); } if (this.getItemInternal() != null) { if (this.getOperationType() == CosmosItemOperationType.PATCH) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_RESOURCE_BODY, PatchUtil.serializableBatchPatchOperation((CosmosPatchOperations) this.getItemInternal(), this.getRequestOptions())); } else { jsonSerializable.set(BatchRequestResponseConstants.FIELD_RESOURCE_BODY, this.getItemInternal()); } } if (this.getRequestOptions() != null) { RequestOptions requestOptions = this.getRequestOptions(); if (StringUtils.isNotEmpty(requestOptions.getIfMatchETag())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_IF_MATCH, requestOptions.getIfMatchETag()); } if (StringUtils.isNotEmpty(requestOptions.getIfNoneMatchETag())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_IF_NONE_MATCH, requestOptions.getIfNoneMatchETag()); } if (requestOptions.isContentResponseOnWriteEnabled() != null) { if (!requestOptions.isContentResponseOnWriteEnabled() && BulkExecutorUtil.isWriteOperation(operationType)) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_MINIMAL_RETURN_PREFERENCE, true); } } } return jsonSerializable; } public int getIndex() { return index; } TInternal getItemInternal() { return this.item; } @SuppressWarnings("unchecked") public <T> T getItem() { return (T)this.item; } @SuppressWarnings("unchecked") public <T> T getContext() { return (T)this.context; } public String getId() { return this.id; } public PartitionKey getPartitionKeyValue() { return partitionKey; } public CosmosItemOperationType getOperationType() { return this.operationType; } public RequestOptions getRequestOptions() { return this.requestOptions; } private String getPartitionKeyJson() { return partitionKeyJson; } void setPartitionKeyJson(String value) { partitionKeyJson = value; } BulkOperationRetryPolicy 
getRetryPolicy() { return bulkOperationRetryPolicy; } void setRetryPolicy(BulkOperationRetryPolicy bulkOperationRetryPolicy) { this.bulkOperationRetryPolicy = bulkOperationRetryPolicy; } @Override public int compareTo(CosmosItemOperation operation) { if (operation instanceof ItemBulkOperation) { ItemBulkOperation<?, ?> bulkOperation = (ItemBulkOperation<?, ?>) operation; return this.index - bulkOperation.index; } return 0; } @Override public boolean equals(Object obj) { return super.equals(obj); } @Override public int hashCode() { return super.hashCode(); } }
class ItemBulkOperation<TInternal, TContext> extends CosmosItemOperationBase implements Comparable<CosmosItemOperation> { private final TInternal item; private final TContext context; private final String id; private final PartitionKey partitionKey; private final CosmosItemOperationType operationType; private final RequestOptions requestOptions; private String partitionKeyJson; private BulkOperationRetryPolicy bulkOperationRetryPolicy; /** index for preserve ordering in Bulk Executor */ private int index; public ItemBulkOperation( CosmosItemOperationType operationType, String id, PartitionKey partitionKey, RequestOptions requestOptions, TInternal item, TContext context) { checkNotNull(operationType, "expected non-null operationType"); this.operationType = operationType; this.partitionKey = partitionKey; this.id = id; this.item = item; this.context = context; this.requestOptions = requestOptions; this.index = 0; } /** * Writes a single operation to JsonSerializable. * TODO(rakkuma): Similarly for hybrid row, operation needs to be written in Hybrid row. * Issue: https: * * @return instance of JsonSerializable containing values for a operation. */ @Override JsonSerializable getSerializedOperationInternal() { final JsonSerializable jsonSerializable = new JsonSerializable(); jsonSerializable.set( BatchRequestResponseConstants.FIELD_OPERATION_TYPE, ModelBridgeInternal.getOperationValueForCosmosItemOperationType(this.getOperationType())); if (StringUtils.isNotEmpty(this.getPartitionKeyJson())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_PARTITION_KEY, this.getPartitionKeyJson()); } if (StringUtils.isNotEmpty(this.getId())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_ID, this.getId()); } if (this.getItemInternal() != null) { if (this.getOperationType() == CosmosItemOperationType.PATCH) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_RESOURCE_BODY, PatchUtil.serializableBatchPatchOperation((CosmosPatchOperations) this.getItemInternal(), this.getRequestOptions())); } else { jsonSerializable.set(BatchRequestResponseConstants.FIELD_RESOURCE_BODY, this.getItemInternal()); } } if (this.getRequestOptions() != null) { RequestOptions requestOptions = this.getRequestOptions(); if (StringUtils.isNotEmpty(requestOptions.getIfMatchETag())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_IF_MATCH, requestOptions.getIfMatchETag()); } if (StringUtils.isNotEmpty(requestOptions.getIfNoneMatchETag())) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_IF_NONE_MATCH, requestOptions.getIfNoneMatchETag()); } if (requestOptions.isContentResponseOnWriteEnabled() != null) { if (!requestOptions.isContentResponseOnWriteEnabled() && BulkExecutorUtil.isWriteOperation(operationType)) { jsonSerializable.set(BatchRequestResponseConstants.FIELD_MINIMAL_RETURN_PREFERENCE, true); } } } return jsonSerializable; } public int getIndex() { return index; } TInternal getItemInternal() { return this.item; } @SuppressWarnings("unchecked") public <T> T getItem() { return (T)this.item; } @SuppressWarnings("unchecked") public <T> T getContext() { return (T)this.context; } public String getId() { return this.id; } public PartitionKey getPartitionKeyValue() { return partitionKey; } public CosmosItemOperationType getOperationType() { return this.operationType; } public RequestOptions getRequestOptions() { return this.requestOptions; } private String getPartitionKeyJson() { return partitionKeyJson; } void setPartitionKeyJson(String value) { partitionKeyJson = value; } 
BulkOperationRetryPolicy getRetryPolicy() { return bulkOperationRetryPolicy; } void setRetryPolicy(BulkOperationRetryPolicy bulkOperationRetryPolicy) { this.bulkOperationRetryPolicy = bulkOperationRetryPolicy; } @Override public int compareTo(CosmosItemOperation operation) { if (operation instanceof ItemBulkOperation) { ItemBulkOperation<?, ?> bulkOperation = (ItemBulkOperation<?, ?>) operation; return this.index - bulkOperation.index; } return 0; } @Override public boolean equals(Object obj) { return super.equals(obj); } @Override public int hashCode() { return super.hashCode(); } }
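One note on the readability suggestion in this record: Java zero-initializes instance fields, so the added `this.index = 0` changes nothing at runtime; `index` was already 0 before the assignment existed, which is also why `setIndex` can use `this.index == 0` as its "not yet set" guard. A minimal demonstration of that default (`DefaultIntDemo` is illustrative):

```java
public class DefaultIntDemo {
    private int index; // no explicit initializer anywhere

    public static void main(String[] args) {
        // Instance fields of type int default to 0 (JLS §4.12.5),
        // so this prints 0 without any assignment in a constructor.
        System.out.println(new DefaultIntDemo().index);
    }
}
```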
Can agg_state and variant be cast to json? What will we get if we cast ipv4/ipv6 to json?
public static boolean canCastTo(Type sourceType, Type targetType) { if (targetType.isJsonbType()) { return true; } if (sourceType.isVariantType() && (targetType.isScalarType() || targetType.isArrayType())) { return true; } else if (sourceType.isScalarType() && targetType.isScalarType()) { return ScalarType.canCastTo((ScalarType) sourceType, (ScalarType) targetType); } else if (sourceType.isArrayType() && targetType.isArrayType()) { return ArrayType.canCastTo((ArrayType) sourceType, (ArrayType) targetType); } else if (sourceType.isMapType() && targetType.isMapType()) { return MapType.canCastTo((MapType) sourceType, (MapType) targetType); } else if (targetType.isArrayType() && !((ArrayType) targetType).getItemType().isScalarType() && !sourceType.isNull() && !sourceType.isStringType()) { return false; } else if ((targetType.isStructType() || targetType.isMapType()) && sourceType.isStringType()) { return true; } else if (sourceType.isStructType() && targetType.isStructType()) { return StructType.canCastTo((StructType) sourceType, (StructType) targetType); } else if (sourceType.isAggStateType() && targetType.getPrimitiveType().isCharFamily()) { return true; } else if (sourceType.isAggStateType() && targetType.isAggStateType()) { AggStateType sourceAggState = (AggStateType) sourceType; AggStateType targetAggState = (AggStateType) targetType; if (!sourceAggState.getFunctionName().equalsIgnoreCase(targetAggState.getFunctionName())) { return false; } if (sourceAggState.getSubTypes().size() != targetAggState.getSubTypes().size()) { return false; } for (int i = 0; i < sourceAggState.getSubTypes().size(); i++) { if (!targetAggState.getSubTypeNullables().get(i) && sourceAggState.getSubTypeNullables().get(i)) { return false; } if (!canCastTo(sourceAggState.getSubTypes().get(i), targetAggState.getSubTypes().get(i))) { return false; } } return true; } return sourceType.isNull() || sourceType.getPrimitiveType().isCharFamily(); }
return true;
public static boolean canCastTo(Type sourceType, Type targetType) { if (targetType.isJsonbType() && sourceType.isComplexType()) { return true; } if (sourceType.isVariantType() && (targetType.isScalarType() || targetType.isArrayType())) { return true; } else if (sourceType.isScalarType() && targetType.isScalarType()) { return ScalarType.canCastTo((ScalarType) sourceType, (ScalarType) targetType); } else if (sourceType.isArrayType() && targetType.isArrayType()) { return ArrayType.canCastTo((ArrayType) sourceType, (ArrayType) targetType); } else if (sourceType.isMapType() && targetType.isMapType()) { return MapType.canCastTo((MapType) sourceType, (MapType) targetType); } else if (targetType.isArrayType() && !((ArrayType) targetType).getItemType().isScalarType() && !sourceType.isNull() && !sourceType.isStringType()) { return false; } else if ((targetType.isStructType() || targetType.isMapType()) && sourceType.isStringType()) { return true; } else if (sourceType.isStructType() && targetType.isStructType()) { return StructType.canCastTo((StructType) sourceType, (StructType) targetType); } else if (sourceType.isAggStateType() && targetType.getPrimitiveType().isCharFamily()) { return true; } else if (sourceType.isAggStateType() && targetType.isAggStateType()) { AggStateType sourceAggState = (AggStateType) sourceType; AggStateType targetAggState = (AggStateType) targetType; if (!sourceAggState.getFunctionName().equalsIgnoreCase(targetAggState.getFunctionName())) { return false; } if (sourceAggState.getSubTypes().size() != targetAggState.getSubTypes().size()) { return false; } for (int i = 0; i < sourceAggState.getSubTypes().size(); i++) { if (!targetAggState.getSubTypeNullables().get(i) && sourceAggState.getSubTypeNullables().get(i)) { return false; } if (!canCastTo(sourceAggState.getSubTypes().get(i), targetAggState.getSubTypes().get(i))) { return false; } } return true; } return sourceType.isNull() || sourceType.getPrimitiveType().isCharFamily(); }
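A minimal sketch of the change, using the `Type` predicates from the method above but isolated as a hypothetical helper (not the engine's real API): the blanket "anything casts to JSONB" branch is narrowed so only complex types qualify, and scalar sources such as ipv4/ipv6 or agg_state no longer pass through it.
```java
// Hedged sketch of the narrowed guard only. In the full method, sources that fail
// this guard fall through to ScalarType.canCastTo and the remaining branches
// instead of returning false outright.
static boolean canCastToJsonbSketch(Type sourceType, Type targetType) {
    return targetType.isJsonbType() && sourceType.isComplexType(); // was: return true;
}
```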
class should specialize template type properly throw new TypeException("specializeTemplateType not implemented"); } else { return this; }
class should specialize template type properly throw new TypeException("specializeTemplateType not implemented"); } else { return this; }
It should throw ```new SQLException("can not start in a Active transaction")``` immediately, rather than ```commit``` the current transaction.
public ResponseHeader execute() throws SQLException { switch (tclStatement.getOp()) { case "START": case "BEGIN": /** * we have to let session occupy the thread when doing xa transaction. * according to https: */ if (connectionSession.getTransactionStatus().isInTransaction()) { if (TransactionType.MANUALXA != connectionSession.getTransactionStatus().getTransactionType()) { BackendTransactionManager backendTransactionManager = new BackendTransactionManager((JDBCBackendConnection) connectionSession.getBackendConnection()); backendTransactionManager.commit(); } else { throw new SQLException("already in XA transaction"); } } TransactionHolder.setInTransaction(); connectionSession.setAutoCommit(false); connectionSession.getTransactionStatus().setManualXA(true); JDBCBackendConnection connection = (JDBCBackendConnection) connectionSession.getBackendConnection(); connection.closeDatabaseCommunicationEngines(true); connection.closeConnections(false); return backendHandler.execute(); case "END": case "PREPARE": case "RECOVER": return backendHandler.execute(); case "COMMIT": case "ROLLBACK": try { return backendHandler.execute(); } finally { connectionSession.getTransactionStatus().setManualXA(false); TransactionHolder.clear(); } default: throw new SQLException("unrecognized XA statement"); } }
* according to https:
public ResponseHeader execute() throws SQLException { switch (tclStatement.getOp()) { case "START": case "BEGIN": /** * we have to let session occupy the thread when doing xa transaction. * according to https: */ if (connectionSession.getTransactionStatus().isInTransaction()) { throw new SQLException("can not start in a Active transaction"); } ResponseHeader header = backendHandler.execute(); TransactionHolder.setInTransaction(); connectionSession.getTransactionStatus().setManualXA(true); return header; case "END": case "PREPARE": case "RECOVER": return backendHandler.execute(); case "COMMIT": case "ROLLBACK": try { return backendHandler.execute(); } finally { connectionSession.getTransactionStatus().setManualXA(false); TransactionHolder.clear(); } default: throw new SQLException("unrecognized XA statement " + tclStatement.getOp()); } }
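Condensed from the after-body, the START/BEGIN branch now fails fast; no code path commits the in-flight transaction anymore:
```java
// Excerpt of the fixed control flow (identifiers as in the handler above):
case "START":
case "BEGIN":
    if (connectionSession.getTransactionStatus().isInTransaction()) {
        // fail fast instead of committing the active transaction
        throw new SQLException("can not start in a Active transaction");
    }
    ResponseHeader header = backendHandler.execute();
    TransactionHolder.setInTransaction();
    connectionSession.getTransactionStatus().setManualXA(true);
    return header;
```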
class TransactionXAHandler implements TextProtocolBackendHandler { private final XAStatement tclStatement; private final ConnectionSession connectionSession; private final SchemaAssignedDatabaseBackendHandler backendHandler; public TransactionXAHandler(final SQLStatementContext<? extends TCLStatement> sqlStatementContext, final String sql, final ConnectionSession connectionSession) { this.tclStatement = (XAStatement) sqlStatementContext.getSqlStatement(); this.connectionSession = connectionSession; this.backendHandler = new SchemaAssignedDatabaseBackendHandler(sqlStatementContext, sql, connectionSession); } @Override public boolean next() throws SQLException { return this.tclStatement.getOp().equals("RECOVER") && this.backendHandler.next(); } @Override public Collection<Object> getRowData() throws SQLException { return this.tclStatement.getOp().equals("RECOVER") ? this.backendHandler.getRowData() : Collections.emptyList(); } @Override }
class TransactionXAHandler implements TextProtocolBackendHandler { private final XAStatement tclStatement; private final ConnectionSession connectionSession; private final SchemaAssignedDatabaseBackendHandler backendHandler; public TransactionXAHandler(final SQLStatementContext<? extends TCLStatement> sqlStatementContext, final String sql, final ConnectionSession connectionSession) { this.tclStatement = (XAStatement) sqlStatementContext.getSqlStatement(); this.connectionSession = connectionSession; this.backendHandler = new SchemaAssignedDatabaseBackendHandler(sqlStatementContext, sql, connectionSession); } @Override public boolean next() throws SQLException { return this.tclStatement.getOp().equals("RECOVER") && this.backendHandler.next(); } @Override public Collection<Object> getRowData() throws SQLException { return this.tclStatement.getOp().equals("RECOVER") ? this.backendHandler.getRowData() : Collections.emptyList(); } @Override }
Should we make matching case-insensitive? ```java Pattern pattern = Pattern.compile("/subscriptions/([\\w-]+)/", Pattern.CASE_INSENSITIVE); ```
public Response intercept(Chain chain) throws IOException { Response response = chain.proceed(chain.request()); if (!response.isSuccessful()) { String content = errorBody(response.body()); RestClient restClient = new RestClient.Builder() .withBaseUrl("https: .withCredentials(credentials) .withSerializerAdapter(new AzureJacksonAdapter()) .withResponseBuilderFactory(new AzureResponseBuilder.Factory()) .build(); CloudError cloudError = restClient.serializerAdapter().deserialize(content, CloudError.class); if ("MissingSubscriptionRegistration".equals(cloudError.code())) { Pattern pattern = Pattern.compile("/subscriptions/([\\w-]+)/"); Matcher matcher = pattern.matcher(chain.request().url().toString()); matcher.find(); ResourceManager resourceManager = ResourceManager.authenticate(restClient) .withSubscription(matcher.group(1)); pattern = Pattern.compile(".*'(.*)'"); matcher = pattern.matcher(cloudError.message()); matcher.find(); Provider provider = registerProvider(matcher.group(1), resourceManager); while (provider.registrationState().equals("Unregistered") || provider.registrationState().equalsIgnoreCase("Registering")) { SdkContext.sleep(5 * 1000); provider = resourceManager.providers().getByName(provider.namespace()); } response = chain.proceed(chain.request()); } } return response; }
Pattern pattern = Pattern.compile("/subscriptions/([\\w-]+)/");
public Response intercept(Chain chain) throws IOException { Response response = chain.proceed(chain.request()); if (!response.isSuccessful()) { String content = errorBody(response.body()); AzureJacksonAdapter jacksonAdapter = new AzureJacksonAdapter(); CloudError cloudError = jacksonAdapter.deserialize(content, CloudError.class); if ("MissingSubscriptionRegistration".equals(cloudError.code())) { Pattern pattern = Pattern.compile("/subscriptions/([\\w-]+)/", Pattern.CASE_INSENSITIVE); Matcher matcher = pattern.matcher(chain.request().url().toString()); matcher.find(); RestClient restClient = new RestClient.Builder() .withBaseUrl("https: .withCredentials(credentials) .withSerializerAdapter(jacksonAdapter) .withResponseBuilderFactory(new AzureResponseBuilder.Factory()) .build(); ResourceManager resourceManager = ResourceManager.authenticate(restClient) .withSubscription(matcher.group(1)); pattern = Pattern.compile(".*'(.*)'"); matcher = pattern.matcher(cloudError.message()); matcher.find(); Provider provider = registerProvider(matcher.group(1), resourceManager); while (provider.registrationState().equalsIgnoreCase("Unregistered") || provider.registrationState().equalsIgnoreCase("Registering")) { SdkContext.sleep(5 * 1000); provider = resourceManager.providers().getByName(provider.namespace()); } response = chain.proceed(chain.request()); } } return response; }
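A hedged illustration of why the `Pattern.CASE_INSENSITIVE` flag matters here. The GUID and URLs below are made-up examples, and this assumes the request URL may carry mixed-case path segments or upper-case hex digits in the subscription ID:
```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SubscriptionIdExtraction {
    public static void main(String[] args) {
        // Without CASE_INSENSITIVE, the second URL would not match at all.
        Pattern pattern = Pattern.compile("/subscriptions/([\\w-]+)/", Pattern.CASE_INSENSITIVE);
        String[] urls = {
            "https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/rg/",
            "https://management.azure.com/Subscriptions/0B1F6471-1BF0-4DDA-AEC3-CB9272F09590/resourceGroups/rg/"
        };
        for (String url : urls) {
            Matcher matcher = pattern.matcher(url);
            if (matcher.find()) {
                System.out.println(matcher.group(1)); // same subscription ID either way
            }
        }
    }
}
```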
class ProviderRegistrationInterceptor implements Interceptor { private AzureTokenCredentials credentials; /** * Initialize a provider registration interceptor with a credential that's authorized * to register the provider. * @param credentials the credential for provider registration */ public ProviderRegistrationInterceptor(AzureTokenCredentials credentials) { this.credentials = credentials; } @Override private String errorBody(ResponseBody responseBody) throws IOException { if (responseBody == null) { return null; } BufferedSource source = responseBody.source(); source.request(Long.MAX_VALUE); Buffer buffer = source.buffer(); return buffer.readUtf8(); } private Provider registerProvider(String namespace, ResourceManager resourceManager) { return resourceManager.providers().register(namespace); } }
class ProviderRegistrationInterceptor implements Interceptor { private final AzureTokenCredentials credentials; /** * Initialize a provider registration interceptor with a credential that's authorized * to register the provider. * @param credentials the credential for provider registration */ public ProviderRegistrationInterceptor(AzureTokenCredentials credentials) { this.credentials = credentials; } @Override private String errorBody(ResponseBody responseBody) throws IOException { if (responseBody == null) { return null; } BufferedSource source = responseBody.source(); source.request(Long.MAX_VALUE); Buffer buffer = source.buffer(); return buffer.readUtf8(); } private Provider registerProvider(String namespace, ResourceManager resourceManager) { return resourceManager.providers().register(namespace); } }
I kind of don't like the current solution either, but your suggestion implies that we have a `@Nullable` ctor argument, which is also not great. Afaict, with your suggestion we also have to check whether the checkpointId is set and fail eventually.
public int getVersion() { return 0; }
return 0;
public int getVersion() { return 2; }
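For context on why the returned version must track the newest write format, here is a hedged toy `SimpleVersionedSerializer` (not the committable serializer itself): `getVersion()` stamps newly written bytes, while `deserialize()` must keep accepting every version ever written.
```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.flink.core.io.SimpleVersionedSerializer;

// Toy example only: returning 0 from getVersion() while deserialize() handles 1 and 2
// would stamp fresh state with a version this class itself refuses to read back.
final class ToyStringSerializer implements SimpleVersionedSerializer<String> {
    @Override
    public int getVersion() {
        return 2; // version written from now on; must match the newest read branch
    }

    @Override
    public byte[] serialize(String value) throws IOException {
        return value.getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public String deserialize(int version, byte[] serialized) throws IOException {
        switch (version) {
            case 1: // legacy layout still restorable from old snapshots
            case 2: // current layout
                return new String(serialized, StandardCharsets.UTF_8);
            default:
                throw new IOException("Unrecognized version: " + version);
        }
    }
}
```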
class CommittableCollectorSerializer<CommT> implements SimpleVersionedSerializer<CommittableCollector<CommT>> { private static final int MAGIC_NUMBER = 0xb91f252c; private final SimpleVersionedSerializer<CommT> committableSerializer; private final int subtaskId; private final int numberOfSubtasks; public CommittableCollectorSerializer( SimpleVersionedSerializer<CommT> committableSerializer, int subtaskId, int numberOfSubtasks) { this.committableSerializer = checkNotNull(committableSerializer); this.subtaskId = subtaskId; this.numberOfSubtasks = numberOfSubtasks; } @Override @Override public byte[] serialize(CommittableCollector<CommT> committableCollector) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); out.writeInt(MAGIC_NUMBER); serializeV2(committableCollector, out); return out.getCopyOfBuffer(); } @Override public CommittableCollector<CommT> deserialize(int version, byte[] serialized) throws IOException { final DataInputDeserializer in = new DataInputDeserializer(serialized); if (version == 1) { return deserializeV1(in); } if (version == 2) { validateMagicNumber(in); return deserializeV2(in); } throw new IOException("Unrecognized version or corrupt state: " + version); } private CommittableCollector<CommT> deserializeV1(DataInputView in) throws IOException { return CommittableCollector.ofLegacy( SinkV1CommittableDeserializer.readVersionAndDeserializeList( committableSerializer, in)); } private void serializeV2( CommittableCollector<CommT> committableCollector, DataOutputView dataOutputView) throws IOException { SimpleVersionedSerialization.writeVersionAndSerializeList( new CheckpointSimpleVersionedSerializer(), new ArrayList<>(committableCollector.getCheckpointCommittables()), dataOutputView); } private CommittableCollector<CommT> deserializeV2(DataInputDeserializer in) throws IOException { List<CheckpointCommittableManagerImpl<CommT>> checkpoints = SimpleVersionedSerialization.readVersionAndDeserializeList( new CheckpointSimpleVersionedSerializer(), in); return new CommittableCollector<>( checkpoints.stream() .collect( Collectors.toMap( CheckpointCommittableManagerImpl::getCheckpointId, e -> e)), subtaskId, numberOfSubtasks); } private static void validateMagicNumber(DataInputView in) throws IOException { final int magicNumber = in.readInt(); if (magicNumber != MAGIC_NUMBER) { throw new IOException( String.format("Corrupt data: Unexpected magic number %08X", magicNumber)); } } private class CheckpointSimpleVersionedSerializer implements SimpleVersionedSerializer<CheckpointCommittableManagerImpl<CommT>> { @Override public int getVersion() { return 0; } @Override public byte[] serialize(CheckpointCommittableManagerImpl<CommT> checkpoint) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); out.writeLong(checkpoint.getCheckpointId()); SimpleVersionedSerialization.writeVersionAndSerializeList( new SubtaskSimpleVersionedSerializer(), new ArrayList<>(checkpoint.getSubtaskCommittableManagers()), out); return out.getCopyOfBuffer(); } @Override public CheckpointCommittableManagerImpl<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); long checkpointId = in.readLong(); List<SubtaskCommittableManager<CommT>> subtasks = SimpleVersionedSerialization.readVersionAndDeserializeList( new SubtaskSimpleVersionedSerializer(checkpointId), in); return new CheckpointCommittableManagerImpl<>( subtasks.stream() .collect( Collectors.toMap( 
SubtaskCommittableManager::getSubtaskId, e -> e)), subtaskId, numberOfSubtasks, checkpointId); } } private class SubtaskSimpleVersionedSerializer implements SimpleVersionedSerializer<SubtaskCommittableManager<CommT>> { @Nullable private final Long checkpointId; /** * This ctor must be used to create a deserializer where the checkpointId is used to set the * checkpointId of the deserialized SubtaskCommittableManager. * * @param checkpointId used to recover the SubtaskCommittableManager */ public SubtaskSimpleVersionedSerializer(long checkpointId) { this.checkpointId = checkpointId; } public SubtaskSimpleVersionedSerializer() { this.checkpointId = null; } @Override public int getVersion() { return 0; } @Override public byte[] serialize(SubtaskCommittableManager<CommT> subtask) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); SimpleVersionedSerialization.writeVersionAndSerializeList( new RequestSimpleVersionedSerializer(), new ArrayList<>(subtask.getRequests()), out); out.writeInt(subtask.getNumCommittables()); out.writeInt(subtask.getNumDrained()); out.writeInt(subtask.getNumFailed()); return out.getCopyOfBuffer(); } @Override public SubtaskCommittableManager<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); List<CommitRequestImpl<CommT>> requests = SimpleVersionedSerialization.readVersionAndDeserializeList( new RequestSimpleVersionedSerializer(), in); return new SubtaskCommittableManager<>( requests, in.readInt(), in.readInt(), in.readInt(), subtaskId, checkNotNull( checkpointId, "CheckpointId must be set to align the SubtaskCommittableManager with holding CheckpointCommittableManager.")); } private class RequestSimpleVersionedSerializer implements SimpleVersionedSerializer<CommitRequestImpl<CommT>> { @Override public int getVersion() { return 0; } @Override public byte[] serialize(CommitRequestImpl<CommT> request) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); SimpleVersionedSerialization.writeVersionAndSerialize( committableSerializer, request.getCommittable(), out); out.writeInt(request.getNumberOfRetries()); out.writeInt(request.getState().ordinal()); return out.getCopyOfBuffer(); } @Override public CommitRequestImpl<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); CommT committable = SimpleVersionedSerialization.readVersionAndDeSerialize( committableSerializer, in); return new CommitRequestImpl<>( committable, in.readInt(), CommitRequestState.values()[in.readInt()]); } } } }
class CommittableCollectorSerializer<CommT> implements SimpleVersionedSerializer<CommittableCollector<CommT>> { private static final int MAGIC_NUMBER = 0xb91f252c; private final SimpleVersionedSerializer<CommT> committableSerializer; private final int subtaskId; private final int numberOfSubtasks; public CommittableCollectorSerializer( SimpleVersionedSerializer<CommT> committableSerializer, int subtaskId, int numberOfSubtasks) { this.committableSerializer = checkNotNull(committableSerializer); this.subtaskId = subtaskId; this.numberOfSubtasks = numberOfSubtasks; } @Override @Override public byte[] serialize(CommittableCollector<CommT> committableCollector) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); out.writeInt(MAGIC_NUMBER); serializeV2(committableCollector, out); return out.getCopyOfBuffer(); } @Override public CommittableCollector<CommT> deserialize(int version, byte[] serialized) throws IOException { final DataInputDeserializer in = new DataInputDeserializer(serialized); if (version == 1) { return deserializeV1(in); } if (version == 2) { validateMagicNumber(in); return deserializeV2(in); } throw new IOException("Unrecognized version or corrupt state: " + version); } private CommittableCollector<CommT> deserializeV1(DataInputView in) throws IOException { return CommittableCollector.ofLegacy( SinkV1CommittableDeserializer.readVersionAndDeserializeList( committableSerializer, in)); } private void serializeV2( CommittableCollector<CommT> committableCollector, DataOutputView dataOutputView) throws IOException { SimpleVersionedSerialization.writeVersionAndSerializeList( new CheckpointSimpleVersionedSerializer(), new ArrayList<>(committableCollector.getCheckpointCommittables()), dataOutputView); } private CommittableCollector<CommT> deserializeV2(DataInputDeserializer in) throws IOException { List<CheckpointCommittableManagerImpl<CommT>> checkpoints = SimpleVersionedSerialization.readVersionAndDeserializeList( new CheckpointSimpleVersionedSerializer(), in); return new CommittableCollector<>( checkpoints.stream() .collect( Collectors.toMap( CheckpointCommittableManagerImpl::getCheckpointId, e -> e)), subtaskId, numberOfSubtasks); } private static void validateMagicNumber(DataInputView in) throws IOException { final int magicNumber = in.readInt(); if (magicNumber != MAGIC_NUMBER) { throw new IOException( String.format("Corrupt data: Unexpected magic number %08X", magicNumber)); } } private class CheckpointSimpleVersionedSerializer implements SimpleVersionedSerializer<CheckpointCommittableManagerImpl<CommT>> { @Override public int getVersion() { return 0; } @Override public byte[] serialize(CheckpointCommittableManagerImpl<CommT> checkpoint) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); out.writeLong(checkpoint.getCheckpointId()); SimpleVersionedSerialization.writeVersionAndSerializeList( new SubtaskSimpleVersionedSerializer(), new ArrayList<>(checkpoint.getSubtaskCommittableManagers()), out); return out.getCopyOfBuffer(); } @Override public CheckpointCommittableManagerImpl<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); long checkpointId = in.readLong(); List<SubtaskCommittableManager<CommT>> subtasks = SimpleVersionedSerialization.readVersionAndDeserializeList( new SubtaskSimpleVersionedSerializer(checkpointId), in); return new CheckpointCommittableManagerImpl<>( subtasks.stream() .collect( Collectors.toMap( 
SubtaskCommittableManager::getSubtaskId, e -> e)), subtaskId, numberOfSubtasks, checkpointId); } } private class SubtaskSimpleVersionedSerializer implements SimpleVersionedSerializer<SubtaskCommittableManager<CommT>> { @Nullable private final Long checkpointId; /** * This ctor must be used to create a deserializer where the checkpointId is used to set the * checkpointId of the deserialized SubtaskCommittableManager. * * @param checkpointId used to recover the SubtaskCommittableManager */ public SubtaskSimpleVersionedSerializer(long checkpointId) { this.checkpointId = checkpointId; } /** * When using this ctor, you cannot use the serializer for deserialization because it misses * the checkpointId. For deserialization please use {@link * */ public SubtaskSimpleVersionedSerializer() { this.checkpointId = null; } @Override public int getVersion() { return 0; } @Override public byte[] serialize(SubtaskCommittableManager<CommT> subtask) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); SimpleVersionedSerialization.writeVersionAndSerializeList( new RequestSimpleVersionedSerializer(), new ArrayList<>(subtask.getRequests()), out); out.writeInt(subtask.getNumCommittables()); out.writeInt(subtask.getNumDrained()); out.writeInt(subtask.getNumFailed()); return out.getCopyOfBuffer(); } @Override public SubtaskCommittableManager<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); List<CommitRequestImpl<CommT>> requests = SimpleVersionedSerialization.readVersionAndDeserializeList( new RequestSimpleVersionedSerializer(), in); return new SubtaskCommittableManager<>( requests, in.readInt(), in.readInt(), in.readInt(), subtaskId, checkNotNull( checkpointId, "CheckpointId must be set to align the SubtaskCommittableManager with holding CheckpointCommittableManager.")); } private class RequestSimpleVersionedSerializer implements SimpleVersionedSerializer<CommitRequestImpl<CommT>> { @Override public int getVersion() { return 0; } @Override public byte[] serialize(CommitRequestImpl<CommT> request) throws IOException { DataOutputSerializer out = new DataOutputSerializer(256); SimpleVersionedSerialization.writeVersionAndSerialize( committableSerializer, request.getCommittable(), out); out.writeInt(request.getNumberOfRetries()); out.writeInt(request.getState().ordinal()); return out.getCopyOfBuffer(); } @Override public CommitRequestImpl<CommT> deserialize(int version, byte[] serialized) throws IOException { DataInputDeserializer in = new DataInputDeserializer(serialized); CommT committable = SimpleVersionedSerialization.readVersionAndDeSerialize( committableSerializer, in); return new CommitRequestImpl<>( committable, in.readInt(), CommitRequestState.values()[in.readInt()]); } } } }
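One hedged alternative to the `@Nullable` ctor argument discussed above, with hypothetical names rather than the project's API: role-specific factory methods make the write-only and read roles explicit at the call site, though the read path still has to fail when no checkpointId was supplied.
```java
// Sketch (hypothetical class): factories instead of two ctors. The null check on the
// read path is unavoidable either way; the factories only make the intent explicit.
final class SubtaskSerializerSketch {
    @Nullable private final Long checkpointId;

    private SubtaskSerializerSketch(@Nullable Long checkpointId) {
        this.checkpointId = checkpointId;
    }

    static SubtaskSerializerSketch forWriting() {
        return new SubtaskSerializerSketch(null); // serialize-only instance
    }

    static SubtaskSerializerSketch forReading(long checkpointId) {
        return new SubtaskSerializerSketch(checkpointId); // id needed to rebuild state
    }

    long requiredCheckpointId() {
        return checkNotNull(checkpointId, "deserialization requires a checkpointId");
    }
}
```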
I've addressed this in another PR https://github.com/apache/beam/pull/9820/files#r335994291 but tl;dr: the ConnectionHandler used in the constructor and `close` is not the same one used in `start`. I now defer initializing the class-level ConnectionHandler to `start`, as it feels more in line with start/stop semantics.
public Instant getWatermark() { return checkpointMark.oldestTimestamp; }
return checkpointMark.oldestTimestamp;
public Instant getWatermark() { return checkpointMark.oldestTimestamp; }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private String queueName; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); } @Override @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public boolean start() throws IOException { try { connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; channel.txSelect(); } catch (Exception e) { throw new IOException(e); } return advance(); } @Override public boolean advance() throws IOException { try { Channel channel = connectionHandler.getChannel(); GetResponse delivery = channel.basicGet(queueName, false); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProps().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProps().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private String queueName; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); } @Override @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public boolean start() throws IOException { try { connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; channel.txSelect(); } catch (Exception e) { throw new IOException(e); } return advance(); } @Override public boolean advance() throws IOException { try { Channel channel = connectionHandler.getChannel(); GetResponse delivery = channel.basicGet(queueName, false); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProps().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProps().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
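A condensed view of the lifecycle the comment describes, taken from the reader above: the `ConnectionHandler` is created in `start()` rather than the constructor, so `close()` must tolerate a reader that was never started.
```java
// Excerpt (identifiers as in UnboundedRabbitMqReader above):
@Override
public boolean start() throws IOException {
    connectionHandler = new ConnectionHandler(source.spec.uri()); // deferred init
    connectionHandler.start();
    // ... queue/exchange declaration elided ...
    return advance();
}

@Override
public void close() throws IOException {
    if (connectionHandler != null) { // null if start() was never reached
        connectionHandler.stop();
    }
}
```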
I left it because, for our infra tests here, when we change something for a playback we re-record it with the same method. So this is the "record" path: it isn't persisted, but it's handy to have around.
public void testRedactRequestBodyRegex() { HttpClient client = interceptorManager.getPlaybackClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); HttpRequest request = new HttpRequest(HttpMethod.POST, "http: request.setHeader(HttpHeaderName.CONTENT_TYPE, "application/x-www-form-urlencoded"); request.setBody("first_value=value&client_secret=aVerySecretSecret&other=value&is=cool"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals(record.getRequestBody(), "first_value=value&client_secret=REDACTED&other=value&is=cool"); }
public void testRedactRequestBodyRegex() { HttpClient client = interceptorManager.getPlaybackClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); interceptorManager.addMatchers(new CustomMatcher().setHeadersKeyOnlyMatch(Collections.singletonList("Accept"))); HttpRequest request = new HttpRequest(HttpMethod.POST, "http: request.setHeader(HttpHeaderName.CONTENT_TYPE, "application/x-www-form-urlencoded"); request.setBody("first_value=value&client_secret=aVerySecretSecret&other=value&is=cool"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals(record.getRequestBody(), "first_value=value&client_secret=REDACTED&other=value&is=cool"); }
class TestProxyTests extends TestProxyTestBase { public static final String TEST_DATA = "{\"test\":\"proxy\"}"; static TestProxyTestServer server; private static final ObjectMapper RECORD_MAPPER = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT); private static final List<TestProxySanitizer> CUSTOM_SANITIZER = new ArrayList<>(); public static final String REDACTED = "REDACTED"; private static final HttpHeaderName OCP_APIM_SUBSCRIPTION_KEY = HttpHeaderName.fromString("Ocp-Apim-Subscription-Key"); static { CUSTOM_SANITIZER.add(new TestProxySanitizer("$..modelId", null, REDACTED, TestProxySanitizerType.BODY_KEY)); CUSTOM_SANITIZER.add(new TestProxySanitizer("TableName\\\"*:*\\\"(?<tablename>.*)\\\"", REDACTED, TestProxySanitizerType.BODY_REGEX).setGroupForReplace("tablename")); } @BeforeAll public static void setupClass() { server = new TestProxyTestServer(); } @AfterAll public static void teardownClass() { server.close(); } @Test @Tag("Record") public void testBasicRecord() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testOrdering() { String name = testResourceNamer.randomName("test", 10); assertEquals("test32950", name); } @Test @Tag("Record") @DoNotRecord public void testDoNotRecord() { testResourceNamer.now(); } @Test @Tag("Playback") @DoNotRecord public void testDoNotPlayback() { testResourceNamer.now(); } @Test @Tag("Playback") public void testMismatch() { HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: RuntimeException thrown = assertThrows(RuntimeException.class, () -> client.sendSync(request, Context.NONE)); assertTrue(thrown.getMessage().contains("Uri doesn't match")); } @Test @Tag("Record") @RecordWithoutRequestBody public void testRecordWithPath() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.POST, "http: .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.CONTENT_LENGTH, String.valueOf(TEST_DATA.length())); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Record") public void testRecordWithHeaders() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(HttpHeaderName.fromString("header1"), "value1") .setHeader(HttpHeaderName.fromString("header2"), "value2"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testPlayback() { HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest 
request = new HttpRequest(HttpMethod.GET, "http: .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = client.sendSync(request, Context.NONE)) { assertEquals("first path", response.getBodyAsBinaryData().toString()); assertEquals(200, response.getStatusCode()); } } @Test @Tag("Live") public void testCannotGetPlaybackClient() { RuntimeException thrown = assertThrows(IllegalStateException.class, () -> interceptorManager.getPlaybackClient()); assertEquals("A playback client can only be requested in PLAYBACK mode.", thrown.getMessage()); } @Test @Tag("Live") public void testCannotGetRecordPolicy() { RuntimeException thrown = assertThrows(IllegalStateException.class, () -> interceptorManager.getRecordPolicy()); assertEquals("A recording policy can only be requested in RECORD mode.", thrown.getMessage()); } @Test @Tag("Playback") public void testRecordWithRedaction() { interceptorManager.addSanitizers(CUSTOM_SANITIZER); HttpClient client = interceptorManager.getPlaybackClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(OCP_APIM_SUBSCRIPTION_KEY, "SECRET_API_KEY") .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(response.getStatusCode(), 200); assertEquals(200, response.getStatusCode()); RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals("http: assertEquals(REDACTED, record.getHeaders().get("Ocp-Apim-Subscription-Key")); assertTrue(record.getResponseHeaders() .get("Operation-Location") .startsWith("https: assertEquals(REDACTED, record.getResponse().get("modelId")); assertEquals(REDACTED, record.getResponse().get("client_secret")); } } @Test @Tag("Playback") public void testPlaybackWithRedaction() { interceptorManager.addSanitizers(CUSTOM_SANITIZER); interceptorManager.addMatchers(Collections.singletonList( new CustomMatcher().setExcludedHeaders(Collections.singletonList("Ocp-Apim-Subscription-Key")))); HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(OCP_APIM_SUBSCRIPTION_KEY, "SECRET_API_KEY") .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = client.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testBodyRegexRedactRecord() { HttpClient client = interceptorManager.getPlaybackClient(); interceptorManager.addSanitizers(CUSTOM_SANITIZER); interceptorManager.addMatchers(new CustomMatcher().setHeadersKeyOnlyMatch(Collections.singletonList("Accept"))); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: request.setHeader(HttpHeaderName.CONTENT_TYPE, "application/json"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals("http: assertTrue(record.getResponse() .get("Body") 
.contains("<UserDelegationKey><SignedTid>REDACTED</SignedTid></UserDelegationKey>")); assertTrue(record.getResponse().get("primaryKey").contains("<PrimaryKey>REDACTED</PrimaryKey>")); assertEquals(record.getResponse().get("TableName"), REDACTED); } @Test @Tag("Playback") @Test @Tag("Live") public void canGetTestProxyVersion() { String version = TestProxyUtils.getTestProxyVersion(this.getTestClassPath()); assertNotNull(version); } @Test @Tag("Record") public void testResetTestProxyData() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); final HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); try (HttpResponse response = pipeline.sendSync(new HttpRequest(HttpMethod.GET, "http: assertEquals(200, response.getStatusCode()); HttpHeaders headers = response.getRequest().getHeaders(); assertNull(headers.get(HttpHeaderName.fromString("x-recording-upstream-base-uri"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-mode"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-id"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-skip"))); } } @Test @Tag("Record") public void testRecordWithRedirect() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client) .policies(new RedirectPolicy(), interceptorManager.getRecordPolicy()) .build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); assertEquals("http: response.getRequest().getUrl().toString()); assertNull( response.getRequest().getHeaders().get(HttpHeaderName.fromString("x-recording-upstream-base-uri"))); } } private RecordedTestProxyData readDataFromFile() { try { BufferedReader reader = Files.newBufferedReader(Paths.get(interceptorManager.getRecordingFileLocation())); return RECORD_MAPPER.readValue(reader, RecordedTestProxyData.class); } catch (IOException ex) { throw new UncheckedIOException(ex); } } @JsonIgnoreProperties(ignoreUnknown = true) static class RecordedTestProxyData { @JsonProperty("Entries") private final LinkedList<TestProxyDataRecord> testProxyDataRecords; RecordedTestProxyData() { testProxyDataRecords = new LinkedList<>(); } public LinkedList<TestProxyDataRecord> getTestProxyDataRecords() { return testProxyDataRecords; } @JsonIgnoreProperties(ignoreUnknown = true) static class TestProxyDataRecord { @JsonProperty("RequestMethod") private String method; @JsonProperty("RequestUri") private String uri; @JsonProperty("RequestHeaders") private Map<String, String> headers; @JsonProperty("ResponseBody") private Map<String, String> response; @JsonProperty("ResponseHeaders") private Map<String, String> responseHeaders; @JsonProperty("RequestBody") private String requestBody; public String getMethod() { return method; } public String getUri() { return uri; } public Map<String, String> getHeaders() { return headers; } public Map<String, String> getResponse() { return response; } public Map<String, String> getResponseHeaders() { return responseHeaders; } public String getRequestBody() { return requestBody; } } } }
class TestProxyTests extends TestProxyTestBase { public static final String TEST_DATA = "{\"test\":\"proxy\"}"; static TestProxyTestServer server; private static final ObjectMapper RECORD_MAPPER = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT); private static final List<TestProxySanitizer> CUSTOM_SANITIZER = new ArrayList<>(); public static final String REDACTED = "REDACTED"; private static final HttpHeaderName OCP_APIM_SUBSCRIPTION_KEY = HttpHeaderName.fromString("Ocp-Apim-Subscription-Key"); static { CUSTOM_SANITIZER.add(new TestProxySanitizer("$..modelId", null, REDACTED, TestProxySanitizerType.BODY_KEY)); CUSTOM_SANITIZER.add(new TestProxySanitizer("TableName\\\"*:*\\\"(?<tablename>.*)\\\"", REDACTED, TestProxySanitizerType.BODY_REGEX).setGroupForReplace("tablename")); } @BeforeAll public static void setupClass() { server = new TestProxyTestServer(); } @AfterAll public static void teardownClass() { server.close(); } @Test @Tag("Record") public void testBasicRecord() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testOrdering() { String name = testResourceNamer.randomName("test", 10); assertEquals("test32950", name); } @Test @Tag("Record") @DoNotRecord public void testDoNotRecord() { testResourceNamer.now(); } @Test @Tag("Playback") @DoNotRecord public void testDoNotPlayback() { testResourceNamer.now(); } @Test @Tag("Playback") public void testMismatch() { HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: RuntimeException thrown = assertThrows(RuntimeException.class, () -> client.sendSync(request, Context.NONE)); assertTrue(thrown.getMessage().contains("Uri doesn't match")); } @Test @Tag("Record") @RecordWithoutRequestBody public void testRecordWithPath() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.POST, "http: .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.CONTENT_LENGTH, String.valueOf(TEST_DATA.length())); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Record") public void testRecordWithHeaders() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); testResourceNamer.randomName("test", 10); testResourceNamer.now(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(HttpHeaderName.fromString("header1"), "value1") .setHeader(HttpHeaderName.fromString("header2"), "value2"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testPlayback() { HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest 
request = new HttpRequest(HttpMethod.GET, "http: .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = client.sendSync(request, Context.NONE)) { assertEquals("first path", response.getBodyAsBinaryData().toString()); assertEquals(200, response.getStatusCode()); } } @Test @Tag("Live") public void testCannotGetPlaybackClient() { RuntimeException thrown = assertThrows(IllegalStateException.class, () -> interceptorManager.getPlaybackClient()); assertEquals("A playback client can only be requested in PLAYBACK mode.", thrown.getMessage()); } @Test @Tag("Live") public void testCannotGetRecordPolicy() { RuntimeException thrown = assertThrows(IllegalStateException.class, () -> interceptorManager.getRecordPolicy()); assertEquals("A recording policy can only be requested in RECORD mode.", thrown.getMessage()); } @Test @Tag("Playback") public void testRecordWithRedaction() { interceptorManager.addSanitizers(CUSTOM_SANITIZER); HttpClient client = interceptorManager.getPlaybackClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(OCP_APIM_SUBSCRIPTION_KEY, "SECRET_API_KEY") .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(response.getStatusCode(), 200); assertEquals(200, response.getStatusCode()); RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals("http: assertEquals(REDACTED, record.getHeaders().get("Ocp-Apim-Subscription-Key")); assertTrue(record.getResponseHeaders() .get("Operation-Location") .startsWith("https: assertEquals(REDACTED, record.getResponse().get("modelId")); assertEquals(REDACTED, record.getResponse().get("client_secret")); } } @Test @Tag("Playback") public void testPlaybackWithRedaction() { interceptorManager.addSanitizers(CUSTOM_SANITIZER); interceptorManager.addMatchers(Collections.singletonList( new CustomMatcher().setExcludedHeaders(Collections.singletonList("Ocp-Apim-Subscription-Key")))); HttpClient client = interceptorManager.getPlaybackClient(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: .setHeader(OCP_APIM_SUBSCRIPTION_KEY, "SECRET_API_KEY") .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json") .setHeader(HttpHeaderName.ACCEPT, "*/*"); try (HttpResponse response = client.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } } @Test @Tag("Playback") public void testBodyRegexRedactRecord() { HttpClient client = interceptorManager.getPlaybackClient(); interceptorManager.addSanitizers(CUSTOM_SANITIZER); interceptorManager.addMatchers(new CustomMatcher().setHeadersKeyOnlyMatch(Collections.singletonList("Accept"))); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: request.setHeader(HttpHeaderName.CONTENT_TYPE, "application/json"); try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); } RecordedTestProxyData recordedTestProxyData = readDataFromFile(); RecordedTestProxyData.TestProxyDataRecord record = recordedTestProxyData.getTestProxyDataRecords().get(0); assertEquals("http: assertTrue(record.getResponse() .get("Body") 
.contains("<UserDelegationKey><SignedTid>REDACTED</SignedTid></UserDelegationKey>")); assertTrue(record.getResponse().get("primaryKey").contains("<PrimaryKey>REDACTED</PrimaryKey>")); assertEquals(record.getResponse().get("TableName"), REDACTED); } @Test @Tag("Playback") @Test @Tag("Live") public void canGetTestProxyVersion() { String version = TestProxyUtils.getTestProxyVersion(this.getTestClassPath()); assertNotNull(version); } @Test @Tag("Record") public void testResetTestProxyData() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); final HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client).policies(interceptorManager.getRecordPolicy()).build(); try (HttpResponse response = pipeline.sendSync(new HttpRequest(HttpMethod.GET, "http: assertEquals(200, response.getStatusCode()); HttpHeaders headers = response.getRequest().getHeaders(); assertNull(headers.get(HttpHeaderName.fromString("x-recording-upstream-base-uri"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-mode"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-id"))); assertNull(headers.get(HttpHeaderName.fromString("x-recording-skip"))); } } @Test @Tag("Record") public void testRecordWithRedirect() { HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient(); HttpPipeline pipeline = new HttpPipelineBuilder().httpClient(client) .policies(new RedirectPolicy(), interceptorManager.getRecordPolicy()) .build(); HttpRequest request = new HttpRequest(HttpMethod.GET, "http: try (HttpResponse response = pipeline.sendSync(request, Context.NONE)) { assertEquals(200, response.getStatusCode()); assertEquals("http: response.getRequest().getUrl().toString()); assertNull( response.getRequest().getHeaders().get(HttpHeaderName.fromString("x-recording-upstream-base-uri"))); } } private RecordedTestProxyData readDataFromFile() { try { BufferedReader reader = Files.newBufferedReader(Paths.get(interceptorManager.getRecordingFileLocation())); return RECORD_MAPPER.readValue(reader, RecordedTestProxyData.class); } catch (IOException ex) { throw new UncheckedIOException(ex); } } @JsonIgnoreProperties(ignoreUnknown = true) static class RecordedTestProxyData { @JsonProperty("Entries") private final LinkedList<TestProxyDataRecord> testProxyDataRecords; RecordedTestProxyData() { testProxyDataRecords = new LinkedList<>(); } public LinkedList<TestProxyDataRecord> getTestProxyDataRecords() { return testProxyDataRecords; } @JsonIgnoreProperties(ignoreUnknown = true) static class TestProxyDataRecord { @JsonProperty("RequestMethod") private String method; @JsonProperty("RequestUri") private String uri; @JsonProperty("RequestHeaders") private Map<String, String> headers; @JsonProperty("ResponseBody") private Map<String, String> response; @JsonProperty("ResponseHeaders") private Map<String, String> responseHeaders; @JsonProperty("RequestBody") private String requestBody; public String getMethod() { return method; } public String getUri() { return uri; } public Map<String, String> getHeaders() { return headers; } public Map<String, String> getResponse() { return response; } public Map<String, String> getResponseHeaders() { return responseHeaders; } public String getRequestBody() { return requestBody; } } } }
[SpotBugs-P1] Reliance on default encoding: Found a call to a method which will perform a byte to String (or String to byte) conversion, and will assume that the default platform encoding is suitable.
private void processSendWork() { synchronized (this.pendingSendLock) { if (!this.isSendLoopRunning) { this.isSendLoopRunning = true; } else { return; } } TRACE_LOGGER.debug("Processing pending sends to '{}'. Available link credit '{}'", this.sendPath, this.linkCredit); try { if (!this.ensureLinkIsOpen().isDone()) { return; } final Sender sendLinkCurrent = this.sendLink; while (sendLinkCurrent != null && sendLinkCurrent.getLocalState() == EndpointState.ACTIVE && sendLinkCurrent.getRemoteState() == EndpointState.ACTIVE && this.linkCredit > 0) { final WeightedDeliveryTag deliveryTag; final SendWorkItem<DeliveryState> sendData; synchronized (this.pendingSendLock) { deliveryTag = this.pendingSends.poll(); if (deliveryTag == null) { TRACE_LOGGER.debug("There are no pending sends to '{}'.", this.sendPath); this.isSendLoopRunning = false; break; } else { sendData = this.pendingSendsData.get(deliveryTag.getDeliveryTag()); if (sendData == null) { TRACE_LOGGER.debug("SendData not found for this delivery. path:{}, linkName:{}, deliveryTag:{}", this.sendPath, this.sendLink.getName(), deliveryTag); continue; } } } if (sendData.getWork() != null && sendData.getWork().isDone()) { this.pendingSendsData.remove(deliveryTag.getDeliveryTag()); continue; } Delivery delivery = null; boolean linkAdvance = false; int sentMsgSize = 0; Exception sendException = null; try { delivery = sendLinkCurrent.delivery(deliveryTag.getDeliveryTag().getBytes(UTF_8)); delivery.setMessageFormat(sendData.getMessageFormat()); TransactionContext transaction = sendData.getTransaction(); if (transaction != TransactionContext.NULL_TXN) { TransactionalState transactionalState = new TransactionalState(); transactionalState.setTxnId(new Binary(transaction.getTransactionId().array())); delivery.disposition(transactionalState); } TRACE_LOGGER.debug("Sending message delivery '{}' to '{}'", deliveryTag.getDeliveryTag(), this.sendPath); sentMsgSize = sendLinkCurrent.send(sendData.getMessage(), 0, sendData.getEncodedMessageSize()); assert sentMsgSize == sendData.getEncodedMessageSize() : "Contract of the ProtonJ library for Sender.Send API changed"; linkAdvance = sendLinkCurrent.advance(); } catch(Exception exception) { sendException = exception; } if (linkAdvance) { this.linkCredit--; sendData.setWaitingForAck(); } else { TRACE_LOGGER.warn("Sendlink advance failed. path:{}, linkName:{}, deliveryTag:{}, sentMessageSize:{}, payloadActualSiz:{}", this.sendPath, this.sendLink.getName(), deliveryTag, sentMsgSize, sendData.getEncodedMessageSize()); if (delivery != null) { delivery.free(); } Exception completionException = sendException != null ? new OperationCancelledException("Send operation failed. Please see cause for more details", sendException) : new OperationCancelledException(String.format(Locale.US, "Send operation failed while advancing delivery(tag: %s) on SendLink(path: %s).", this.sendPath, deliveryTag)); AsyncUtil.completeFutureExceptionally(sendData.getWork(), completionException); } } } finally { synchronized (this.pendingSendLock) { if (this.isSendLoopRunning) { this.isSendLoopRunning = false; } } } }
delivery = sendLinkCurrent.delivery(deliveryTag.getDeliveryTag().getBytes(UTF_8));
private void processSendWork() { synchronized (this.pendingSendLock) { if (!this.isSendLoopRunning) { this.isSendLoopRunning = true; } else { return; } } TRACE_LOGGER.debug("Processing pending sends to '{}'. Available link credit '{}'", this.sendPath, this.linkCredit); try { if (!this.ensureLinkIsOpen().isDone()) { return; } final Sender sendLinkCurrent = this.sendLink; while (sendLinkCurrent != null && sendLinkCurrent.getLocalState() == EndpointState.ACTIVE && sendLinkCurrent.getRemoteState() == EndpointState.ACTIVE && this.linkCredit > 0) { final WeightedDeliveryTag deliveryTag; final SendWorkItem<DeliveryState> sendData; synchronized (this.pendingSendLock) { deliveryTag = this.pendingSends.poll(); if (deliveryTag == null) { TRACE_LOGGER.debug("There are no pending sends to '{}'.", this.sendPath); this.isSendLoopRunning = false; break; } else { sendData = this.pendingSendsData.get(deliveryTag.getDeliveryTag()); if (sendData == null) { TRACE_LOGGER.debug("SendData not found for this delivery. path:{}, linkName:{}, deliveryTag:{}", this.sendPath, this.sendLink.getName(), deliveryTag); continue; } } } if (sendData.getWork() != null && sendData.getWork().isDone()) { this.pendingSendsData.remove(deliveryTag.getDeliveryTag()); continue; } Delivery delivery = null; boolean linkAdvance = false; int sentMsgSize = 0; Exception sendException = null; try { delivery = sendLinkCurrent.delivery(deliveryTag.getDeliveryTag().getBytes(UTF_8)); delivery.setMessageFormat(sendData.getMessageFormat()); TransactionContext transaction = sendData.getTransaction(); if (transaction != TransactionContext.NULL_TXN) { TransactionalState transactionalState = new TransactionalState(); transactionalState.setTxnId(new Binary(transaction.getTransactionId().array())); delivery.disposition(transactionalState); } TRACE_LOGGER.debug("Sending message delivery '{}' to '{}'", deliveryTag.getDeliveryTag(), this.sendPath); sentMsgSize = sendLinkCurrent.send(sendData.getMessage(), 0, sendData.getEncodedMessageSize()); assert sentMsgSize == sendData.getEncodedMessageSize() : "Contract of the ProtonJ library for Sender.Send API changed"; linkAdvance = sendLinkCurrent.advance(); } catch(Exception exception) { sendException = exception; } if (linkAdvance) { this.linkCredit--; sendData.setWaitingForAck(); } else { TRACE_LOGGER.warn("Sendlink advance failed. path:{}, linkName:{}, deliveryTag:{}, sentMessageSize:{}, payloadActualSiz:{}", this.sendPath, this.sendLink.getName(), deliveryTag, sentMsgSize, sendData.getEncodedMessageSize()); if (delivery != null) { delivery.free(); } Exception completionException = sendException != null ? new OperationCancelledException("Send operation failed. Please see cause for more details", sendException) : new OperationCancelledException(String.format(Locale.US, "Send operation failed while advancing delivery(tag: %s) on SendLink(path: %s).", this.sendPath, deliveryTag)); AsyncUtil.completeFutureExceptionally(sendData.getWork(), completionException); } } } finally { synchronized (this.pendingSendLock) { if (this.isSendLoopRunning) { this.isSendLoopRunning = false; } } } }
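A hedged sketch of this SpotBugs finding and its standard remedy (the record's bodies already show the UTF-8 form; the flagged call presumably used the no-argument `getBytes()`). `UTF_8` is statically imported from `java.nio.charset.StandardCharsets`, as the method implies:
```java
// Default-encoding conversions depend on the host JVM's platform charset, so the
// same delivery tag could serialize to different bytes on different machines.
String tag = deliveryTag.getDeliveryTag();
byte[] implicitBytes = tag.getBytes();        // flagged by SpotBugs: default encoding
byte[] explicitBytes = tag.getBytes(UTF_8);   // deterministic, as in the method above
```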
class CoreMessageSender extends ClientEntity implements IAmqpSender, IErrorContextProvider { private static final Logger TRACE_LOGGER = LoggerFactory.getLogger(CoreMessageSender.class); private static final String SEND_TIMED_OUT = "Send operation timed out"; private static final Duration LINK_REOPEN_TIMEOUT = Duration.ofMinutes(5); private final Object requestResonseLinkCreationLock = new Object(); private final MessagingFactory underlyingFactory; private final String sendPath; private final String sasTokenAudienceURI; private final Duration operationTimeout; private final RetryPolicy retryPolicy; private final CompletableFuture<Void> linkClose; private final Object pendingSendLock; private final ConcurrentHashMap<String, SendWorkItem<DeliveryState>> pendingSendsData; private final PriorityQueue<WeightedDeliveryTag> pendingSends; private final DispatchHandler sendWork; private final MessagingEntityType entityType; private boolean isSendLoopRunning; private Sender sendLink; private RequestResponseLink requestResponseLink; private CompletableFuture<CoreMessageSender> linkFirstOpen; private int linkCredit; private Exception lastKnownLinkError; private Instant lastKnownErrorReportedAt; private ScheduledFuture<?> sasTokenRenewTimerFuture; private CompletableFuture<Void> requestResponseLinkCreationFuture; private CompletableFuture<Void> sendLinkReopenFuture; private SenderLinkSettings linkSettings; private String transferDestinationPath; private String transferSasTokenAudienceURI; private boolean isSendVia; private int maxMessageSize; @Deprecated public static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final String senderPath, final String transferDestinationPath) { return CoreMessageSender.create(factory, clientId, senderPath, transferDestinationPath, null); } public static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final String senderPath, final String transferDestinationPath, final MessagingEntityType entityType) { return CoreMessageSender.create(factory, clientId, entityType, CoreMessageSender.getDefaultLinkProperties(senderPath, transferDestinationPath, factory, entityType)); } static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final MessagingEntityType entityType, final SenderLinkSettings linkSettings) { TRACE_LOGGER.info("Creating core message sender to '{}'", linkSettings.linkPath); final Connection connection = factory.getConnection(); final String sendLinkNamePrefix = "Sender".concat(TrackingUtil.TRACKING_ID_TOKEN_SEPARATOR).concat(StringUtil.getShortRandomString()); linkSettings.linkName = !StringUtil.isNullOrEmpty(connection.getRemoteContainer()) ? 
sendLinkNamePrefix.concat(TrackingUtil.TRACKING_ID_TOKEN_SEPARATOR).concat(connection.getRemoteContainer()) : sendLinkNamePrefix; final CoreMessageSender msgSender = new CoreMessageSender(factory, clientId, entityType, linkSettings); TimeoutTracker openLinkTracker = TimeoutTracker.create(factory.getOperationTimeout()); msgSender.initializeLinkOpen(openLinkTracker); CompletableFuture<Void> authenticationFuture = null; if (linkSettings.requiresAuthentication) { authenticationFuture = msgSender.sendTokenAndSetRenewTimer(false); } else { authenticationFuture = CompletableFuture.completedFuture(null); } authenticationFuture.handleAsync((v, sasTokenEx) -> { if (sasTokenEx != null) { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(sasTokenEx); TRACE_LOGGER.error("Sending SAS Token to '{}' failed.", msgSender.sendPath, cause); msgSender.linkFirstOpen.completeExceptionally(cause); } else { try { msgSender.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { msgSender.createSendLink(msgSender.linkSettings); } }); } catch (IOException ioException) { msgSender.cancelSASTokenRenewTimer(); msgSender.linkFirstOpen.completeExceptionally(new ServiceBusException(false, "Failed to create Sender, see cause for more details.", ioException)); } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); return msgSender.linkFirstOpen; } private CompletableFuture<Void> createRequestResponseLink() { synchronized (this.requestResonseLinkCreationLock) { if (this.requestResponseLinkCreationFuture == null) { this.requestResponseLinkCreationFuture = new CompletableFuture<Void>(); this.underlyingFactory.obtainRequestResponseLinkAsync(this.sendPath, this.transferDestinationPath, this.entityType).handleAsync((rrlink, ex) -> { if (ex == null) { this.requestResponseLink = rrlink; this.requestResponseLinkCreationFuture.complete(null); } else { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(ex); this.requestResponseLinkCreationFuture.completeExceptionally(cause); synchronized (this.requestResonseLinkCreationLock) { this.requestResponseLinkCreationFuture = null; } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); } return this.requestResponseLinkCreationFuture; } } private void closeRequestResponseLink() { synchronized (this.requestResonseLinkCreationLock) { if (this.requestResponseLinkCreationFuture != null) { this.requestResponseLinkCreationFuture.thenRun(() -> { this.underlyingFactory.releaseRequestResponseLink(this.sendPath, this.transferDestinationPath); this.requestResponseLink = null; }); this.requestResponseLinkCreationFuture = null; } } } private CoreMessageSender(final MessagingFactory factory, final String sendLinkName, final MessagingEntityType entityType, final SenderLinkSettings linkSettings) { super(sendLinkName); this.sendPath = linkSettings.linkPath; this.entityType = entityType; if (linkSettings.linkProperties != null) { String transferPath = (String)linkSettings.linkProperties.getOrDefault(ClientConstants.LINK_TRANSFER_DESTINATION_PROPERTY, null); if (transferPath != null && !transferPath.isEmpty()) { this.transferDestinationPath = transferPath; this.isSendVia = true; this.transferSasTokenAudienceURI = String.format(ClientConstants.SAS_TOKEN_AUDIENCE_FORMAT, factory.getHostName(), transferDestinationPath); } else { this.transferDestinationPath = null; } } this.sasTokenAudienceURI = String.format(ClientConstants.SAS_TOKEN_AUDIENCE_FORMAT, factory.getHostName(), linkSettings.linkPath); this.underlyingFactory = factory; 
this.operationTimeout = factory.getOperationTimeout(); this.linkSettings = linkSettings; this.lastKnownLinkError = null; this.lastKnownErrorReportedAt = Instant.EPOCH; this.retryPolicy = factory.getRetryPolicy(); this.pendingSendLock = new Object(); this.pendingSendsData = new ConcurrentHashMap<String, SendWorkItem<DeliveryState>>(); this.pendingSends = new PriorityQueue<WeightedDeliveryTag>(1000, new DeliveryTagComparator()); this.linkCredit = 0; this.linkClose = new CompletableFuture<Void>(); this.sendLinkReopenFuture = null; this.isSendLoopRunning = false; this.sendWork = new DispatchHandler() { @Override public void onEvent() { CoreMessageSender.this.processSendWork(); } }; } public String getSendPath() { return this.sendPath; } private static String generateRandomDeliveryTag() { return UUID.randomUUID().toString().replace("-", StringUtil.EMPTY); } CompletableFuture<DeliveryState> sendCoreAsync( final byte[] bytes, final int arrayOffset, final int messageFormat, final TransactionContext transaction) { this.throwIfClosed(this.lastKnownLinkError); TRACE_LOGGER.debug("Sending message to '{}'", this.sendPath); String deliveryTag = CoreMessageSender.generateRandomDeliveryTag(); CompletableFuture<DeliveryState> onSendFuture = new CompletableFuture<DeliveryState>(); SendWorkItem<DeliveryState> sendWorkItem = new SendWorkItem<DeliveryState>(bytes, arrayOffset, messageFormat, deliveryTag, transaction, onSendFuture, this.operationTimeout); this.enlistSendRequest(deliveryTag, sendWorkItem, false); this.scheduleSendTimeout(sendWorkItem); return onSendFuture; } private void scheduleSendTimeout(SendWorkItem<DeliveryState> sendWorkItem) { ScheduledFuture<?> timeoutTask = Timer.schedule(() -> { if (!sendWorkItem.getWork().isDone()) { TRACE_LOGGER.warn("Delivery '{}' to '{}' did not receive ack from service. Throwing timeout.", sendWorkItem.getDeliveryTag(), CoreMessageSender.this.sendPath); CoreMessageSender.this.pendingSendsData.remove(sendWorkItem.getDeliveryTag()); CoreMessageSender.this.throwSenderTimeout(sendWorkItem.getWork(), sendWorkItem.getLastKnownException()); } }, sendWorkItem.getTimeoutTracker().remaining(), TimerType.OneTimeRun); sendWorkItem.setTimeoutTask(timeoutTask); } private void enlistSendRequest(String deliveryTag, SendWorkItem<DeliveryState> sendWorkItem, boolean isRetrySend) { synchronized (this.pendingSendLock) { this.pendingSendsData.put(deliveryTag, sendWorkItem); this.pendingSends.offer(new WeightedDeliveryTag(deliveryTag, isRetrySend ? 
1 : 0)); if (!this.isSendLoopRunning) { try { this.underlyingFactory.scheduleOnReactorThread(this.sendWork); } catch (IOException ioException) { AsyncUtil.completeFutureExceptionally(sendWorkItem.getWork(), new ServiceBusException(false, "Send failed while dispatching to Reactor, see cause for more details.", ioException)); } } } } private void reSendAsync(String deliveryTag, SendWorkItem<DeliveryState> retryingSendWorkItem, boolean reuseDeliveryTag) { if (!retryingSendWorkItem.getWork().isDone() && retryingSendWorkItem.cancelTimeoutTask(false)) { Duration remainingTime = retryingSendWorkItem.getTimeoutTracker().remaining(); if (!remainingTime.isNegative() && !remainingTime.isZero()) { if (!reuseDeliveryTag) { deliveryTag = CoreMessageSender.generateRandomDeliveryTag(); retryingSendWorkItem.setDeliveryTag(deliveryTag); } this.enlistSendRequest(deliveryTag, retryingSendWorkItem, true); this.scheduleSendTimeout(retryingSendWorkItem); } } } public CompletableFuture<Void> sendAsync(final Iterable<Message> messages, TransactionContext transaction) { if (messages == null || IteratorUtil.sizeEquals(messages, 0)) { throw new IllegalArgumentException("Sending Empty batch of messages is not allowed."); } TRACE_LOGGER.debug("Sending a batch of messages to '{}'", this.sendPath); Message firstMessage = messages.iterator().next(); if (IteratorUtil.sizeEquals(messages, 1)) { return this.sendAsync(firstMessage, transaction); } Message batchMessage = Proton.message(); batchMessage.setMessageAnnotations(firstMessage.getMessageAnnotations()); byte[] bytes = null; int byteArrayOffset = 0; try { Pair<byte[], Integer> encodedPair = Util.encodeMessageToMaxSizeArray(batchMessage, this.maxMessageSize); bytes = encodedPair.getFirstItem(); byteArrayOffset = encodedPair.getSecondItem(); for (Message amqpMessage: messages) { Message messageWrappedByData = Proton.message(); encodedPair = Util.encodeMessageToOptimalSizeArray(amqpMessage, this.maxMessageSize); messageWrappedByData.setBody(new Data(new Binary(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem()))); int encodedSize = Util.encodeMessageToCustomArray(messageWrappedByData, bytes, byteArrayOffset, this.maxMessageSize - byteArrayOffset - 1); byteArrayOffset = byteArrayOffset + encodedSize; } } catch (PayloadSizeExceededException ex) { TRACE_LOGGER.error("Payload size of batch of messages exceeded limit", ex); final CompletableFuture<Void> sendTask = new CompletableFuture<Void>(); sendTask.completeExceptionally(ex); return sendTask; } return this.sendCoreAsync(bytes, byteArrayOffset, AmqpConstants.AMQP_BATCH_MESSAGE_FORMAT, transaction).thenAccept((x) -> { /*Do nothing*/ }); } public CompletableFuture<Void> sendAsync(Message msg, TransactionContext transaction) { return this.sendAndReturnDeliveryStateAsync(msg, transaction).thenAccept((x) -> { /*Do nothing*/ }); } CompletableFuture<DeliveryState> sendAndReturnDeliveryStateAsync(Message msg, TransactionContext transaction) { try { Pair<byte[], Integer> encodedPair = Util.encodeMessageToOptimalSizeArray(msg, this.maxMessageSize); return this.sendCoreAsync(encodedPair.getFirstItem(), encodedPair.getSecondItem(), DeliveryImpl.DEFAULT_MESSAGE_FORMAT, transaction); } catch (PayloadSizeExceededException exception) { TRACE_LOGGER.error("Payload size of message exceeded limit", exception); final CompletableFuture<DeliveryState> sendTask = new CompletableFuture<DeliveryState>(); sendTask.completeExceptionally(exception); return sendTask; } } @Override public void onOpenComplete(Exception completionException) { if 
(completionException == null) { this.maxMessageSize = Util.getMaxMessageSizeFromLink(this.sendLink); this.lastKnownLinkError = null; this.retryPolicy.resetRetryCount(this.getClientId()); if (this.sendLinkReopenFuture != null && !this.sendLinkReopenFuture.isDone()) { AsyncUtil.completeFuture(this.sendLinkReopenFuture, null); } if (!this.linkFirstOpen.isDone()) { TRACE_LOGGER.info("Opened send link to '{}'", this.sendPath); AsyncUtil.completeFuture(this.linkFirstOpen, this); } else { synchronized (this.pendingSendLock) { if (!this.pendingSendsData.isEmpty()) { LinkedList<String> unacknowledgedSends = new LinkedList<String>(); unacknowledgedSends.addAll(this.pendingSendsData.keySet()); if (unacknowledgedSends.size() > 0) { Iterator<String> reverseReader = unacknowledgedSends.iterator(); while (reverseReader.hasNext()) { String unacknowledgedSend = reverseReader.next(); if (this.pendingSendsData.get(unacknowledgedSend).isWaitingForAck()) { this.pendingSends.offer(new WeightedDeliveryTag(unacknowledgedSend, 1)); } } } unacknowledgedSends.clear(); } } } } else { this.cancelSASTokenRenewTimer(); if (!this.linkFirstOpen.isDone()) { TRACE_LOGGER.error("Opening send link '{}' to '{}' failed", this.sendLink.getName(), this.sendPath, completionException); this.setClosed(); ExceptionUtil.completeExceptionally(this.linkFirstOpen, completionException, this, true); } if (this.sendLinkReopenFuture != null && !this.sendLinkReopenFuture.isDone()) { TRACE_LOGGER.warn("Opening send link '{}' to '{}' failed", this.sendLink.getName(), this.sendPath, completionException); AsyncUtil.completeFutureExceptionally(this.sendLinkReopenFuture, completionException); } } } @Override public void onClose(ErrorCondition condition) { Exception completionException = condition != null ? ExceptionUtil.toException(condition) : new ServiceBusException(ClientConstants.DEFAULT_IS_TRANSIENT, "The entity has been closed due to transient failures (underlying link closed), please retry the operation."); this.onError(completionException); } @Override public void onError(Exception completionException) { this.linkCredit = 0; if (this.getIsClosingOrClosed()) { Exception failureException = completionException == null ? new OperationCancelledException("Send cancelled as the Sender instance is Closed before the sendOperation completed.") : completionException; this.clearAllPendingSendsWithException(failureException); TRACE_LOGGER.info("Send link to '{}' closed", this.sendPath); AsyncUtil.completeFuture(this.linkClose, null); return; } else { this.underlyingFactory.deregisterForConnectionError(this.sendLink); this.lastKnownLinkError = completionException; this.lastKnownErrorReportedAt = Instant.now(); this.onOpenComplete(completionException); if (completionException != null && (!(completionException instanceof ServiceBusException) || !((ServiceBusException) completionException).getIsTransient())) { TRACE_LOGGER.warn("Send link '{}' to '{}' closed. 
Failing all pending send requests.", this.sendLink.getName(), this.sendPath); this.clearAllPendingSendsWithException(completionException); } else { final Map.Entry<String, SendWorkItem<DeliveryState>> pendingSendEntry = IteratorUtil.getFirst(this.pendingSendsData.entrySet()); if (pendingSendEntry != null && pendingSendEntry.getValue() != null) { final TimeoutTracker tracker = pendingSendEntry.getValue().getTimeoutTracker(); if (tracker != null) { final Duration nextRetryInterval = this.retryPolicy.getNextRetryInterval(this.getClientId(), completionException, tracker.remaining()); if (nextRetryInterval != null) { TRACE_LOGGER.warn("Send link '{}' to '{}' closed. Will retry link creation after '{}'.", this.sendLink.getName(), this.sendPath, nextRetryInterval); Timer.schedule(() -> {CoreMessageSender.this.ensureLinkIsOpen();}, nextRetryInterval, TimerType.OneTimeRun); } } } } } } @Override public void onSendComplete(final Delivery delivery) { DeliveryState outcome = delivery.getRemoteState(); final String deliveryTag = new String(delivery.getTag(), UTF_8); TRACE_LOGGER.debug("Received ack for delivery. path:{}, linkName:{}, deliveryTag:{}, outcome:{}", CoreMessageSender.this.sendPath, this.sendLink.getName(), deliveryTag, outcome); final SendWorkItem<DeliveryState> pendingSendWorkItem = this.pendingSendsData.remove(deliveryTag); if (pendingSendWorkItem != null) { if (outcome instanceof TransactionalState) { TRACE_LOGGER.trace("State of delivery is Transactional, retrieving outcome: {}", outcome); Outcome transactionalOutcome = ((TransactionalState) outcome).getOutcome(); if (transactionalOutcome instanceof DeliveryState) { outcome = (DeliveryState) transactionalOutcome; } else { this.cleanupFailedSend(pendingSendWorkItem, new ServiceBusException(false, "Unknown delivery state: " + outcome.toString())); return; } } if (outcome instanceof Accepted) { this.lastKnownLinkError = null; this.retryPolicy.resetRetryCount(this.getClientId()); pendingSendWorkItem.cancelTimeoutTask(false); AsyncUtil.completeFuture(pendingSendWorkItem.getWork(), outcome); } else if (outcome instanceof Declared) { AsyncUtil.completeFuture(pendingSendWorkItem.getWork(), outcome); } else if (outcome instanceof Rejected) { Rejected rejected = (Rejected) outcome; ErrorCondition error = rejected.getError(); Exception exception = ExceptionUtil.toException(error); if (ExceptionUtil.isGeneralError(error.getCondition())) { this.lastKnownLinkError = exception; this.lastKnownErrorReportedAt = Instant.now(); } Duration retryInterval = this.retryPolicy.getNextRetryInterval( this.getClientId(), exception, pendingSendWorkItem.getTimeoutTracker().remaining()); if (retryInterval == null) { this.cleanupFailedSend(pendingSendWorkItem, exception); } else { TRACE_LOGGER.warn("Send failed for delivery '{}'. Will retry after '{}'", deliveryTag, retryInterval); pendingSendWorkItem.setLastKnownException(exception); Timer.schedule(() -> {CoreMessageSender.this.reSendAsync(deliveryTag, pendingSendWorkItem, false);}, retryInterval, TimerType.OneTimeRun); } } else if (outcome instanceof Released) { this.cleanupFailedSend(pendingSendWorkItem, new OperationCancelledException(outcome.toString())); } else { this.cleanupFailedSend(pendingSendWorkItem, new ServiceBusException(false, outcome.toString())); } } else { TRACE_LOGGER.warn("Delivery mismatch. 
path:{}, linkName:{}, delivery:{}", this.sendPath, this.sendLink.getName(), deliveryTag); } } private void clearAllPendingSendsWithException(Throwable failureException) { synchronized (this.pendingSendLock) { for (Map.Entry<String, SendWorkItem<DeliveryState>> pendingSend: this.pendingSendsData.entrySet()) { this.cleanupFailedSend(pendingSend.getValue(), failureException); } this.pendingSendsData.clear(); this.pendingSends.clear(); } } private void cleanupFailedSend(final SendWorkItem<DeliveryState> failedSend, final Throwable exception) { failedSend.cancelTimeoutTask(false); ExceptionUtil.completeExceptionally(failedSend.getWork(), exception, this, true); } private static SenderLinkSettings getDefaultLinkProperties(String sendPath, String transferDestinationPath, MessagingFactory underlyingFactory, MessagingEntityType entityType) { SenderLinkSettings linkSettings = new SenderLinkSettings(); linkSettings.linkPath = sendPath; final Target target = new Target(); target.setAddress(sendPath); linkSettings.target = target; linkSettings.source = new Source(); linkSettings.settleMode = SenderSettleMode.UNSETTLED; linkSettings.requiresAuthentication = true; Map<Symbol, Object> linkProperties = new HashMap<>(); linkProperties.put(ClientConstants.LINK_TIMEOUT_PROPERTY, UnsignedInteger.valueOf(Util.adjustServerTimeout(underlyingFactory.getOperationTimeout()).toMillis())); if (entityType != null) { linkProperties.put(ClientConstants.ENTITY_TYPE_PROPERTY, entityType.getIntValue()); } if (transferDestinationPath != null && !transferDestinationPath.isEmpty()) { linkProperties.put(ClientConstants.LINK_TRANSFER_DESTINATION_PROPERTY, transferDestinationPath); } linkSettings.linkProperties = linkProperties; return linkSettings; } private void createSendLink(SenderLinkSettings linkSettings) { TRACE_LOGGER.info("Creating send link to '{}'", this.sendPath); final Connection connection = this.underlyingFactory.getConnection(); final Session session = connection.session(); session.setOutgoingWindow(Integer.MAX_VALUE); session.open(); BaseHandler.setHandler(session, new SessionHandler(sendPath)); final Sender sender = session.sender(linkSettings.linkName); sender.setTarget(linkSettings.target); sender.setSource(linkSettings.source); sender.setProperties(linkSettings.linkProperties); TRACE_LOGGER.debug("Send link settle mode '{}'", linkSettings.settleMode); sender.setSenderSettleMode(linkSettings.settleMode); SendLinkHandler handler = new SendLinkHandler(CoreMessageSender.this); BaseHandler.setHandler(sender, handler); sender.open(); this.sendLink = sender; this.underlyingFactory.registerForConnectionError(this.sendLink); } CompletableFuture<Void> sendTokenAndSetRenewTimer(boolean retryOnFailure) { if (this.getIsClosingOrClosed()) { return CompletableFuture.completedFuture(null); } else { CompletableFuture<ScheduledFuture<?>> sendTokenFuture = this.underlyingFactory.sendSecurityTokenAndSetRenewTimer(this.sasTokenAudienceURI, retryOnFailure, () -> this.sendTokenAndSetRenewTimer(true)); CompletableFuture<Void> sasTokenFuture = sendTokenFuture.thenAccept((f) -> {this.sasTokenRenewTimerFuture = f;}); if (this.transferDestinationPath!= null && !this.transferDestinationPath.isEmpty()) { CompletableFuture<Void> transferSendTokenFuture = this.underlyingFactory.sendSecurityToken(this.transferSasTokenAudienceURI); return CompletableFuture.allOf(sasTokenFuture, transferSendTokenFuture); } return sasTokenFuture; } } private void cancelSASTokenRenewTimer() { if (this.sasTokenRenewTimerFuture != null && 
!this.sasTokenRenewTimerFuture.isDone()) { this.sasTokenRenewTimerFuture.cancel(true); TRACE_LOGGER.debug("Cancelled SAS Token renew timer"); } } private void initializeLinkOpen(TimeoutTracker timeout) { this.linkFirstOpen = new CompletableFuture<CoreMessageSender>(); Timer.schedule( () -> { if (!CoreMessageSender.this.linkFirstOpen.isDone()) { CoreMessageSender.this.closeInternals(false); CoreMessageSender.this.setClosed(); Exception operationTimedout = new TimeoutException( String.format(Locale.US, "Open operation on SendLink(%s) on Entity(%s) timed out at %s.", CoreMessageSender.this.sendLink.getName(), CoreMessageSender.this.getSendPath(), ZonedDateTime.now().toString()), CoreMessageSender.this.lastKnownErrorReportedAt.isAfter(Instant.now().minusSeconds(ClientConstants.SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS)) ? CoreMessageSender.this.lastKnownLinkError : null); TRACE_LOGGER.warn(operationTimedout.getMessage()); ExceptionUtil.completeExceptionally(CoreMessageSender.this.linkFirstOpen, operationTimedout, CoreMessageSender.this, true); } } , timeout.remaining() , TimerType.OneTimeRun); } @Override public ErrorContext getContext() { final boolean isLinkOpened = this.linkFirstOpen != null && this.linkFirstOpen.isDone(); final String referenceId = this.sendLink != null && this.sendLink.getRemoteProperties() != null && this.sendLink.getRemoteProperties().containsKey(ClientConstants.TRACKING_ID_PROPERTY) ? this.sendLink.getRemoteProperties().get(ClientConstants.TRACKING_ID_PROPERTY).toString() : ((this.sendLink != null) ? this.sendLink.getName() : null); SenderErrorContext errorContext = new SenderErrorContext( this.underlyingFactory!=null ? this.underlyingFactory.getHostName() : null, this.sendPath, referenceId, isLinkOpened && this.sendLink != null ? this.sendLink.getCredit() : null); return errorContext; } @Override public void onFlow(final int creditIssued) { this.lastKnownLinkError = null; if (creditIssued <= 0) { return; } TRACE_LOGGER.debug("Received flow frame. 
path:{}, linkName:{}, remoteLinkCredit:{}, pendingSendsWaitingForCredit:{}, pendingSendsWaitingDelivery:{}", this.sendPath, this.sendLink.getName(), creditIssued, this.pendingSends.size(), this.pendingSendsData.size() - this.pendingSends.size()); this.linkCredit = this.linkCredit + creditIssued; this.sendWork.onEvent(); } private synchronized CompletableFuture<Void> ensureLinkIsOpen() { if (!(this.sendLink.getLocalState() == EndpointState.ACTIVE && this.sendLink.getRemoteState() == EndpointState.ACTIVE)) { if (this.sendLinkReopenFuture == null || this.sendLinkReopenFuture.isDone()) { TRACE_LOGGER.info("Recreating send link to '{}'", this.sendPath); this.retryPolicy.incrementRetryCount(CoreMessageSender.this.getClientId()); this.sendLinkReopenFuture = new CompletableFuture<>(); final CompletableFuture<Void> linkReopenFutureThatCanBeCancelled = this.sendLinkReopenFuture; Timer.schedule( () -> { if (!linkReopenFutureThatCanBeCancelled.isDone()) { CoreMessageSender.this.cancelSASTokenRenewTimer(); Exception operationTimedout = new TimeoutException( String.format(Locale.US, "%s operation on SendLink(%s) to path(%s) timed out at %s.", "Open", CoreMessageSender.this.sendLink.getName(), CoreMessageSender.this.sendPath, ZonedDateTime.now())); TRACE_LOGGER.warn(operationTimedout.getMessage()); linkReopenFutureThatCanBeCancelled.completeExceptionally(operationTimedout); } } , CoreMessageSender.LINK_REOPEN_TIMEOUT , TimerType.OneTimeRun); this.cancelSASTokenRenewTimer(); CompletableFuture<Void> authenticationFuture; if (linkSettings.requiresAuthentication) { authenticationFuture = this.sendTokenAndSetRenewTimer(false); } else { authenticationFuture = CompletableFuture.completedFuture(null); } authenticationFuture.handleAsync((v, sendTokenEx) -> { if (sendTokenEx != null) { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(sendTokenEx); TRACE_LOGGER.error("Sending SAS Token to '{}' failed.", this.sendPath, cause); this.sendLinkReopenFuture.completeExceptionally(sendTokenEx); this.clearAllPendingSendsWithException(sendTokenEx); } else { try { this.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { CoreMessageSender.this.createSendLink(CoreMessageSender.this.linkSettings); } }); } catch (IOException ioEx) { this.sendLinkReopenFuture.completeExceptionally(ioEx); } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); } return this.sendLinkReopenFuture; } else { return CompletableFuture.completedFuture(null); } } private void throwSenderTimeout(CompletableFuture<DeliveryState> pendingSendWork, Exception lastKnownException) { Exception cause = lastKnownException; if (lastKnownException == null && this.lastKnownLinkError != null) { cause = this.lastKnownErrorReportedAt.isAfter(Instant.now().minusMillis(this.operationTimeout.toMillis())) ? this.lastKnownLinkError : null; } boolean isClientSideTimeout = (cause == null || !(cause instanceof ServiceBusException)); ServiceBusException exception = isClientSideTimeout ? 
new TimeoutException(String.format(Locale.US, "%s %s %s.", CoreMessageSender.SEND_TIMED_OUT, " at ", ZonedDateTime.now()), cause) : (ServiceBusException) cause; TRACE_LOGGER.error("Send timed out", exception); ExceptionUtil.completeExceptionally(pendingSendWork, exception, this, true); } private void scheduleLinkCloseTimeout(final TimeoutTracker timeout) { Timer.schedule( () -> { if (!linkClose.isDone()) { Exception operationTimedout = new TimeoutException(String.format(Locale.US, "%s operation on Send Link(%s) timed out at %s", "Close", CoreMessageSender.this.sendLink.getName(), ZonedDateTime.now())); TRACE_LOGGER.warn(operationTimedout.getMessage()); ExceptionUtil.completeExceptionally(linkClose, operationTimedout, CoreMessageSender.this, true); } } , timeout.remaining() , TimerType.OneTimeRun); } @Override protected CompletableFuture<Void> onClose() { this.closeInternals(true); return this.linkClose; } private void closeInternals(boolean waitForCloseCompletion) { if (!this.getIsClosed()) { if (this.sendLink != null && this.sendLink.getLocalState() != EndpointState.CLOSED) { try { this.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { if (CoreMessageSender.this.sendLink != null && CoreMessageSender.this.sendLink.getLocalState() != EndpointState.CLOSED) { TRACE_LOGGER.info("Closing send link to '{}'", CoreMessageSender.this.sendPath); CoreMessageSender.this.underlyingFactory.deregisterForConnectionError(CoreMessageSender.this.sendLink); CoreMessageSender.this.sendLink.close(); if (waitForCloseCompletion) { CoreMessageSender.this.scheduleLinkCloseTimeout(TimeoutTracker.create(CoreMessageSender.this.operationTimeout)); } else { AsyncUtil.completeFuture(CoreMessageSender.this.linkClose, null); } } } }); } catch (IOException e) { AsyncUtil.completeFutureExceptionally(this.linkClose, e); } } else { AsyncUtil.completeFuture(this.linkClose, null); } this.cancelSASTokenRenewTimer(); this.closeRequestResponseLink(); } } private static class WeightedDeliveryTag { private final String deliveryTag; private final int priority; WeightedDeliveryTag(final String deliveryTag, final int priority) { this.deliveryTag = deliveryTag; this.priority = priority; } public String getDeliveryTag() { return this.deliveryTag; } public int getPriority() { return this.priority; } } private static class DeliveryTagComparator implements Comparator<WeightedDeliveryTag> { @Override public int compare(WeightedDeliveryTag deliveryTag0, WeightedDeliveryTag deliveryTag1) { return deliveryTag1.getPriority() - deliveryTag0.getPriority(); } } public CompletableFuture<long[]> scheduleMessageAsync(Message[] messages, TransactionContext transaction, Duration timeout) { TRACE_LOGGER.debug("Sending '{}' scheduled message(s) to '{}'", messages.length, this.sendPath); return this.createRequestResponseLink().thenComposeAsync((v) -> { HashMap requestBodyMap = new HashMap(); Collection<HashMap> messageList = new LinkedList<HashMap>(); for (Message message : messages) { HashMap messageEntry = new HashMap(); Pair<byte[], Integer> encodedPair; try { encodedPair = Util.encodeMessageToOptimalSizeArray(message, this.maxMessageSize); } catch (PayloadSizeExceededException exception) { TRACE_LOGGER.error("Payload size of message exceeded limit", exception); final CompletableFuture<long[]> scheduleMessagesTask = new CompletableFuture<long[]>(); scheduleMessagesTask.completeExceptionally(exception); return scheduleMessagesTask; } messageEntry.put(ClientConstants.REQUEST_RESPONSE_MESSAGE, new
Binary(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem())); messageEntry.put(ClientConstants.REQUEST_RESPONSE_MESSAGE_ID, message.getMessageId()); String sessionId = message.getGroupId(); if (!StringUtil.isNullOrEmpty(sessionId)) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_SESSION_ID, sessionId); } Object partitionKey = message.getMessageAnnotations().getValue().get(Symbol.valueOf(ClientConstants.PARTITIONKEYNAME)); if (partitionKey != null && !((String)partitionKey).isEmpty()) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_PARTITION_KEY, (String)partitionKey); } Object viaPartitionKey = message.getMessageAnnotations().getValue().get(Symbol.valueOf(ClientConstants.VIAPARTITIONKEYNAME)); if (viaPartitionKey != null && !((String)viaPartitionKey).isEmpty()) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_VIA_PARTITION_KEY, (String)viaPartitionKey); } messageList.add(messageEntry); } requestBodyMap.put(ClientConstants.REQUEST_RESPONSE_MESSAGES, messageList); Message requestMessage = RequestResponseUtils.createRequestMessageFromPropertyBag(ClientConstants.REQUEST_RESPONSE_SCHEDULE_MESSAGE_OPERATION, requestBodyMap, Util.adjustServerTimeout(timeout), this.sendLink.getName()); CompletableFuture<Message> responseFuture = this.requestResponseLink.requestAysnc(requestMessage, transaction, timeout); return responseFuture.thenComposeAsync((responseMessage) -> { CompletableFuture<long[]> returningFuture = new CompletableFuture<long[]>(); int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); if (statusCode == ClientConstants.REQUEST_RESPONSE_OK_STATUS_CODE) { long[] sequenceNumbers = (long[])RequestResponseUtils.getResponseBody(responseMessage).get(ClientConstants.REQUEST_RESPONSE_SEQUENCE_NUMBERS); if (TRACE_LOGGER.isDebugEnabled()) { TRACE_LOGGER.debug("Scheduled messages sent. 
Received sequence numbers '{}'", Arrays.toString(sequenceNumbers)); } returningFuture.complete(sequenceNumbers); } else { Exception scheduleException = RequestResponseUtils.genereateExceptionFromResponse(responseMessage); TRACE_LOGGER.error("Sending scheduled messages to '{}' failed.", this.sendPath, scheduleException); returningFuture.completeExceptionally(scheduleException); } return returningFuture; }, MessagingFactory.INTERNAL_THREAD_POOL); }, MessagingFactory.INTERNAL_THREAD_POOL); } public CompletableFuture<Void> cancelScheduledMessageAsync(Long[] sequenceNumbers, Duration timeout) { if (TRACE_LOGGER.isDebugEnabled()) { TRACE_LOGGER.debug("Cancelling scheduled message(s) '{}' to '{}'", Arrays.toString(sequenceNumbers), this.sendPath); } return this.createRequestResponseLink().thenComposeAsync((v) -> { HashMap requestBodyMap = new HashMap(); requestBodyMap.put(ClientConstants.REQUEST_RESPONSE_SEQUENCE_NUMBERS, sequenceNumbers); Message requestMessage = RequestResponseUtils.createRequestMessageFromPropertyBag(ClientConstants.REQUEST_RESPONSE_CANCEL_CHEDULE_MESSAGE_OPERATION, requestBodyMap, Util.adjustServerTimeout(timeout), this.sendLink.getName()); CompletableFuture<Message> responseFuture = this.requestResponseLink.requestAysnc(requestMessage, TransactionContext.NULL_TXN, timeout); return responseFuture.thenComposeAsync((responseMessage) -> { CompletableFuture<Void> returningFuture = new CompletableFuture<Void>(); int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); if (statusCode == ClientConstants.REQUEST_RESPONSE_OK_STATUS_CODE) { TRACE_LOGGER.debug("Cancelled scheduled messages in '{}'", this.sendPath); returningFuture.complete(null); } else { Exception failureException = RequestResponseUtils.genereateExceptionFromResponse(responseMessage); TRACE_LOGGER.error("Cancelling scheduled messages in '{}' failed.", this.sendPath, failureException); returningFuture.completeExceptionally(failureException); } return returningFuture; }, MessagingFactory.INTERNAL_THREAD_POOL); }, MessagingFactory.INTERNAL_THREAD_POOL); } public CompletableFuture<Collection<Message>> peekMessagesAsync(long fromSequenceNumber, int messageCount) { TRACE_LOGGER.debug("Peeking '{}' messages in '{}' from sequence number '{}'", messageCount, this.sendPath, fromSequenceNumber); return this.createRequestResponseLink().thenComposeAsync((v) -> CommonRequestResponseOperations.peekMessagesAsync(this.requestResponseLink, this.operationTimeout, fromSequenceNumber, messageCount, null, this.sendLink.getName()), MessagingFactory.INTERNAL_THREAD_POOL); } }
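createRequestResponseLink() in the class above memoizes an in-flight CompletableFuture under a lock and clears it on failure, so concurrent callers share one creation attempt while a later caller can retry after an error. A generic sketch of that pattern, with hypothetical names (MemoizedAsyncResource, get); it is not the class's actual implementation.

import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

// Sketch of the memoized-future pattern: one shared in-flight future,
// cache cleared on failure so the next call can retry resource creation.
final class MemoizedAsyncResource {
    private final Object lock = new Object();
    private final Supplier<CompletableFuture<Void>> factory;
    private CompletableFuture<Void> creationFuture;

    MemoizedAsyncResource(Supplier<CompletableFuture<Void>> factory) {
        this.factory = factory;
    }

    CompletableFuture<Void> get() {
        synchronized (lock) {
            if (creationFuture == null) {
                final CompletableFuture<Void> future = new CompletableFuture<>();
                creationFuture = future;
                factory.get().whenComplete((v, ex) -> {
                    if (ex == null) {
                        future.complete(null);
                    } else {
                        future.completeExceptionally(ex);
                        synchronized (lock) {
                            if (creationFuture == future) {
                                creationFuture = null; // allow a retry on the next call
                            }
                        }
                    }
                });
            }
            return creationFuture;
        }
    }
}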
class CoreMessageSender extends ClientEntity implements IAmqpSender, IErrorContextProvider { private static final Logger TRACE_LOGGER = LoggerFactory.getLogger(CoreMessageSender.class); private static final String SEND_TIMED_OUT = "Send operation timed out"; private static final Duration LINK_REOPEN_TIMEOUT = Duration.ofMinutes(5); private final Object requestResonseLinkCreationLock = new Object(); private final MessagingFactory underlyingFactory; private final String sendPath; private final String sasTokenAudienceURI; private final Duration operationTimeout; private final RetryPolicy retryPolicy; private final CompletableFuture<Void> linkClose; private final Object pendingSendLock; private final ConcurrentHashMap<String, SendWorkItem<DeliveryState>> pendingSendsData; private final PriorityQueue<WeightedDeliveryTag> pendingSends; private final DispatchHandler sendWork; private final MessagingEntityType entityType; private boolean isSendLoopRunning; private Sender sendLink; private RequestResponseLink requestResponseLink; private CompletableFuture<CoreMessageSender> linkFirstOpen; private int linkCredit; private Exception lastKnownLinkError; private Instant lastKnownErrorReportedAt; private ScheduledFuture<?> sasTokenRenewTimerFuture; private CompletableFuture<Void> requestResponseLinkCreationFuture; private CompletableFuture<Void> sendLinkReopenFuture; private SenderLinkSettings linkSettings; private String transferDestinationPath; private String transferSasTokenAudienceURI; private boolean isSendVia; private int maxMessageSize; @Deprecated public static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final String senderPath, final String transferDestinationPath) { return CoreMessageSender.create(factory, clientId, senderPath, transferDestinationPath, null); } public static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final String senderPath, final String transferDestinationPath, final MessagingEntityType entityType) { return CoreMessageSender.create(factory, clientId, entityType, CoreMessageSender.getDefaultLinkProperties(senderPath, transferDestinationPath, factory, entityType)); } static CompletableFuture<CoreMessageSender> create( final MessagingFactory factory, final String clientId, final MessagingEntityType entityType, final SenderLinkSettings linkSettings) { TRACE_LOGGER.info("Creating core message sender to '{}'", linkSettings.linkPath); final Connection connection = factory.getConnection(); final String sendLinkNamePrefix = "Sender".concat(TrackingUtil.TRACKING_ID_TOKEN_SEPARATOR).concat(StringUtil.getShortRandomString()); linkSettings.linkName = !StringUtil.isNullOrEmpty(connection.getRemoteContainer()) ? 
sendLinkNamePrefix.concat(TrackingUtil.TRACKING_ID_TOKEN_SEPARATOR).concat(connection.getRemoteContainer()) : sendLinkNamePrefix; final CoreMessageSender msgSender = new CoreMessageSender(factory, clientId, entityType, linkSettings); TimeoutTracker openLinkTracker = TimeoutTracker.create(factory.getOperationTimeout()); msgSender.initializeLinkOpen(openLinkTracker); CompletableFuture<Void> authenticationFuture = null; if (linkSettings.requiresAuthentication) { authenticationFuture = msgSender.sendTokenAndSetRenewTimer(false); } else { authenticationFuture = CompletableFuture.completedFuture(null); } authenticationFuture.handleAsync((v, sasTokenEx) -> { if (sasTokenEx != null) { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(sasTokenEx); TRACE_LOGGER.error("Sending SAS Token to '{}' failed.", msgSender.sendPath, cause); msgSender.linkFirstOpen.completeExceptionally(cause); } else { try { msgSender.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { msgSender.createSendLink(msgSender.linkSettings); } }); } catch (IOException ioException) { msgSender.cancelSASTokenRenewTimer(); msgSender.linkFirstOpen.completeExceptionally(new ServiceBusException(false, "Failed to create Sender, see cause for more details.", ioException)); } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); return msgSender.linkFirstOpen; } private CompletableFuture<Void> createRequestResponseLink() { synchronized (this.requestResonseLinkCreationLock) { if (this.requestResponseLinkCreationFuture == null) { this.requestResponseLinkCreationFuture = new CompletableFuture<Void>(); this.underlyingFactory.obtainRequestResponseLinkAsync(this.sendPath, this.transferDestinationPath, this.entityType).handleAsync((rrlink, ex) -> { if (ex == null) { this.requestResponseLink = rrlink; this.requestResponseLinkCreationFuture.complete(null); } else { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(ex); this.requestResponseLinkCreationFuture.completeExceptionally(cause); synchronized (this.requestResonseLinkCreationLock) { this.requestResponseLinkCreationFuture = null; } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); } return this.requestResponseLinkCreationFuture; } } private void closeRequestResponseLink() { synchronized (this.requestResonseLinkCreationLock) { if (this.requestResponseLinkCreationFuture != null) { this.requestResponseLinkCreationFuture.thenRun(() -> { this.underlyingFactory.releaseRequestResponseLink(this.sendPath, this.transferDestinationPath); this.requestResponseLink = null; }); this.requestResponseLinkCreationFuture = null; } } } private CoreMessageSender(final MessagingFactory factory, final String sendLinkName, final MessagingEntityType entityType, final SenderLinkSettings linkSettings) { super(sendLinkName); this.sendPath = linkSettings.linkPath; this.entityType = entityType; if (linkSettings.linkProperties != null) { String transferPath = (String)linkSettings.linkProperties.getOrDefault(ClientConstants.LINK_TRANSFER_DESTINATION_PROPERTY, null); if (transferPath != null && !transferPath.isEmpty()) { this.transferDestinationPath = transferPath; this.isSendVia = true; this.transferSasTokenAudienceURI = String.format(ClientConstants.SAS_TOKEN_AUDIENCE_FORMAT, factory.getHostName(), transferDestinationPath); } else { this.transferDestinationPath = null; } } this.sasTokenAudienceURI = String.format(ClientConstants.SAS_TOKEN_AUDIENCE_FORMAT, factory.getHostName(), linkSettings.linkPath); this.underlyingFactory = factory; 
this.operationTimeout = factory.getOperationTimeout(); this.linkSettings = linkSettings; this.lastKnownLinkError = null; this.lastKnownErrorReportedAt = Instant.EPOCH; this.retryPolicy = factory.getRetryPolicy(); this.pendingSendLock = new Object(); this.pendingSendsData = new ConcurrentHashMap<String, SendWorkItem<DeliveryState>>(); this.pendingSends = new PriorityQueue<WeightedDeliveryTag>(1000, new DeliveryTagComparator()); this.linkCredit = 0; this.linkClose = new CompletableFuture<Void>(); this.sendLinkReopenFuture = null; this.isSendLoopRunning = false; this.sendWork = new DispatchHandler() { @Override public void onEvent() { CoreMessageSender.this.processSendWork(); } }; } public String getSendPath() { return this.sendPath; } private static String generateRandomDeliveryTag() { return UUID.randomUUID().toString().replace("-", StringUtil.EMPTY); } CompletableFuture<DeliveryState> sendCoreAsync( final byte[] bytes, final int arrayOffset, final int messageFormat, final TransactionContext transaction) { this.throwIfClosed(this.lastKnownLinkError); TRACE_LOGGER.debug("Sending message to '{}'", this.sendPath); String deliveryTag = CoreMessageSender.generateRandomDeliveryTag(); CompletableFuture<DeliveryState> onSendFuture = new CompletableFuture<DeliveryState>(); SendWorkItem<DeliveryState> sendWorkItem = new SendWorkItem<DeliveryState>(bytes, arrayOffset, messageFormat, deliveryTag, transaction, onSendFuture, this.operationTimeout); this.enlistSendRequest(deliveryTag, sendWorkItem, false); this.scheduleSendTimeout(sendWorkItem); return onSendFuture; } private void scheduleSendTimeout(SendWorkItem<DeliveryState> sendWorkItem) { ScheduledFuture<?> timeoutTask = Timer.schedule(() -> { if (!sendWorkItem.getWork().isDone()) { TRACE_LOGGER.warn("Delivery '{}' to '{}' did not receive ack from service. Throwing timeout.", sendWorkItem.getDeliveryTag(), CoreMessageSender.this.sendPath); CoreMessageSender.this.pendingSendsData.remove(sendWorkItem.getDeliveryTag()); CoreMessageSender.this.throwSenderTimeout(sendWorkItem.getWork(), sendWorkItem.getLastKnownException()); } }, sendWorkItem.getTimeoutTracker().remaining(), TimerType.OneTimeRun); sendWorkItem.setTimeoutTask(timeoutTask); } private void enlistSendRequest(String deliveryTag, SendWorkItem<DeliveryState> sendWorkItem, boolean isRetrySend) { synchronized (this.pendingSendLock) { this.pendingSendsData.put(deliveryTag, sendWorkItem); this.pendingSends.offer(new WeightedDeliveryTag(deliveryTag, isRetrySend ? 
1 : 0)); if (!this.isSendLoopRunning) { try { this.underlyingFactory.scheduleOnReactorThread(this.sendWork); } catch (IOException ioException) { AsyncUtil.completeFutureExceptionally(sendWorkItem.getWork(), new ServiceBusException(false, "Send failed while dispatching to Reactor, see cause for more details.", ioException)); } } } } private void reSendAsync(String deliveryTag, SendWorkItem<DeliveryState> retryingSendWorkItem, boolean reuseDeliveryTag) { if (!retryingSendWorkItem.getWork().isDone() && retryingSendWorkItem.cancelTimeoutTask(false)) { Duration remainingTime = retryingSendWorkItem.getTimeoutTracker().remaining(); if (!remainingTime.isNegative() && !remainingTime.isZero()) { if (!reuseDeliveryTag) { deliveryTag = CoreMessageSender.generateRandomDeliveryTag(); retryingSendWorkItem.setDeliveryTag(deliveryTag); } this.enlistSendRequest(deliveryTag, retryingSendWorkItem, true); this.scheduleSendTimeout(retryingSendWorkItem); } } } public CompletableFuture<Void> sendAsync(final Iterable<Message> messages, TransactionContext transaction) { if (messages == null || IteratorUtil.sizeEquals(messages, 0)) { throw new IllegalArgumentException("Sending Empty batch of messages is not allowed."); } TRACE_LOGGER.debug("Sending a batch of messages to '{}'", this.sendPath); Message firstMessage = messages.iterator().next(); if (IteratorUtil.sizeEquals(messages, 1)) { return this.sendAsync(firstMessage, transaction); } Message batchMessage = Proton.message(); batchMessage.setMessageAnnotations(firstMessage.getMessageAnnotations()); byte[] bytes = null; int byteArrayOffset = 0; try { Pair<byte[], Integer> encodedPair = Util.encodeMessageToMaxSizeArray(batchMessage, this.maxMessageSize); bytes = encodedPair.getFirstItem(); byteArrayOffset = encodedPair.getSecondItem(); for (Message amqpMessage: messages) { Message messageWrappedByData = Proton.message(); encodedPair = Util.encodeMessageToOptimalSizeArray(amqpMessage, this.maxMessageSize); messageWrappedByData.setBody(new Data(new Binary(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem()))); int encodedSize = Util.encodeMessageToCustomArray(messageWrappedByData, bytes, byteArrayOffset, this.maxMessageSize - byteArrayOffset - 1); byteArrayOffset = byteArrayOffset + encodedSize; } } catch (PayloadSizeExceededException ex) { TRACE_LOGGER.error("Payload size of batch of messages exceeded limit", ex); final CompletableFuture<Void> sendTask = new CompletableFuture<Void>(); sendTask.completeExceptionally(ex); return sendTask; } return this.sendCoreAsync(bytes, byteArrayOffset, AmqpConstants.AMQP_BATCH_MESSAGE_FORMAT, transaction).thenAccept((x) -> { /*Do nothing*/ }); } public CompletableFuture<Void> sendAsync(Message msg, TransactionContext transaction) { return this.sendAndReturnDeliveryStateAsync(msg, transaction).thenAccept((x) -> { /*Do nothing*/ }); } CompletableFuture<DeliveryState> sendAndReturnDeliveryStateAsync(Message msg, TransactionContext transaction) { try { Pair<byte[], Integer> encodedPair = Util.encodeMessageToOptimalSizeArray(msg, this.maxMessageSize); return this.sendCoreAsync(encodedPair.getFirstItem(), encodedPair.getSecondItem(), DeliveryImpl.DEFAULT_MESSAGE_FORMAT, transaction); } catch (PayloadSizeExceededException exception) { TRACE_LOGGER.error("Payload size of message exceeded limit", exception); final CompletableFuture<DeliveryState> sendTask = new CompletableFuture<DeliveryState>(); sendTask.completeExceptionally(exception); return sendTask; } } @Override public void onOpenComplete(Exception completionException) { if 
(completionException == null) { this.maxMessageSize = Util.getMaxMessageSizeFromLink(this.sendLink); this.lastKnownLinkError = null; this.retryPolicy.resetRetryCount(this.getClientId()); if (this.sendLinkReopenFuture != null && !this.sendLinkReopenFuture.isDone()) { AsyncUtil.completeFuture(this.sendLinkReopenFuture, null); } if (!this.linkFirstOpen.isDone()) { TRACE_LOGGER.info("Opened send link to '{}'", this.sendPath); AsyncUtil.completeFuture(this.linkFirstOpen, this); } else { synchronized (this.pendingSendLock) { if (!this.pendingSendsData.isEmpty()) { LinkedList<String> unacknowledgedSends = new LinkedList<String>(); unacknowledgedSends.addAll(this.pendingSendsData.keySet()); if (unacknowledgedSends.size() > 0) { Iterator<String> reverseReader = unacknowledgedSends.iterator(); while (reverseReader.hasNext()) { String unacknowledgedSend = reverseReader.next(); if (this.pendingSendsData.get(unacknowledgedSend).isWaitingForAck()) { this.pendingSends.offer(new WeightedDeliveryTag(unacknowledgedSend, 1)); } } } unacknowledgedSends.clear(); } } } } else { this.cancelSASTokenRenewTimer(); if (!this.linkFirstOpen.isDone()) { TRACE_LOGGER.error("Opening send link '{}' to '{}' failed", this.sendLink.getName(), this.sendPath, completionException); this.setClosed(); ExceptionUtil.completeExceptionally(this.linkFirstOpen, completionException, this, true); } if (this.sendLinkReopenFuture != null && !this.sendLinkReopenFuture.isDone()) { TRACE_LOGGER.warn("Opening send link '{}' to '{}' failed", this.sendLink.getName(), this.sendPath, completionException); AsyncUtil.completeFutureExceptionally(this.sendLinkReopenFuture, completionException); } } } @Override public void onClose(ErrorCondition condition) { Exception completionException = condition != null ? ExceptionUtil.toException(condition) : new ServiceBusException(ClientConstants.DEFAULT_IS_TRANSIENT, "The entity has been closed due to transient failures (underlying link closed), please retry the operation."); this.onError(completionException); } @Override public void onError(Exception completionException) { this.linkCredit = 0; if (this.getIsClosingOrClosed()) { Exception failureException = completionException == null ? new OperationCancelledException("Send cancelled as the Sender instance is Closed before the sendOperation completed.") : completionException; this.clearAllPendingSendsWithException(failureException); TRACE_LOGGER.info("Send link to '{}' closed", this.sendPath); AsyncUtil.completeFuture(this.linkClose, null); return; } else { this.underlyingFactory.deregisterForConnectionError(this.sendLink); this.lastKnownLinkError = completionException; this.lastKnownErrorReportedAt = Instant.now(); this.onOpenComplete(completionException); if (completionException != null && (!(completionException instanceof ServiceBusException) || !((ServiceBusException) completionException).getIsTransient())) { TRACE_LOGGER.warn("Send link '{}' to '{}' closed. 
Failing all pending send requests.", this.sendLink.getName(), this.sendPath); this.clearAllPendingSendsWithException(completionException); } else { final Map.Entry<String, SendWorkItem<DeliveryState>> pendingSendEntry = IteratorUtil.getFirst(this.pendingSendsData.entrySet()); if (pendingSendEntry != null && pendingSendEntry.getValue() != null) { final TimeoutTracker tracker = pendingSendEntry.getValue().getTimeoutTracker(); if (tracker != null) { final Duration nextRetryInterval = this.retryPolicy.getNextRetryInterval(this.getClientId(), completionException, tracker.remaining()); if (nextRetryInterval != null) { TRACE_LOGGER.warn("Send link '{}' to '{}' closed. Will retry link creation after '{}'.", this.sendLink.getName(), this.sendPath, nextRetryInterval); Timer.schedule(() -> {CoreMessageSender.this.ensureLinkIsOpen();}, nextRetryInterval, TimerType.OneTimeRun); } } } } } } @Override public void onSendComplete(final Delivery delivery) { DeliveryState outcome = delivery.getRemoteState(); final String deliveryTag = new String(delivery.getTag(), UTF_8); TRACE_LOGGER.debug("Received ack for delivery. path:{}, linkName:{}, deliveryTag:{}, outcome:{}", CoreMessageSender.this.sendPath, this.sendLink.getName(), deliveryTag, outcome); final SendWorkItem<DeliveryState> pendingSendWorkItem = this.pendingSendsData.remove(deliveryTag); if (pendingSendWorkItem != null) { if (outcome instanceof TransactionalState) { TRACE_LOGGER.trace("State of delivery is Transactional, retrieving outcome: {}", outcome); Outcome transactionalOutcome = ((TransactionalState) outcome).getOutcome(); if (transactionalOutcome instanceof DeliveryState) { outcome = (DeliveryState) transactionalOutcome; } else { this.cleanupFailedSend(pendingSendWorkItem, new ServiceBusException(false, "Unknown delivery state: " + outcome.toString())); return; } } if (outcome instanceof Accepted) { this.lastKnownLinkError = null; this.retryPolicy.resetRetryCount(this.getClientId()); pendingSendWorkItem.cancelTimeoutTask(false); AsyncUtil.completeFuture(pendingSendWorkItem.getWork(), outcome); } else if (outcome instanceof Declared) { AsyncUtil.completeFuture(pendingSendWorkItem.getWork(), outcome); } else if (outcome instanceof Rejected) { Rejected rejected = (Rejected) outcome; ErrorCondition error = rejected.getError(); Exception exception = ExceptionUtil.toException(error); if (ExceptionUtil.isGeneralError(error.getCondition())) { this.lastKnownLinkError = exception; this.lastKnownErrorReportedAt = Instant.now(); } Duration retryInterval = this.retryPolicy.getNextRetryInterval( this.getClientId(), exception, pendingSendWorkItem.getTimeoutTracker().remaining()); if (retryInterval == null) { this.cleanupFailedSend(pendingSendWorkItem, exception); } else { TRACE_LOGGER.warn("Send failed for delivery '{}'. Will retry after '{}'", deliveryTag, retryInterval); pendingSendWorkItem.setLastKnownException(exception); Timer.schedule(() -> {CoreMessageSender.this.reSendAsync(deliveryTag, pendingSendWorkItem, false);}, retryInterval, TimerType.OneTimeRun); } } else if (outcome instanceof Released) { this.cleanupFailedSend(pendingSendWorkItem, new OperationCancelledException(outcome.toString())); } else { this.cleanupFailedSend(pendingSendWorkItem, new ServiceBusException(false, outcome.toString())); } } else { TRACE_LOGGER.warn("Delivery mismatch. 
path:{}, linkName:{}, delivery:{}", this.sendPath, this.sendLink.getName(), deliveryTag); } } private void clearAllPendingSendsWithException(Throwable failureException) { synchronized (this.pendingSendLock) { for (Map.Entry<String, SendWorkItem<DeliveryState>> pendingSend: this.pendingSendsData.entrySet()) { this.cleanupFailedSend(pendingSend.getValue(), failureException); } this.pendingSendsData.clear(); this.pendingSends.clear(); } } private void cleanupFailedSend(final SendWorkItem<DeliveryState> failedSend, final Throwable exception) { failedSend.cancelTimeoutTask(false); ExceptionUtil.completeExceptionally(failedSend.getWork(), exception, this, true); } private static SenderLinkSettings getDefaultLinkProperties(String sendPath, String transferDestinationPath, MessagingFactory underlyingFactory, MessagingEntityType entityType) { SenderLinkSettings linkSettings = new SenderLinkSettings(); linkSettings.linkPath = sendPath; final Target target = new Target(); target.setAddress(sendPath); linkSettings.target = target; linkSettings.source = new Source(); linkSettings.settleMode = SenderSettleMode.UNSETTLED; linkSettings.requiresAuthentication = true; Map<Symbol, Object> linkProperties = new HashMap<>(); linkProperties.put(ClientConstants.LINK_TIMEOUT_PROPERTY, UnsignedInteger.valueOf(Util.adjustServerTimeout(underlyingFactory.getOperationTimeout()).toMillis())); if (entityType != null) { linkProperties.put(ClientConstants.ENTITY_TYPE_PROPERTY, entityType.getIntValue()); } if (transferDestinationPath != null && !transferDestinationPath.isEmpty()) { linkProperties.put(ClientConstants.LINK_TRANSFER_DESTINATION_PROPERTY, transferDestinationPath); } linkSettings.linkProperties = linkProperties; return linkSettings; } private void createSendLink(SenderLinkSettings linkSettings) { TRACE_LOGGER.info("Creating send link to '{}'", this.sendPath); final Connection connection = this.underlyingFactory.getConnection(); final Session session = connection.session(); session.setOutgoingWindow(Integer.MAX_VALUE); session.open(); BaseHandler.setHandler(session, new SessionHandler(sendPath)); final Sender sender = session.sender(linkSettings.linkName); sender.setTarget(linkSettings.target); sender.setSource(linkSettings.source); sender.setProperties(linkSettings.linkProperties); TRACE_LOGGER.debug("Send link settle mode '{}'", linkSettings.settleMode); sender.setSenderSettleMode(linkSettings.settleMode); SendLinkHandler handler = new SendLinkHandler(CoreMessageSender.this); BaseHandler.setHandler(sender, handler); sender.open(); this.sendLink = sender; this.underlyingFactory.registerForConnectionError(this.sendLink); } CompletableFuture<Void> sendTokenAndSetRenewTimer(boolean retryOnFailure) { if (this.getIsClosingOrClosed()) { return CompletableFuture.completedFuture(null); } else { CompletableFuture<ScheduledFuture<?>> sendTokenFuture = this.underlyingFactory.sendSecurityTokenAndSetRenewTimer(this.sasTokenAudienceURI, retryOnFailure, () -> this.sendTokenAndSetRenewTimer(true)); CompletableFuture<Void> sasTokenFuture = sendTokenFuture.thenAccept((f) -> {this.sasTokenRenewTimerFuture = f;}); if (this.transferDestinationPath!= null && !this.transferDestinationPath.isEmpty()) { CompletableFuture<Void> transferSendTokenFuture = this.underlyingFactory.sendSecurityToken(this.transferSasTokenAudienceURI); return CompletableFuture.allOf(sasTokenFuture, transferSendTokenFuture); } return sasTokenFuture; } } private void cancelSASTokenRenewTimer() { if (this.sasTokenRenewTimerFuture != null && 
!this.sasTokenRenewTimerFuture.isDone()) { this.sasTokenRenewTimerFuture.cancel(true); TRACE_LOGGER.debug("Cancelled SAS Token renew timer"); } } private void initializeLinkOpen(TimeoutTracker timeout) { this.linkFirstOpen = new CompletableFuture<CoreMessageSender>(); Timer.schedule( () -> { if (!CoreMessageSender.this.linkFirstOpen.isDone()) { CoreMessageSender.this.closeInternals(false); CoreMessageSender.this.setClosed(); Exception operationTimedout = new TimeoutException( String.format(Locale.US, "Open operation on SendLink(%s) on Entity(%s) timed out at %s.", CoreMessageSender.this.sendLink.getName(), CoreMessageSender.this.getSendPath(), ZonedDateTime.now().toString()), CoreMessageSender.this.lastKnownErrorReportedAt.isAfter(Instant.now().minusSeconds(ClientConstants.SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS)) ? CoreMessageSender.this.lastKnownLinkError : null); TRACE_LOGGER.warn(operationTimedout.getMessage()); ExceptionUtil.completeExceptionally(CoreMessageSender.this.linkFirstOpen, operationTimedout, CoreMessageSender.this, true); } }, timeout.remaining(), TimerType.OneTimeRun); } @Override public ErrorContext getContext() { final boolean isLinkOpened = this.linkFirstOpen != null && this.linkFirstOpen.isDone(); final String referenceId = this.sendLink != null && this.sendLink.getRemoteProperties() != null && this.sendLink.getRemoteProperties().containsKey(ClientConstants.TRACKING_ID_PROPERTY) ? this.sendLink.getRemoteProperties().get(ClientConstants.TRACKING_ID_PROPERTY).toString() : ((this.sendLink != null) ? this.sendLink.getName() : null); SenderErrorContext errorContext = new SenderErrorContext( this.underlyingFactory!=null ? this.underlyingFactory.getHostName() : null, this.sendPath, referenceId, isLinkOpened && this.sendLink != null ? this.sendLink.getCredit() : null); return errorContext; } @Override public void onFlow(final int creditIssued) { this.lastKnownLinkError = null; if (creditIssued <= 0) { return; } TRACE_LOGGER.debug("Received flow frame. 
path:{}, linkName:{}, remoteLinkCredit:{}, pendingSendsWaitingForCredit:{}, pendingSendsWaitingDelivery:{}", this.sendPath, this.sendLink.getName(), creditIssued, this.pendingSends.size(), this.pendingSendsData.size() - this.pendingSends.size()); this.linkCredit = this.linkCredit + creditIssued; this.sendWork.onEvent(); } private synchronized CompletableFuture<Void> ensureLinkIsOpen() { if (!(this.sendLink.getLocalState() == EndpointState.ACTIVE && this.sendLink.getRemoteState() == EndpointState.ACTIVE)) { if (this.sendLinkReopenFuture == null || this.sendLinkReopenFuture.isDone()) { TRACE_LOGGER.info("Recreating send link to '{}'", this.sendPath); this.retryPolicy.incrementRetryCount(CoreMessageSender.this.getClientId()); this.sendLinkReopenFuture = new CompletableFuture<>(); final CompletableFuture<Void> linkReopenFutureThatCanBeCancelled = this.sendLinkReopenFuture; Timer.schedule( () -> { if (!linkReopenFutureThatCanBeCancelled.isDone()) { CoreMessageSender.this.cancelSASTokenRenewTimer(); Exception operationTimedout = new TimeoutException( String.format(Locale.US, "%s operation on SendLink(%s) to path(%s) timed out at %s.", "Open", CoreMessageSender.this.sendLink.getName(), CoreMessageSender.this.sendPath, ZonedDateTime.now())); TRACE_LOGGER.warn(operationTimedout.getMessage()); linkReopenFutureThatCanBeCancelled.completeExceptionally(operationTimedout); } } , CoreMessageSender.LINK_REOPEN_TIMEOUT , TimerType.OneTimeRun); this.cancelSASTokenRenewTimer(); CompletableFuture<Void> authenticationFuture = null; if (linkSettings.requiresAuthentication) { authenticationFuture = this.sendTokenAndSetRenewTimer(false); } else { authenticationFuture = CompletableFuture.completedFuture(null); } authenticationFuture.handleAsync((v, sendTokenEx) -> { if (sendTokenEx != null) { Throwable cause = ExceptionUtil.extractAsyncCompletionCause(sendTokenEx); TRACE_LOGGER.error("Sending SAS Token to '{}' failed.", this.sendPath, cause); this.sendLinkReopenFuture.completeExceptionally(sendTokenEx); this.clearAllPendingSendsWithException(sendTokenEx); } else { try { this.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { CoreMessageSender.this.createSendLink(CoreMessageSender.this.linkSettings); } }); } catch (IOException ioEx) { this.sendLinkReopenFuture.completeExceptionally(ioEx); } } return null; }, MessagingFactory.INTERNAL_THREAD_POOL); } return this.sendLinkReopenFuture; } else { return CompletableFuture.completedFuture(null); } } private void throwSenderTimeout(CompletableFuture<DeliveryState> pendingSendWork, Exception lastKnownException) { Exception cause = lastKnownException; if (lastKnownException == null && this.lastKnownLinkError != null) { cause = this.lastKnownErrorReportedAt.isAfter(Instant.now().minusMillis(this.operationTimeout.toMillis())) ? this.lastKnownLinkError : null; } boolean isClientSideTimeout = (cause == null || !(cause instanceof ServiceBusException)); ServiceBusException exception = isClientSideTimeout ? 
new TimeoutException(String.format(Locale.US, "%s %s %s.", CoreMessageSender.SEND_TIMED_OUT, " at ", ZonedDateTime.now(), cause)) : (ServiceBusException) cause; TRACE_LOGGER.error("Send timed out", exception); ExceptionUtil.completeExceptionally(pendingSendWork, exception, this, true); } private void scheduleLinkCloseTimeout(final TimeoutTracker timeout) { Timer.schedule( () -> { if (!linkClose.isDone()) { Exception operationTimedout = new TimeoutException(String.format(Locale.US, "%s operation on Send Link(%s) timed out at %s", "Close", CoreMessageSender.this.sendLink.getName(), ZonedDateTime.now())); TRACE_LOGGER.warn(operationTimedout.getMessage()); ExceptionUtil.completeExceptionally(linkClose, operationTimedout, CoreMessageSender.this, true); } }, timeout.remaining(), TimerType.OneTimeRun); } @Override protected CompletableFuture<Void> onClose() { this.closeInternals(true); return this.linkClose; } private void closeInternals(boolean waitForCloseCompletion) { if (!this.getIsClosed()) { if (this.sendLink != null && this.sendLink.getLocalState() != EndpointState.CLOSED) { try { this.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() { @Override public void onEvent() { if (CoreMessageSender.this.sendLink != null && CoreMessageSender.this.sendLink.getLocalState() != EndpointState.CLOSED) { TRACE_LOGGER.info("Closing send link to '{}'", CoreMessageSender.this.sendPath); CoreMessageSender.this.underlyingFactory.deregisterForConnectionError(CoreMessageSender.this.sendLink); CoreMessageSender.this.sendLink.close(); if (waitForCloseCompletion) { CoreMessageSender.this.scheduleLinkCloseTimeout(TimeoutTracker.create(CoreMessageSender.this.operationTimeout)); } else { AsyncUtil.completeFuture(CoreMessageSender.this.linkClose, null); } } } }); } catch (IOException e) { AsyncUtil.completeFutureExceptionally(this.linkClose, e); } } else { AsyncUtil.completeFuture(this.linkClose, null); } this.cancelSASTokenRenewTimer(); this.closeRequestResponseLink(); } } private static class WeightedDeliveryTag { private final String deliveryTag; private final int priority; WeightedDeliveryTag(final String deliveryTag, final int priority) { this.deliveryTag = deliveryTag; this.priority = priority; } public String getDeliveryTag() { return this.deliveryTag; } public int getPriority() { return this.priority; } } private static class DeliveryTagComparator implements Comparator<WeightedDeliveryTag> { @Override public int compare(WeightedDeliveryTag deliveryTag0, WeightedDeliveryTag deliveryTag1) { return deliveryTag1.getPriority() - deliveryTag0.getPriority(); } } public CompletableFuture<long[]> scheduleMessageAsync(Message[] messages, TransactionContext transaction, Duration timeout) { TRACE_LOGGER.debug("Sending '{}' scheduled message(s) to '{}'", messages.length, this.sendPath); return this.createRequestResponseLink().thenComposeAsync((v) -> { HashMap requestBodyMap = new HashMap(); Collection<HashMap> messageList = new LinkedList<HashMap>(); for (Message message : messages) { HashMap messageEntry = new HashMap(); Pair<byte[], Integer> encodedPair; try { encodedPair = Util.encodeMessageToOptimalSizeArray(message, this.maxMessageSize); } catch(PayloadSizeExceededException exception) { TRACE_LOGGER.error("Payload size of message exceeded limit", exception); final CompletableFuture<long[]> scheduleMessagesTask = new CompletableFuture<long[]>(); scheduleMessagesTask.completeExceptionally(exception); return scheduleMessagesTask; } messageEntry.put(ClientConstants.REQUEST_RESPONSE_MESSAGE, new 
Binary(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem())); messageEntry.put(ClientConstants.REQUEST_RESPONSE_MESSAGE_ID, message.getMessageId()); String sessionId = message.getGroupId(); if (!StringUtil.isNullOrEmpty(sessionId)) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_SESSION_ID, sessionId); } Object partitionKey = message.getMessageAnnotations().getValue().get(Symbol.valueOf(ClientConstants.PARTITIONKEYNAME)); if (partitionKey != null && !((String)partitionKey).isEmpty()) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_PARTITION_KEY, (String)partitionKey); } Object viaPartitionKey = message.getMessageAnnotations().getValue().get(Symbol.valueOf(ClientConstants.VIAPARTITIONKEYNAME)); if (viaPartitionKey != null && !((String)viaPartitionKey).isEmpty()) { messageEntry.put(ClientConstants.REQUEST_RESPONSE_VIA_PARTITION_KEY, (String)viaPartitionKey); } messageList.add(messageEntry); } requestBodyMap.put(ClientConstants.REQUEST_RESPONSE_MESSAGES, messageList); Message requestMessage = RequestResponseUtils.createRequestMessageFromPropertyBag(ClientConstants.REQUEST_RESPONSE_SCHEDULE_MESSAGE_OPERATION, requestBodyMap, Util.adjustServerTimeout(timeout), this.sendLink.getName()); CompletableFuture<Message> responseFuture = this.requestResponseLink.requestAysnc(requestMessage, transaction, timeout); return responseFuture.thenComposeAsync((responseMessage) -> { CompletableFuture<long[]> returningFuture = new CompletableFuture<long[]>(); int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); if (statusCode == ClientConstants.REQUEST_RESPONSE_OK_STATUS_CODE) { long[] sequenceNumbers = (long[])RequestResponseUtils.getResponseBody(responseMessage).get(ClientConstants.REQUEST_RESPONSE_SEQUENCE_NUMBERS); if (TRACE_LOGGER.isDebugEnabled()) { TRACE_LOGGER.debug("Scheduled messages sent. 
Received sequence numbers '{}'", Arrays.toString(sequenceNumbers)); } returningFuture.complete(sequenceNumbers); } else { Exception scheduleException = RequestResponseUtils.genereateExceptionFromResponse(responseMessage); TRACE_LOGGER.error("Sending scheduled messages to '{}' failed.", this.sendPath, scheduleException); returningFuture.completeExceptionally(scheduleException); } return returningFuture; }, MessagingFactory.INTERNAL_THREAD_POOL); }, MessagingFactory.INTERNAL_THREAD_POOL); } public CompletableFuture<Void> cancelScheduledMessageAsync(Long[] sequenceNumbers, Duration timeout) { if (TRACE_LOGGER.isDebugEnabled()) { TRACE_LOGGER.debug("Cancelling scheduled message(s) '{}' to '{}'", Arrays.toString(sequenceNumbers), this.sendPath); } return this.createRequestResponseLink().thenComposeAsync((v) -> { HashMap requestBodyMap = new HashMap(); requestBodyMap.put(ClientConstants.REQUEST_RESPONSE_SEQUENCE_NUMBERS, sequenceNumbers); Message requestMessage = RequestResponseUtils.createRequestMessageFromPropertyBag(ClientConstants.REQUEST_RESPONSE_CANCEL_CHEDULE_MESSAGE_OPERATION, requestBodyMap, Util.adjustServerTimeout(timeout), this.sendLink.getName()); CompletableFuture<Message> responseFuture = this.requestResponseLink.requestAysnc(requestMessage, TransactionContext.NULL_TXN, timeout); return responseFuture.thenComposeAsync((responseMessage) -> { CompletableFuture<Void> returningFuture = new CompletableFuture<Void>(); int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); if (statusCode == ClientConstants.REQUEST_RESPONSE_OK_STATUS_CODE) { TRACE_LOGGER.debug("Cancelled scheduled messages in '{}'", this.sendPath); returningFuture.complete(null); } else { Exception failureException = RequestResponseUtils.genereateExceptionFromResponse(responseMessage); TRACE_LOGGER.error("Cancelling scheduled messages in '{}' failed.", this.sendPath, failureException); returningFuture.completeExceptionally(failureException); } return returningFuture; }, MessagingFactory.INTERNAL_THREAD_POOL); }, MessagingFactory.INTERNAL_THREAD_POOL); } public CompletableFuture<Collection<Message>> peekMessagesAsync(long fromSequenceNumber, int messageCount) { TRACE_LOGGER.debug("Peeking '{}' messages in '{}' from sequence number '{}'", messageCount, this.sendPath, fromSequenceNumber); return this.createRequestResponseLink().thenComposeAsync((v) -> CommonRequestResponseOperations.peekMessagesAsync(this.requestResponseLink, this.operationTimeout, fromSequenceNumber, messageCount, null, this.sendLink.getName()), MessagingFactory.INTERNAL_THREAD_POOL); } }
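In the sender code above, `DeliveryTagComparator` sorts `WeightedDeliveryTag`s by descending priority, so a `PriorityQueue` built on it hands out link credit to retried deliveries before brand-new ones. A minimal standalone sketch of that ordering (the tag values and priorities here are hypothetical):

```java
import java.util.PriorityQueue;

public final class PriorityOrderDemo {
    public static void main(String[] args) {
        // compare(t0, t1) = t1.priority - t0.priority, i.e. descending order,
        // so the queue head is always the HIGHEST-priority delivery tag.
        PriorityQueue<int[]> queue = new PriorityQueue<>((a, b) -> b[0] - a[0]);
        queue.offer(new int[] {0, 100}); // {priority, tag}: a fresh send
        queue.offer(new int[] {1, 101}); // a retried send is enqueued with priority 1
        System.out.println(queue.poll()[1]); // 101 -> the retry is served first
    }
}
```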
Why use an `ArrayList` here?
private Collection<String> parseResultColumnNames(final Result result) { Collection<String> row = new ArrayList<>(result.listCells().size() + 2); row.add(ROW_KEY_COLUMN_NAME); for (Cell each : result.listCells()) { String column = new String(CellUtil.cloneQualifier(each), StandardCharsets.UTF_8); row.add(column); } row.add(TIMESTAMP_COLUMN_NAME); return row; }
Collection<String> row = new ArrayList<>(result.listCells().size() + 2);
private Collection<String> parseResultColumnNames(final Result result) { Collection<String> columnNames = new LinkedList<>(); columnNames.add(ROW_KEY_COLUMN_NAME); for (Cell each : result.listCells()) { String column = new String(CellUtil.cloneQualifier(each), StandardCharsets.UTF_8); columnNames.add(column); } columnNames.add(TIMESTAMP_COLUMN_NAME); return columnNames; }
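Both variants of `parseResultColumnNames` are correct; the question is allocation strategy. `new ArrayList<>(n + 2)` makes one exactly-sized backing-array allocation because the final count is known up front, while `LinkedList` drops the capacity arithmetic at the cost of one node per element. A small self-contained sketch of the trade-off (the helper class and method names are hypothetical; only the `rowKey`/`timestamp` sentinel columns come from the code above):

```java
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

public final class ColumnNameCollecting {
    // Pre-sized ArrayList: a single backing-array allocation, no resizing.
    static Collection<String> preSized(List<String> qualifiers) {
        Collection<String> names = new ArrayList<>(qualifiers.size() + 2);
        names.add("rowKey");
        names.addAll(qualifiers);
        names.add("timestamp");
        return names;
    }

    // LinkedList: no capacity bookkeeping; fine for small, append-then-iterate-once results.
    static Collection<String> linked(List<String> qualifiers) {
        Collection<String> names = new LinkedList<>();
        names.add("rowKey");
        names.addAll(qualifiers);
        names.add("timestamp");
        return names;
    }
}
```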
class HBaseGetResultSet implements HBaseQueryResultSet { private static final String ROW_KEY_COLUMN_NAME = "rowKey"; private static final String CONTENT_COLUMN_NAME = "content"; private static final String TIMESTAMP_COLUMN_NAME = "timestamp"; private SelectStatementContext statementContext; private long resultNum; private long maxLimitResultSize; @Getter private Collection<String> columnNames = Collections.singleton(ROW_KEY_COLUMN_NAME); private Result compensateResult; private Iterator<Result> rows; /** * Init data. * * @param sqlStatementContext SQL statement context */ @Override public void init(final SQLStatementContext sqlStatementContext) { statementContext = (SelectStatementContext) sqlStatementContext; initResultNum(sqlStatementContext); HBaseOperation operation = HBaseOperationConverterFactory.newInstance(sqlStatementContext).convert(); long startMills = System.currentTimeMillis(); if (operation.getOperation() instanceof Get) { executeGetRequest(operation); } else if (operation.getOperation() instanceof HBaseSelectOperation) { executeGetsRequest(operation); } else { executeScanRequest(operation); } logExecuteTime(startMills); } private void initResultNum(final SQLStatementContext sqlStatementContext) { resultNum = 0; maxLimitResultSize = HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.MAX_SCAN_LIMIT_SIZE); Optional<PaginationValueSegment> paginationSegment = ((MySQLSelectStatement) sqlStatementContext.getSqlStatement()).getLimit().flatMap(LimitSegment::getRowCount); paginationSegment.ifPresent(optional -> maxLimitResultSize = Math.min(maxLimitResultSize, ((NumberLiteralLimitValueSegment) optional).getValue())); } private void executeGetRequest(final HBaseOperation operation) { Result result = HBaseExecutor.executeQuery(operation.getTableName(), table -> table.get((Get) operation.getOperation())); Collection<Result> rows = 0 == result.rawCells().length ? Collections.emptyList() : Collections.singleton(result); this.rows = rows.iterator(); setColumnNames(this.rows); } private void executeGetsRequest(final HBaseOperation operation) { List<Result> results = Arrays.asList(HBaseExecutor.executeQuery(operation.getTableName(), table -> table.get(((HBaseSelectOperation) operation.getOperation()).getGets()))); results = results.stream().filter(result -> result.rawCells().length > 0).collect(Collectors.toList()); if (statementContext.getOrderByContext().isGenerated()) { results.sort(this::compareResult); } rows = results.iterator(); setColumnNames(rows); } private int compareResult(final Result result1, final Result result2) { return Bytes.toString(result1.getRow()).compareTo(Bytes.toString(result2.getRow())); } private void executeScanRequest(final HBaseOperation hbaseOperation) { Scan scan = (Scan) hbaseOperation.getOperation(); scan.setLimit((int) maxLimitResultSize); ResultScanner resultScanner = HBaseExecutor.executeQuery(hbaseOperation.getTableName(), table -> table.getScanner(scan)); rows = resultScanner.iterator(); setColumnNames(rows); } private void setColumnNames(final Iterator<Result> rows) { if (rows.hasNext()) { compensateResult = rows.next(); } columnNames = null == compensateResult ? 
Arrays.asList(ROW_KEY_COLUMN_NAME, CONTENT_COLUMN_NAME) : parseResultColumnNames(compensateResult); } private Map<String, String> parseResult(final Result result) { Map<String, String> row = new CaseInsensitiveMap<>(); row.put(ROW_KEY_COLUMN_NAME, Bytes.toString(result.getRow())); Long timestamp = null; for (Cell each : result.listCells()) { String column = new String(CellUtil.cloneQualifier(each), StandardCharsets.UTF_8); String value = new String(CellUtil.cloneValue(each), StandardCharsets.UTF_8); if (null == timestamp) { timestamp = each.getTimestamp(); } row.put(column, value); } row.put(TIMESTAMP_COLUMN_NAME, String.valueOf(timestamp)); return row; } private void logExecuteTime(final long startMills) { long endMills = System.currentTimeMillis(); String tableName = statementContext.getSqlStatement().getFrom() instanceof SimpleTableSegment ? ((SimpleTableSegment) statementContext.getSqlStatement().getFrom()).getTableName().getIdentifier().getValue() : statementContext.getSqlStatement().getFrom().toString(); String whereClause = getWhereClause(); if (endMills - startMills > HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.EXECUTE_TIME_OUT)) { log.info(String.format("query hbase table: %s, where case: %s , query %dms time out", tableName, whereClause, endMills - startMills)); } else { log.info(String.format("query hbase table: %s, where case: %s , execute time: %dms", tableName, whereClause, endMills - startMills)); } } private String getWhereClause() { if (!statementContext.getSqlStatement().getWhere().isPresent()) { return ""; } StringBuilder result = new StringBuilder(); ExpressionSegment expressionSegment = statementContext.getSqlStatement().getWhere().get().getExpr(); if (expressionSegment instanceof BetweenExpression) { result.append(((BetweenExpression) expressionSegment).getBetweenExpr()); } else if (expressionSegment instanceof BinaryOperationExpression) { result.append(expressionSegment.getText()); } return result.toString(); } @Override public boolean next() { return resultNum < maxLimitResultSize && (rows.hasNext() || null != compensateResult); } @Override public Collection<Object> getRowData() { Map<String, String> row; if (null == compensateResult) { row = parseResult(rows.next()); } else { row = parseResult(compensateResult); compensateResult = null; } resultNum++; return columnNames.stream().map(each -> row.getOrDefault(each, "")).collect(Collectors.toList()); } @Override public Class<MySQLSelectStatement> getType() { return MySQLSelectStatement.class; } }
class HBaseGetResultSet implements HBaseQueryResultSet { private static final String ROW_KEY_COLUMN_NAME = "rowKey"; private static final String CONTENT_COLUMN_NAME = "content"; private static final String TIMESTAMP_COLUMN_NAME = "timestamp"; private SelectStatementContext statementContext; private long resultNum; private long maxLimitResultSize; @Getter private Collection<String> columnNames = Collections.singleton(ROW_KEY_COLUMN_NAME); private Result compensateResult; private Iterator<Result> rows; /** * Init data. * * @param sqlStatementContext SQL statement context */ @Override public void init(final SQLStatementContext sqlStatementContext) { statementContext = (SelectStatementContext) sqlStatementContext; initResultNum(sqlStatementContext); HBaseOperation operation = HBaseOperationConverterFactory.newInstance(sqlStatementContext).convert(); long startMills = System.currentTimeMillis(); if (operation.getOperation() instanceof Get) { executeGetRequest(operation); } else if (operation.getOperation() instanceof HBaseSelectOperation) { executeGetsRequest(operation); } else { executeScanRequest(operation); } logExecuteTime(startMills); } private void initResultNum(final SQLStatementContext sqlStatementContext) { resultNum = 0; maxLimitResultSize = HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.MAX_SCAN_LIMIT_SIZE); Optional<PaginationValueSegment> paginationSegment = ((MySQLSelectStatement) sqlStatementContext.getSqlStatement()).getLimit().flatMap(LimitSegment::getRowCount); paginationSegment.ifPresent(optional -> maxLimitResultSize = Math.min(maxLimitResultSize, ((NumberLiteralLimitValueSegment) optional).getValue())); } private void executeGetRequest(final HBaseOperation operation) { Result result = HBaseExecutor.executeQuery(operation.getTableName(), table -> table.get((Get) operation.getOperation())); Collection<Result> rows = 0 == result.rawCells().length ? Collections.emptyList() : Collections.singleton(result); this.rows = rows.iterator(); setColumnNames(this.rows); } private void executeGetsRequest(final HBaseOperation operation) { List<Result> results = Arrays.asList(HBaseExecutor.executeQuery(operation.getTableName(), table -> table.get(((HBaseSelectOperation) operation.getOperation()).getGets()))); results = results.stream().filter(result -> result.rawCells().length > 0).collect(Collectors.toList()); if (statementContext.getOrderByContext().isGenerated()) { results.sort(this::compareResult); } rows = results.iterator(); setColumnNames(rows); } private int compareResult(final Result result1, final Result result2) { return Bytes.toString(result1.getRow()).compareTo(Bytes.toString(result2.getRow())); } private void executeScanRequest(final HBaseOperation hbaseOperation) { Scan scan = (Scan) hbaseOperation.getOperation(); scan.setLimit((int) maxLimitResultSize); ResultScanner resultScanner = HBaseExecutor.executeQuery(hbaseOperation.getTableName(), table -> table.getScanner(scan)); rows = resultScanner.iterator(); setColumnNames(rows); } private void setColumnNames(final Iterator<Result> rows) { if (rows.hasNext()) { compensateResult = rows.next(); } columnNames = null == compensateResult ? 
Arrays.asList(ROW_KEY_COLUMN_NAME, CONTENT_COLUMN_NAME) : parseResultColumnNames(compensateResult); } private Map<String, String> parseResult(final Result result) { Map<String, String> row = new CaseInsensitiveMap<>(); row.put(ROW_KEY_COLUMN_NAME, Bytes.toString(result.getRow())); Long timestamp = null; for (Cell each : result.listCells()) { String column = new String(CellUtil.cloneQualifier(each), StandardCharsets.UTF_8); String value = new String(CellUtil.cloneValue(each), StandardCharsets.UTF_8); if (null == timestamp) { timestamp = each.getTimestamp(); } row.put(column, value); } row.put(TIMESTAMP_COLUMN_NAME, String.valueOf(timestamp)); return row; } private void logExecuteTime(final long startMills) { long endMills = System.currentTimeMillis(); String tableName = statementContext.getSqlStatement().getFrom() instanceof SimpleTableSegment ? ((SimpleTableSegment) statementContext.getSqlStatement().getFrom()).getTableName().getIdentifier().getValue() : statementContext.getSqlStatement().getFrom().toString(); String whereClause = getWhereClause(); if (endMills - startMills > HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.EXECUTE_TIME_OUT)) { log.info(String.format("query hbase table: %s, where case: %s , query %dms time out", tableName, whereClause, endMills - startMills)); } else { log.info(String.format("query hbase table: %s, where case: %s , execute time: %dms", tableName, whereClause, endMills - startMills)); } } private String getWhereClause() { if (!statementContext.getSqlStatement().getWhere().isPresent()) { return ""; } StringBuilder result = new StringBuilder(); ExpressionSegment expressionSegment = statementContext.getSqlStatement().getWhere().get().getExpr(); if (expressionSegment instanceof BetweenExpression) { result.append(((BetweenExpression) expressionSegment).getBetweenExpr()); } else if (expressionSegment instanceof BinaryOperationExpression) { result.append(expressionSegment.getText()); } return result.toString(); } @Override public boolean next() { return resultNum < maxLimitResultSize && (rows.hasNext() || null != compensateResult); } @Override public Collection<Object> getRowData() { Map<String, String> row; if (null == compensateResult) { row = parseResult(rows.next()); } else { row = parseResult(compensateResult); compensateResult = null; } resultNum++; return columnNames.stream().map(each -> row.getOrDefault(each, "")).collect(Collectors.toList()); } @Override public Class<MySQLSelectStatement> getType() { return MySQLSelectStatement.class; } }
I think you should be using the variant that creates a new `Closeable` context: `addCloseableContextDataLevel`, which you can use in a try-with-resources block.
private void dispatch(RoutingContext routingContext, InputStream is, VertxOutput output) { ManagedContext requestContext = beanContainer.requestContext(); requestContext.activate(); routingContext.remove(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (association != null) { association.setIdentity(QuarkusHttpUser.getSecurityIdentity(routingContext, null)); } currentVertxRequest.setCurrent(routingContext); try { Context ctx = vertx.getOrCreateContext(); HttpServerRequest request = routingContext.request(); ResteasyUriInfo uriInfo = VertxUtil.extractUriInfo(request, rootPath); ResteasyHttpHeaders headers = VertxUtil.extractHttpHeaders(request); HttpServerResponse response = request.response(); VertxHttpResponse vertxResponse = new VertxHttpResponse(request, dispatcher.getProviderFactory(), request.method(), allocator, output, routingContext); LazyHostSupplier hostSupplier = new LazyHostSupplier(request); VertxHttpRequest vertxRequest = new VertxHttpRequest(ctx, routingContext, headers, uriInfo, request.rawMethod(), hostSupplier, dispatcher.getDispatcher(), vertxResponse, requestContext, executor); vertxRequest.setInputStream(is); try { Map<Class<?>, Object> map = new HashMap<>(); map.put(SecurityContext.class, new QuarkusResteasySecurityContext(request, routingContext)); map.put(RoutingContext.class, routingContext); ResteasyContext.pushContextDataMap(map); ContextUtil.pushContext(routingContext); dispatcher.service(ctx, request, response, vertxRequest, vertxResponse, true); } catch (Failure e1) { vertxResponse.setStatus(e1.getErrorCode()); if (e1.isLoggable()) { log.error(e1); } } catch (Throwable ex) { routingContext.fail(ex); } boolean suspended = vertxRequest.getAsyncContext().isSuspended(); boolean requestContextActive = requestContext.isActive(); if (!suspended) { try { if (requestContextActive) { requestContext.terminate(); } } finally { try { vertxResponse.finish(); } catch (IOException e) { log.debug("IOException writing JAX-RS response", e); } } } else { requestContext.deactivate(); } } catch (Throwable t) { try { routingContext.fail(t); } finally { if (requestContext.isActive()) { requestContext.terminate(); } } } }
ResteasyContext.pushContextDataMap(map);
private void dispatch(RoutingContext routingContext, InputStream is, VertxOutput output) { ManagedContext requestContext = beanContainer.requestContext(); requestContext.activate(); routingContext.remove(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (association != null) { association.setIdentity(QuarkusHttpUser.getSecurityIdentity(routingContext, null)); } currentVertxRequest.setCurrent(routingContext); try { Context ctx = vertx.getOrCreateContext(); HttpServerRequest request = routingContext.request(); ResteasyUriInfo uriInfo = VertxUtil.extractUriInfo(request, rootPath); ResteasyHttpHeaders headers = VertxUtil.extractHttpHeaders(request); HttpServerResponse response = request.response(); VertxHttpResponse vertxResponse = new VertxHttpResponse(request, dispatcher.getProviderFactory(), request.method(), allocator, output, routingContext); LazyHostSupplier hostSupplier = new LazyHostSupplier(request); VertxHttpRequest vertxRequest = new VertxHttpRequest(ctx, routingContext, headers, uriInfo, request.rawMethod(), hostSupplier, dispatcher.getDispatcher(), vertxResponse, requestContext, executor); vertxRequest.setInputStream(is); Map<Class<?>, Object> map = new HashMap<>(); map.put(SecurityContext.class, new QuarkusResteasySecurityContext(request, routingContext)); map.put(RoutingContext.class, routingContext); try (ResteasyContext.CloseableContext restCtx = ResteasyContext.addCloseableContextDataLevel(map)) { ContextUtil.pushContext(routingContext); dispatcher.service(ctx, request, response, vertxRequest, vertxResponse, true); } catch (Failure e1) { vertxResponse.setStatus(e1.getErrorCode()); if (e1.isLoggable()) { log.error(e1); } } catch (Throwable ex) { routingContext.fail(ex); } boolean suspended = vertxRequest.getAsyncContext().isSuspended(); boolean requestContextActive = requestContext.isActive(); if (!suspended) { try { if (requestContextActive) { requestContext.terminate(); } } finally { try { vertxResponse.finish(); } catch (IOException e) { log.debug("IOException writing JAX-RS response", e); } } } else { requestContext.deactivate(); } } catch (Throwable t) { try { routingContext.fail(t); } finally { if (requestContext.isActive()) { requestContext.terminate(); } } } }
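The rewritten `dispatch` relies on `ResteasyContext.addCloseableContextDataLevel(Map)` returning an `AutoCloseable` handle, so try-with-resources pops the context-data level even when dispatch fails; the previous `pushContextDataMap` call had no matching pop on the failure paths. A minimal sketch of the pattern in isolation (the wrapper class and `Runnable` parameter are hypothetical):

```java
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.SecurityContext;
import org.jboss.resteasy.core.ResteasyContext;

public final class ContextLevelSketch {
    static void serveWithContext(SecurityContext securityContext, Runnable dispatch) {
        Map<Class<?>, Object> data = new HashMap<>();
        data.put(SecurityContext.class, securityContext);
        // addCloseableContextDataLevel pushes a fresh data level and returns a handle;
        // closing it pops the level whether dispatch completes or throws.
        try (ResteasyContext.CloseableContext ignored = ResteasyContext.addCloseableContextDataLevel(data)) {
            dispatch.run();
        } catch (Throwable t) {
            // Surface dispatch failures to the caller (mirrors routingContext.fail above).
            throw new RuntimeException(t);
        }
    }
}
```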
class VertxRequestHandler implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger("io.quarkus.resteasy"); protected final Vertx vertx; protected final RequestDispatcher dispatcher; protected final String rootPath; protected final BufferAllocator allocator; protected final BeanContainer beanContainer; protected final CurrentIdentityAssociation association; protected final CurrentVertxRequest currentVertxRequest; protected final Executor executor; protected final long readTimeout; public VertxRequestHandler(Vertx vertx, BeanContainer beanContainer, ResteasyDeployment deployment, String rootPath, BufferAllocator allocator, Executor executor, long readTimeout) { this.vertx = vertx; this.beanContainer = beanContainer; this.dispatcher = new RequestDispatcher((SynchronousDispatcher) deployment.getDispatcher(), deployment.getProviderFactory(), null, Thread.currentThread().getContextClassLoader()); this.rootPath = rootPath; this.allocator = allocator; this.executor = executor; this.readTimeout = readTimeout; Instance<CurrentIdentityAssociation> association = CDI.current().select(CurrentIdentityAssociation.class); this.association = association.isResolvable() ? association.get() : null; currentVertxRequest = CDI.current().select(CurrentVertxRequest.class).get(); } @Override public void handle(RoutingContext request) { InputStream is; try { if (request.getBody() != null) { is = new ByteArrayInputStream(request.getBody().getBytes()); } else { is = new VertxInputStream(request, readTimeout); } } catch (IOException e) { request.fail(e); return; } if (BlockingOperationControl.isBlockingAllowed()) { try { dispatch(request, is, new VertxBlockingOutput(request.request())); } catch (Throwable e) { request.fail(e); } } else { executor.execute(new Runnable() { @Override public void run() { try { dispatch(request, is, new VertxBlockingOutput(request.request())); } catch (Throwable e) { request.fail(e); } } }); } } }
class VertxRequestHandler implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger("io.quarkus.resteasy"); protected final Vertx vertx; protected final RequestDispatcher dispatcher; protected final String rootPath; protected final BufferAllocator allocator; protected final BeanContainer beanContainer; protected final CurrentIdentityAssociation association; protected final CurrentVertxRequest currentVertxRequest; protected final Executor executor; protected final long readTimeout; public VertxRequestHandler(Vertx vertx, BeanContainer beanContainer, ResteasyDeployment deployment, String rootPath, BufferAllocator allocator, Executor executor, long readTimeout) { this.vertx = vertx; this.beanContainer = beanContainer; this.dispatcher = new RequestDispatcher((SynchronousDispatcher) deployment.getDispatcher(), deployment.getProviderFactory(), null, Thread.currentThread().getContextClassLoader()); this.rootPath = rootPath; this.allocator = allocator; this.executor = executor; this.readTimeout = readTimeout; Instance<CurrentIdentityAssociation> association = CDI.current().select(CurrentIdentityAssociation.class); this.association = association.isResolvable() ? association.get() : null; currentVertxRequest = CDI.current().select(CurrentVertxRequest.class).get(); } @Override public void handle(RoutingContext request) { InputStream is; try { if (request.getBody() != null) { is = new ByteArrayInputStream(request.getBody().getBytes()); } else { is = new VertxInputStream(request, readTimeout); } } catch (IOException e) { request.fail(e); return; } if (BlockingOperationControl.isBlockingAllowed()) { try { dispatch(request, is, new VertxBlockingOutput(request.request())); } catch (Throwable e) { request.fail(e); } } else { executor.execute(new Runnable() { @Override public void run() { try { dispatch(request, is, new VertxBlockingOutput(request.request())); } catch (Throwable e) { request.fail(e); } } }); } } }
The exception will already show up in `fe.log`; the debug log is unnecessary.
public boolean exceedLimit(boolean usingHardLimit) { LOG.debug("using hard limit: {}, diskAvailableCapacityB: {}, totalCapacityB: {}", usingHardLimit, diskAvailableCapacityB, totalCapacityB); return DiskInfo.exceedLimit(diskAvailableCapacityB, totalCapacityB, usingHardLimit); }
LOG.debug("using hard limit: {}, diskAvailableCapacityB: {}, totalCapacityB: {}",
public boolean exceedLimit(boolean usingHardLimit) { return DiskInfo.exceedLimit(diskAvailableCapacityB, totalCapacityB, usingHardLimit); }
class DiskInfo implements Writable { private static final Logger LOG = LogManager.getLogger(DiskInfo.class); public enum DiskState { ONLINE, OFFLINE } private static final long DEFAULT_CAPACITY_B = 1024 * 1024 * 1024 * 1024L; private String rootPath; private long totalCapacityB; private long dataUsedCapacityB; private long diskAvailableCapacityB; private DiskState state; private long pathHash = 0; private TStorageMedium storageMedium; private DiskInfo() { } public DiskInfo(String rootPath) { this.rootPath = rootPath; this.totalCapacityB = DEFAULT_CAPACITY_B; this.dataUsedCapacityB = 0; this.diskAvailableCapacityB = totalCapacityB; this.state = DiskState.ONLINE; this.pathHash = 0; this.storageMedium = TStorageMedium.HDD; } public String getRootPath() { return rootPath; } public long getTotalCapacityB() { return totalCapacityB; } public void setTotalCapacityB(long totalCapacityB) { this.totalCapacityB = totalCapacityB; } /** * OtherUsed (totalCapacityB - diskAvailableCapacityB - dataUsedCapacityB) may hold a lot of disk space, * disk usage percent = DataUsedCapacityB / TotalCapacityB in balance, * using dataUsedCapacityB + diskAvailableCapacityB as total capacity is more reasonable. */ public long getDataTotalCapacityB() { return dataUsedCapacityB + diskAvailableCapacityB; } public long getDataUsedCapacityB() { return dataUsedCapacityB; } public void setDataUsedCapacityB(long dataUsedCapacityB) { this.dataUsedCapacityB = dataUsedCapacityB; } public long getAvailableCapacityB() { return diskAvailableCapacityB; } public void setAvailableCapacityB(long availableCapacityB) { this.diskAvailableCapacityB = availableCapacityB; } public double getUsedPct() { return (totalCapacityB - diskAvailableCapacityB) / (double) (totalCapacityB <= 0 ? 1 : totalCapacityB); } public DiskState getState() { return state; } public boolean setState(DiskState state) { if (this.state != state) { this.state = state; return true; } return false; } public long getPathHash() { return pathHash; } public void setPathHash(long pathHash) { this.pathHash = pathHash; } public boolean hasPathHash() { return pathHash != 0; } public TStorageMedium getStorageMedium() { return storageMedium; } public void setStorageMedium(TStorageMedium storageMedium) { this.storageMedium = storageMedium; } /* * Check if this disk's capacity reach the limit. Return true if yes. * If usingHardLimit is true, use usingHardLimit threshold to check. 
*/ public static boolean exceedLimit(long currentAvailCapacityB, long totalCapacityB, boolean usingHardLimit) { if (usingHardLimit) { return currentAvailCapacityB < Config.storage_usage_hard_limit_reserve_bytes && (double) (totalCapacityB - currentAvailCapacityB) / totalCapacityB > (Config.storage_usage_hard_limit_percent / 100.0); } else { return currentAvailCapacityB < Config.storage_usage_soft_limit_reserve_bytes && (double) (totalCapacityB - currentAvailCapacityB) / totalCapacityB > (Config.storage_usage_soft_limit_percent / 100.0); } } @Override public String toString() { return "DiskInfo [rootPath=" + rootPath + "(" + pathHash + "), totalCapacityB=" + totalCapacityB + ", dataTotalCapacityB=" + getDataTotalCapacityB() + ", dataUsedCapacityB=" + dataUsedCapacityB + ", diskAvailableCapacityB=" + diskAvailableCapacityB + ", state=" + state + ", medium: " + storageMedium + "]"; } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, rootPath); out.writeLong(totalCapacityB); out.writeLong(dataUsedCapacityB); out.writeLong(diskAvailableCapacityB); Text.writeString(out, state.name()); } public void readFields(DataInput in) throws IOException { this.rootPath = Text.readString(in); this.totalCapacityB = in.readLong(); if (GlobalStateMgr.getCurrentStateJournalVersion() >= FeMetaVersion.VERSION_36) { this.dataUsedCapacityB = in.readLong(); this.diskAvailableCapacityB = in.readLong(); } else { long availableCapacityB = in.readLong(); this.dataUsedCapacityB = this.totalCapacityB - availableCapacityB; this.diskAvailableCapacityB = availableCapacityB; } this.state = DiskState.valueOf(Text.readString(in)); } public static DiskInfo read(DataInput in) throws IOException { DiskInfo diskInfo = new DiskInfo(); diskInfo.readFields(in); return diskInfo; } }
class DiskInfo implements Writable { private static final Logger LOG = LogManager.getLogger(DiskInfo.class); public enum DiskState { ONLINE, OFFLINE } private static final long DEFAULT_CAPACITY_B = 1024 * 1024 * 1024 * 1024L; private String rootPath; private long totalCapacityB; private long dataUsedCapacityB; private long diskAvailableCapacityB; private DiskState state; private long pathHash = 0; private TStorageMedium storageMedium; private DiskInfo() { } public DiskInfo(String rootPath) { this.rootPath = rootPath; this.totalCapacityB = DEFAULT_CAPACITY_B; this.dataUsedCapacityB = 0; this.diskAvailableCapacityB = totalCapacityB; this.state = DiskState.ONLINE; this.pathHash = 0; this.storageMedium = TStorageMedium.HDD; } public String getRootPath() { return rootPath; } public long getTotalCapacityB() { return totalCapacityB; } public void setTotalCapacityB(long totalCapacityB) { this.totalCapacityB = totalCapacityB; } /** * OtherUsed (totalCapacityB - diskAvailableCapacityB - dataUsedCapacityB) may hold a lot of disk space, * disk usage percent = DataUsedCapacityB / TotalCapacityB in balance, * using dataUsedCapacityB + diskAvailableCapacityB as total capacity is more reasonable. */ public long getDataTotalCapacityB() { return dataUsedCapacityB + diskAvailableCapacityB; } public long getDataUsedCapacityB() { return dataUsedCapacityB; } public void setDataUsedCapacityB(long dataUsedCapacityB) { this.dataUsedCapacityB = dataUsedCapacityB; } public long getAvailableCapacityB() { return diskAvailableCapacityB; } public void setAvailableCapacityB(long availableCapacityB) { this.diskAvailableCapacityB = availableCapacityB; } public double getUsedPct() { return (totalCapacityB - diskAvailableCapacityB) / (double) (totalCapacityB <= 0 ? 1 : totalCapacityB); } public DiskState getState() { return state; } public boolean setState(DiskState state) { if (this.state != state) { this.state = state; return true; } return false; } public long getPathHash() { return pathHash; } public void setPathHash(long pathHash) { this.pathHash = pathHash; } public boolean hasPathHash() { return pathHash != 0; } public TStorageMedium getStorageMedium() { return storageMedium; } public void setStorageMedium(TStorageMedium storageMedium) { this.storageMedium = storageMedium; } /* * Check if this disk's capacity reach the limit. Return true if yes. * If usingHardLimit is true, use usingHardLimit threshold to check. 
*/ public static boolean exceedLimit(long currentAvailCapacityB, long totalCapacityB, boolean usingHardLimit) { if (usingHardLimit) { return currentAvailCapacityB < Config.storage_usage_hard_limit_reserve_bytes && (double) (totalCapacityB - currentAvailCapacityB) / totalCapacityB > (Config.storage_usage_hard_limit_percent / 100.0); } else { return currentAvailCapacityB < Config.storage_usage_soft_limit_reserve_bytes && (double) (totalCapacityB - currentAvailCapacityB) / totalCapacityB > (Config.storage_usage_soft_limit_percent / 100.0); } } @Override public String toString() { return "DiskInfo [rootPath=" + rootPath + "(" + pathHash + "), totalCapacityB=" + totalCapacityB + ", dataTotalCapacityB=" + getDataTotalCapacityB() + ", dataUsedCapacityB=" + dataUsedCapacityB + ", diskAvailableCapacityB=" + diskAvailableCapacityB + ", state=" + state + ", medium: " + storageMedium + "]"; } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, rootPath); out.writeLong(totalCapacityB); out.writeLong(dataUsedCapacityB); out.writeLong(diskAvailableCapacityB); Text.writeString(out, state.name()); } public void readFields(DataInput in) throws IOException { this.rootPath = Text.readString(in); this.totalCapacityB = in.readLong(); if (GlobalStateMgr.getCurrentStateJournalVersion() >= FeMetaVersion.VERSION_36) { this.dataUsedCapacityB = in.readLong(); this.diskAvailableCapacityB = in.readLong(); } else { long availableCapacityB = in.readLong(); this.dataUsedCapacityB = this.totalCapacityB - availableCapacityB; this.diskAvailableCapacityB = availableCapacityB; } this.state = DiskState.valueOf(Text.readString(in)); } public static DiskInfo read(DataInput in) throws IOException { DiskInfo diskInfo = new DiskInfo(); diskInfo.readFields(in); return diskInfo; } }
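`exceedLimit` trips only when both conditions hold: free space is below the reserve bytes and the used fraction is above the percent threshold. A worked example with hypothetical threshold values standing in for `Config.storage_usage_hard_limit_reserve_bytes` and `Config.storage_usage_hard_limit_percent`:

```java
public final class DiskLimitExample {
    // Hypothetical thresholds (the real values come from Config):
    static final long RESERVE_BYTES = 100L << 30; // 100 GiB
    static final double LIMIT_PCT = 90.0;

    static boolean exceed(long availB, long totalB) {
        return availB < RESERVE_BYTES
                && (double) (totalB - availB) / totalB > LIMIT_PCT / 100.0;
    }

    public static void main(String[] args) {
        System.out.println(exceed(40L << 30, 1000L << 30)); // true: 40 GiB free AND 96% used
        System.out.println(exceed(40L << 30, 200L << 30));  // false: only 80% used, so the AND fails
    }
}
```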
Fixed, replaced `org.junit.Assert.assertEquals` with `org.junit.jupiter.api.Assertions.assertEquals`
void testUpdate() { assertThatThrownBy( () -> { String value = aggrState.get(); assertEquals("42", value); aggrState.add(54L); }) .isInstanceOf(UnsupportedOperationException.class); }
assertEquals("42", value);
void testUpdate() throws Exception { String value = aggrState.get(); assertThat(value).isEqualTo("42"); assertThatThrownBy(() -> aggrState.add(54L)) .isInstanceOf(UnsupportedOperationException.class); }
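The comment describes moving from `org.junit.Assert` to `org.junit.jupiter.api.Assertions`, while the final body lands on AssertJ's `assertThat`; for this check the three forms are equivalent. A minimal side-by-side sketch (the wrapper class is hypothetical):

```java
public final class AssertionStyles {
    static void check(String value) {
        org.junit.Assert.assertEquals("42", value);                        // JUnit 4
        org.junit.jupiter.api.Assertions.assertEquals("42", value);        // JUnit 5
        org.assertj.core.api.Assertions.assertThat(value).isEqualTo("42"); // AssertJ
    }
}
```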
class ImmutableAggregatingStateTest { private final AggregatingStateDescriptor<Long, String, String> aggrStateDesc = new AggregatingStateDescriptor<>("test", new SumAggr(), String.class); private AggregatingState<Long, String> aggrState; @BeforeEach void setUp() throws Exception { if (!aggrStateDesc.isSerializerInitialized()) { aggrStateDesc.initializeSerializerUnlessSet(new ExecutionConfig()); } final String initValue = "42"; ByteArrayOutputStream out = new ByteArrayOutputStream(); aggrStateDesc.getSerializer().serialize(initValue, new DataOutputViewStreamWrapper(out)); aggrState = ImmutableAggregatingState.createState(aggrStateDesc, out.toByteArray()); } @Test @Test void testClear() { assertThatThrownBy( () -> { String value = aggrState.get(); assertEquals("42", value); aggrState.clear(); }) .isInstanceOf(UnsupportedOperationException.class); } /** * Test {@link AggregateFunction} concatenating the already stored string with the long passed * as argument. */ private static class SumAggr implements AggregateFunction<Long, String, String> { private static final long serialVersionUID = -6249227626701264599L; @Override public String createAccumulator() { return ""; } @Override public String add(Long value, String accumulator) { accumulator += ", " + value; return accumulator; } @Override public String getResult(String accumulator) { return accumulator; } @Override public String merge(String a, String b) { return a + ", " + b; } } }
class ImmutableAggregatingStateTest { private final AggregatingStateDescriptor<Long, String, String> aggrStateDesc = new AggregatingStateDescriptor<>("test", new SumAggr(), String.class); private AggregatingState<Long, String> aggrState; @BeforeEach void setUp() throws Exception { if (!aggrStateDesc.isSerializerInitialized()) { aggrStateDesc.initializeSerializerUnlessSet(new ExecutionConfig()); } final String initValue = "42"; ByteArrayOutputStream out = new ByteArrayOutputStream(); aggrStateDesc.getSerializer().serialize(initValue, new DataOutputViewStreamWrapper(out)); aggrState = ImmutableAggregatingState.createState(aggrStateDesc, out.toByteArray()); } @Test @Test void testClear() throws Exception { String value = aggrState.get(); assertThat(value).isEqualTo("42"); assertThatThrownBy(() -> aggrState.clear()) .isInstanceOf(UnsupportedOperationException.class); } /** * Test {@link AggregateFunction} concatenating the already stored string with the long passed * as argument. */ private static class SumAggr implements AggregateFunction<Long, String, String> { private static final long serialVersionUID = -6249227626701264599L; @Override public String createAccumulator() { return ""; } @Override public String add(Long value, String accumulator) { accumulator += ", " + value; return accumulator; } @Override public String getResult(String accumulator) { return accumulator; } @Override public String merge(String a, String b) { return a + ", " + b; } } }
@radcortez Thanks, I forgot there is no dedicated property for regex origins yet, so for now it won't work.
public static List<Pattern> parseAllowedOriginsRegex(Optional<List<String>> allowedOrigins) { if (allowedOrigins == null || !allowedOrigins.isPresent()) { return Collections.emptyList(); } List<Pattern> allowOriginsRegex = new ArrayList<>(); allowedOrigins.get().stream() .forEach(o -> { if (o != null && o.startsWith("/") && o.endsWith("/")) { allowOriginsRegex.add(Pattern.compile(o.substring(1, o.length() - 1))); } }); return allowOriginsRegex; }
allowedOrigins.get().stream()
public static List<Pattern> parseAllowedOriginsRegex(Optional<List<String>> allowedOrigins) { if (allowedOrigins == null || !allowedOrigins.isPresent()) { return Collections.emptyList(); } List<Pattern> allowOriginsRegex = new ArrayList<>(); for (String o : allowedOrigins.get()) { if (o != null && o.startsWith("/") && o.endsWith("/")) { allowOriginsRegex.add(Pattern.compile(o.substring(1, o.length() - 1))); } } return allowOriginsRegex; }
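`parseAllowedOriginsRegex` only compiles entries wrapped in `/.../`; plain origins fall through here and are matched literally elsewhere in the filter. A usage sketch, assuming same-package access to `CORSFilter` (the origin values are hypothetical):

```java
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;

public final class RegexOriginDemo {
    public static void main(String[] args) {
        List<Pattern> patterns = CORSFilter.parseAllowedOriginsRegex(Optional.of(Arrays.asList(
                "/https://.*\\.example\\.com/",      // '/'-delimited -> compiled as a regex
                "https://literal.example.com")));    // plain origin -> skipped by this parser
        System.out.println(patterns.size());         // 1
        System.out.println(patterns.get(0).matcher("https://app.example.com").matches()); // true
    }
}
```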
class CORSFilter implements Handler<RoutingContext> { private static final Pattern COMMA_SEPARATED_SPLIT_REGEX = Pattern.compile("\\s*,\\s*"); final CORSConfig corsConfig; final List<Pattern> allowedOriginsRegex; public CORSFilter(CORSConfig corsConfig) { this.corsConfig = corsConfig; this.allowedOriginsRegex = parseAllowedOriginsRegex(this.corsConfig.origins); } public static boolean isConfiguredWithWildcard(Optional<List<String>> optionalList) { if (optionalList == null || !optionalList.isPresent()) { return true; } List<String> list = optionalList.get(); return list.isEmpty() || (list.size() == 1 && "*".equals(list.get(0))); } /** * Parse the provided allowed origins for any regexes * * @param allowedOrigins * @return a list of compiled regular expressions. If none configured, and empty list is returned */ /** * If any regular expression origins are configured, try to match on them. * Regular expressions must begin and end with '/' * * @param allowedOrigins the configured regex origins. * @param origin the specified origin * @return true if any configured regular expressions match the specified origin, false otherwise */ public static boolean isOriginAllowedByRegex(List<Pattern> allowOriginsRegex, String origin) { return (allowOriginsRegex.stream() .filter(pattern -> pattern.matcher(origin).matches()) .map(pattern -> origin) .findFirst() .orElse(null) != null); } private void processRequestedHeaders(HttpServerResponse response, String allowHeadersValue) { if (isConfiguredWithWildcard(corsConfig.headers)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, allowHeadersValue); } else { List<String> requestedHeaders = new ArrayList<>(); for (String requestedHeader : COMMA_SEPARATED_SPLIT_REGEX.split(allowHeadersValue)) { requestedHeaders.add(requestedHeader.toLowerCase()); } List<String> validRequestedHeaders = new ArrayList<>(); for (String configHeader : corsConfig.headers.get()) { if (requestedHeaders.contains(configHeader.toLowerCase())) { validRequestedHeaders.add(configHeader); } } if (!validRequestedHeaders.isEmpty()) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, String.join(",", validRequestedHeaders)); } } } private void processMethods(HttpServerResponse response, String allowMethodsValue) { if (isConfiguredWithWildcard(corsConfig.methods)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, allowMethodsValue); } else { List<String> requestedMethods = new ArrayList<>(); for (String requestedMethod : COMMA_SEPARATED_SPLIT_REGEX.split(allowMethodsValue)) { requestedMethods.add(requestedMethod.toLowerCase()); } List<String> validRequestedMethods = new ArrayList<>(); List<HttpMethod> methods = corsConfig.methods.get().stream().map(HttpMethod::valueOf) .collect(Collectors.toList()); for (HttpMethod configMethod : methods) { if (requestedMethods.contains(configMethod.name().toLowerCase())) { validRequestedMethods.add(configMethod.name()); } } if (!validRequestedMethods.isEmpty()) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, String.join(",", validRequestedMethods)); } } } @Override public void handle(RoutingContext event) { Objects.requireNonNull(corsConfig, "CORS config is not set"); HttpServerRequest request = event.request(); HttpServerResponse response = event.response(); String origin = request.getHeader(HttpHeaders.ORIGIN); if (origin == null) { event.next(); } else { final String requestedMethods = request.getHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD); if (requestedMethods != null) { 
processMethods(response, requestedMethods); } final String requestedHeaders = request.getHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS); if (requestedHeaders != null) { processRequestedHeaders(response, requestedHeaders); } boolean allowsOrigin = isConfiguredWithWildcard(corsConfig.origins) || corsConfig.origins.get().contains(origin) || isOriginAllowedByRegex(allowedOriginsRegex, origin); if (allowsOrigin) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, origin); } boolean allowCredentials = corsConfig.accessControlAllowCredentials .orElseGet(() -> corsConfig.origins.isPresent() && corsConfig.origins.get().contains(origin) && !origin.equals("*")); response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, String.valueOf(allowCredentials)); final Optional<List<String>> exposedHeaders = corsConfig.exposedHeaders; if (!isConfiguredWithWildcard(exposedHeaders)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, String.join(",", exposedHeaders.orElse(Collections.emptyList()))); } if (request.method().equals(HttpMethod.OPTIONS)) { if ((requestedHeaders != null || requestedMethods != null) && corsConfig.accessControlMaxAge.isPresent()) { response.putHeader(HttpHeaders.ACCESS_CONTROL_MAX_AGE, String.valueOf(corsConfig.accessControlMaxAge.get().getSeconds())); } response.end(); } else { event.next(); } } } }
class CORSFilter implements Handler<RoutingContext> { private static final Pattern COMMA_SEPARATED_SPLIT_REGEX = Pattern.compile("\\s*,\\s*"); final CORSConfig corsConfig; final List<Pattern> allowedOriginsRegex; public CORSFilter(CORSConfig corsConfig) { this.corsConfig = corsConfig; this.allowedOriginsRegex = parseAllowedOriginsRegex(this.corsConfig.origins); } public static boolean isConfiguredWithWildcard(Optional<List<String>> optionalList) { if (optionalList == null || !optionalList.isPresent()) { return true; } List<String> list = optionalList.get(); return list.isEmpty() || (list.size() == 1 && "*".equals(list.get(0))); } /** * Parse the provided allowed origins for any regexes * * @param allowedOrigins * @return a list of compiled regular expressions. If none configured, and empty list is returned */ /** * If any regular expression origins are configured, try to match on them. * Regular expressions must begin and end with '/' * * @param allowedOrigins the configured regex origins. * @param origin the specified origin * @return true if any configured regular expressions match the specified origin, false otherwise */ public static boolean isOriginAllowedByRegex(List<Pattern> allowOriginsRegex, String origin) { if (allowOriginsRegex == null) { return false; } for (Pattern pattern : allowOriginsRegex) { if (pattern.matcher(origin).matches()) { return true; } } return false; } private void processRequestedHeaders(HttpServerResponse response, String allowHeadersValue) { if (isConfiguredWithWildcard(corsConfig.headers)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, allowHeadersValue); } else { List<String> requestedHeaders = new ArrayList<>(); for (String requestedHeader : COMMA_SEPARATED_SPLIT_REGEX.split(allowHeadersValue)) { requestedHeaders.add(requestedHeader.toLowerCase()); } List<String> validRequestedHeaders = new ArrayList<>(); for (String configHeader : corsConfig.headers.get()) { if (requestedHeaders.contains(configHeader.toLowerCase())) { validRequestedHeaders.add(configHeader); } } if (!validRequestedHeaders.isEmpty()) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, String.join(",", validRequestedHeaders)); } } } private void processMethods(HttpServerResponse response, String allowMethodsValue) { if (isConfiguredWithWildcard(corsConfig.methods)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, allowMethodsValue); } else { List<String> requestedMethods = new ArrayList<>(); for (String requestedMethod : COMMA_SEPARATED_SPLIT_REGEX.split(allowMethodsValue)) { requestedMethods.add(requestedMethod.toLowerCase()); } List<String> validRequestedMethods = new ArrayList<>(); List<HttpMethod> methods = corsConfig.methods.get().stream().map(HttpMethod::valueOf) .collect(Collectors.toList()); for (HttpMethod configMethod : methods) { if (requestedMethods.contains(configMethod.name().toLowerCase())) { validRequestedMethods.add(configMethod.name()); } } if (!validRequestedMethods.isEmpty()) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, String.join(",", validRequestedMethods)); } } } @Override public void handle(RoutingContext event) { Objects.requireNonNull(corsConfig, "CORS config is not set"); HttpServerRequest request = event.request(); HttpServerResponse response = event.response(); String origin = request.getHeader(HttpHeaders.ORIGIN); if (origin == null) { event.next(); } else { final String requestedMethods = request.getHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD); if (requestedMethods != null) { 
processMethods(response, requestedMethods); } final String requestedHeaders = request.getHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS); if (requestedHeaders != null) { processRequestedHeaders(response, requestedHeaders); } boolean allowsOrigin = isConfiguredWithWildcard(corsConfig.origins) || corsConfig.origins.get().contains(origin) || isOriginAllowedByRegex(allowedOriginsRegex, origin); if (allowsOrigin) { response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, origin); } boolean allowCredentials = corsConfig.accessControlAllowCredentials .orElseGet(() -> corsConfig.origins.isPresent() && corsConfig.origins.get().contains(origin) && !origin.equals("*")); response.headers().set(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, String.valueOf(allowCredentials)); final Optional<List<String>> exposedHeaders = corsConfig.exposedHeaders; if (!isConfiguredWithWildcard(exposedHeaders)) { response.headers().set(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, String.join(",", exposedHeaders.orElse(Collections.emptyList()))); } if (request.method().equals(HttpMethod.OPTIONS)) { if ((requestedHeaders != null || requestedMethods != null) && corsConfig.accessControlMaxAge.isPresent()) { response.putHeader(HttpHeaders.ACCESS_CONTROL_MAX_AGE, String.valueOf(corsConfig.accessControlMaxAge.get().getSeconds())); } response.end(); } else { event.next(); } } } }
Catch the refresh-table exception and then invalidate the cache of CachingIcebergCatalog.
public void alterTable(AlterTableStmt stmt) throws UserException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName); if (table == null) { throw new StarRocksConnectorException( "Failed to load iceberg table: " + stmt.getTbl().toString()); } List<AlterClause> alterClauses = stmt.getOps(); List<AlterClause> tableChanges = Lists.newArrayList(); List<AlterClause> schemaChanges = Lists.newArrayList(); for (AlterClause clause : alterClauses) { if (clause instanceof AddColumnClause || clause instanceof AddColumnsClause || clause instanceof DropColumnClause || clause instanceof ColumnRenameClause || clause instanceof ModifyColumnClause) { schemaChanges.add(clause); } else if (clause instanceof ModifyTablePropertiesClause || clause instanceof TableRenameClause || clause instanceof AlterTableCommentClause ) { tableChanges.add(clause); } else { throw new StarRocksConnectorException( "Unsupported alter operation for iceberg connector: " + clause.toString()); } } commitAlterTable(table, schemaChanges, tableChanges); synchronized(this) { tables.remove(TableIdentifier.of(dbName, tableName)); icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); asyncRefreshOthersFeMetadataCache(dbName, tableName); } }
icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
public void alterTable(AlterTableStmt stmt) throws UserException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName); if (table == null) { throw new StarRocksConnectorException( "Failed to load iceberg table: " + stmt.getTbl().toString()); } List<AlterClause> alterClauses = stmt.getOps(); List<AlterClause> tableChanges = Lists.newArrayList(); List<AlterClause> schemaChanges = Lists.newArrayList(); for (AlterClause clause : alterClauses) { if (clause instanceof AddColumnClause || clause instanceof AddColumnsClause || clause instanceof DropColumnClause || clause instanceof ColumnRenameClause || clause instanceof ModifyColumnClause) { schemaChanges.add(clause); } else if (clause instanceof ModifyTablePropertiesClause || clause instanceof TableRenameClause || clause instanceof AlterTableCommentClause ) { tableChanges.add(clause); } else { throw new StarRocksConnectorException( "Unsupported alter operation for iceberg connector: " + clause.toString()); } } commitAlterTable(table, schemaChanges, tableChanges); synchronized (this) { tables.remove(TableIdentifier.of(dbName, tableName)); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception exception) { LOG.error("Failed to refresh caching iceberg table."); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } asyncRefreshOthersFeMetadataCache(dbName, tableName); } }
class IcebergMetadata implements ConnectorMetadata { private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class); private final String catalogName; private final HdfsEnvironment hdfsEnvironment; private final IcebergCatalog icebergCatalog; private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider(); private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>(); private final Map<String, Database> databases = new ConcurrentHashMap<>(); private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>(); private final Set<IcebergFilter> scannedTables = new HashSet<>(); private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>(); private final ExecutorService jobPlanningExecutor; private final ExecutorService refreshOtherFeExecutor; public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog, ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) { this.catalogName = catalogName; this.hdfsEnvironment = hdfsEnvironment; this.icebergCatalog = icebergCatalog; new IcebergMetricsReporter().setThreadLocalReporter(); this.jobPlanningExecutor = jobPlanningExecutor; this.refreshOtherFeExecutor = refreshOtherFeExecutor; } @Override public List<String> listDbNames() { return icebergCatalog.listAllDatabases(); } @Override public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException { if (dbExists(dbName)) { throw new AlreadyExistsException("Database Already Exists"); } icebergCatalog.createDb(dbName, properties); } @Override public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException { if (listTableNames(dbName).size() != 0) { throw new StarRocksConnectorException("Database %s not empty", dbName); } icebergCatalog.dropDb(dbName); databases.remove(dbName); } @Override public Database getDb(String dbName) { if (databases.containsKey(dbName)) { return databases.get(dbName); } Database db; try { db = icebergCatalog.getDB(dbName); } catch (NoSuchNamespaceException e) { LOG.error("Database {} not found", dbName, e); return null; } databases.put(dbName, db); return db; } @Override public List<String> listTableNames(String dbName) { return icebergCatalog.listTables(dbName); } @Override public boolean createTable(CreateTableStmt stmt) throws DdlException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); Schema schema = toIcebergApiSchema(stmt.getColumns()); PartitionDesc partitionDesc = stmt.getPartitionDesc(); List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() : ((ListPartitionDesc) partitionDesc).getPartitionColNames(); PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames); Map<String, String> properties = stmt.getProperties() == null ? 
new HashMap<>() : stmt.getProperties(); String tableLocation = properties.get(LOCATION_PROPERTY); Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties); return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation, createTableProperties); } @Override private void commitAlterTable(org.apache.iceberg.Table table, List<AlterClause> schemaChanges, List<AlterClause> tableChanges) { Transaction transaction = table.newTransaction(); if (!tableChanges.isEmpty()) { throw new StarRocksConnectorException( "Unsupported alter operation for iceberg connector"); } if (!schemaChanges.isEmpty()) { IcebergApiConverter.applySchemaChanges(transaction.updateSchema(), schemaChanges); } transaction.commitTransaction(); } @Override public void dropTable(DropTableStmt stmt) { Table icebergTable = getTable(stmt.getDbName(), stmt.getTableName()); if (icebergTable == null) { return; } icebergCatalog.dropTable(stmt.getDbName(), stmt.getTableName(), stmt.isForceDrop()); tables.remove(TableIdentifier.of(stmt.getDbName(), stmt.getTableName())); StatisticUtils.dropStatisticsAfterDropTable(icebergTable); asyncRefreshOthersFeMetadataCache(stmt.getDbName(), stmt.getTableName()); } @Override public Table getTable(String dbName, String tblName) { TableIdentifier identifier = TableIdentifier.of(dbName, tblName); if (tables.containsKey(identifier)) { return tables.get(identifier); } try { IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType(); org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName); Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name()); tables.put(identifier, table); return table; } catch (StarRocksConnectorException | NoSuchTableException e) { LOG.error("Failed to get iceberg table {}", identifier, e); return null; } } @Override public List<String> listPartitionNames(String dbName, String tblName) { IcebergCatalogType nativeType = icebergCatalog.getIcebergCatalogType(); if (nativeType != HIVE_CATALOG && nativeType != REST_CATALOG && nativeType != GLUE_CATALOG) { throw new StarRocksConnectorException( "Do not support get partitions from catalog type: " + nativeType); } return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor); } @Override public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys, long snapshotId, ScalarOperator predicate, List<String> fieldNames, long limit) { return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit); } private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId, ScalarOperator predicate, long limit) { RemoteFileInfo remoteFileInfo = new RemoteFileInfo(); String dbName = table.getRemoteDbName(); String tableName = table.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit); List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", dbName, tableName, predicate); } List<RemoteFileDesc> remoteFileDescs = Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(icebergScanTasks)); remoteFileInfo.setFiles(remoteFileDescs); return Lists.newArrayList(remoteFileInfo); } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) { if (!scannedTables.contains(key)) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." + key)) { collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit); } } } public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return new ArrayList<>(); } IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); List<PartitionKey> partitionKeys = new ArrayList<>(); List<FileScanTask> icebergSplitTasks = splitTasks.get(key); if (icebergSplitTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]", dbName, tableName, predicate); } Set<List<String>> scannedPartitions = new HashSet<>(); PartitionSpec spec = icebergTable.getNativeTable().spec(); List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed(); for (FileScanTask fileScanTask : icebergSplitTasks) { org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition(); List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData); if (values.size() != partitionColumns.size()) { continue; } if (scannedPartitions.contains(values)) { continue; } else { scannedPartitions.add(values); } try { List<com.starrocks.catalog.Type> srTypes = new ArrayList<>(); for (PartitionField partitionField : spec.fields()) { if (partitionField.transform().isVoid()) { continue; } if (!partitionField.transform().isIdentity()) { Type sourceType = spec.schema().findType(partitionField.sourceId()); Type resultType = partitionField.transform().getResultType(sourceType); if (resultType == Types.DateType.get()) { resultType = Types.IntegerType.get(); } srTypes.add(fromIcebergType(resultType)); continue; } srTypes.add(icebergTable.getColumn(partitionField.name()).getType()); } if (icebergTable.hasPartitionTransformedEvolution()) { srTypes = partitionColumns.stream() .map(Column::getType) .collect(Collectors.toList()); } partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType())); } catch (Exception e) { LOG.error("create partition key failed.", e); throw new StarRocksConnectorException(e.getMessage()); } } return partitionKeys; } private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return; } long snapshotId = snapshot.get().snapshotId(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable(); Types.StructType schema = 
nativeTbl.schema().asStruct(); List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate); ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema); Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext); TableScan scan = nativeTbl.newScan().useSnapshot(snapshotId); if (enableCollectColumnStatistics()) { scan = scan.includeColumnStats(); } scan = scan.planWith(jobPlanningExecutor); if (icebergPredicate.op() != Expression.Operation.TRUE) { scan = scan.filter(icebergPredicate); } CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles( scan.planFiles(), scan.targetSplitSize()); CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator(); Iterator<FileScanTask> fileScanTasks; boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate) && limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest(); if (canPruneManifests) { fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit); } else { fileScanTasks = fileScanTaskIterator; } List<FileScanTask> icebergScanTasks = Lists.newArrayList(); long totalReadCount = 0; Set<String> filePaths = new HashSet<>(); while (fileScanTasks.hasNext()) { FileScanTask scanTask = fileScanTasks.next(); FileScanTask icebergSplitScanTask = scanTask; if (enableCollectColumnStatistics()) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) { icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key); } List<Types.NestedField> fullColumns = nativeTbl.schema().columns(); Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream() .filter(column -> column.type().isPrimitiveType()) .collect(Collectors.toMap(Types.NestedField::fieldId, column -> column.type().asPrimitiveType())); Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream() .filter(x -> x.transform().isIdentity()) .map(PartitionField::sourceId) .collect(Collectors.toSet()); List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream() .filter(column -> !identityPartitionIds.contains(column.fieldId()) && column.type().isPrimitiveType()) .collect(toImmutableList()); try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) { statisticProvider.updateIcebergFileStats( icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key); } } icebergScanTasks.add(icebergSplitScanTask); String filePath = icebergSplitScanTask.file().path().toString(); if (!filePaths.contains(filePath)) { filePaths.add(filePath); totalReadCount += scanTask.file().recordCount(); } if (canPruneManifests && totalReadCount >= limit) { break; } } try { fileScanTaskIterable.close(); fileScanTaskIterator.close(); } catch (IOException e) { } IcebergMetricsReporter.lastReport().ifPresent(scanReportWithCounter -> Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.ScanMetrics." 
+ scanReportWithCounter.getScanReport().tableName() + " / No_" + scanReportWithCounter.getCount(), scanReportWithCounter.getScanReport().scanMetrics().toString())); splitTasks.put(key, icebergScanTasks); scannedTables.add(key); } @Override public Statistics getTableStatistics(OptimizerContext session, Table table, Map<ColumnRefOperator, Column> columns, List<PartitionKey> partitionKeys, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); long snapshotId; if (snapshot.isPresent()) { snapshotId = snapshot.get().snapshotId(); } else { Statistics.Builder statisticsBuilder = Statistics.builder(); statisticsBuilder.setOutputRowCount(1); statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet())); return statisticsBuilder.build(); } IcebergFilter key = IcebergFilter.of( icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); if (!session.getSessionVariable().enableIcebergColumnStatistics()) { List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate); } try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) { return statisticProvider.getCardinalityStats(columns, icebergScanTasks); } } else { return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate); } } private IcebergSplitScanTask buildIcebergSplitScanTask( FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) { long offset = fileScanTask.start(); long length = fileScanTask.length(); DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats(); DeleteFile[] deleteFiles = fileScanTask.deletes().stream() .map(DeleteFile::copyWithoutStats) .toArray(DeleteFile[]::new); PartitionSpec taskSpec = fileScanTask.spec(); Schema taskSchema = fileScanTask.spec().schema(); String schemaString; String partitionString; FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(), taskSchema.schemaId(), taskSpec.specId()); Pair<String, String> schema = fileScanTaskSchemas.get(schemaKey); if (schema == null) { schemaString = SchemaParser.toJson(fileScanTask.spec().schema()); partitionString = PartitionSpecParser.toJson(fileScanTask.spec()); fileScanTaskSchemas.put(schemaKey, Pair.create(schemaString, partitionString)); } else { schemaString = schema.first; partitionString = schema.second; } ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true); BaseFileScanTask baseFileScanTask = new BaseFileScanTask( dataFileWithoutStats, deleteFiles, schemaString, partitionString, residualEvaluator); return new IcebergSplitScanTask(offset, length, baseFileScanTask); } @Override public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) { if (isResourceMappingCatalog(catalogName)) { refreshTableWithResource(table); } else { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception e) { LOG.error("Failed to refresh 
table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } } private void refreshTableWithResource(Table table) { IcebergTable icebergTable = (IcebergTable) table; org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable(); try { if (nativeTable instanceof BaseTable) { BaseTable baseTable = (BaseTable) nativeTable; if (baseTable.operations().refresh() == null) { throw new NoSuchTableException("No such table: %s", nativeTable.name()); } } else { throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name()); } } catch (NoSuchTableException e) { throw new StarRocksConnectorException("No such table %s", nativeTable.name()); } catch (IllegalStateException ei) { throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" + " may have been dropped. You should re-create the external table. cause %s", nativeTable.name(), ei.getMessage()); } icebergTable.resetSnapshot(); } @Override public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) { boolean isOverwrite = false; if (!commitInfos.isEmpty()) { TSinkCommitInfo sinkCommitInfo = commitInfos.get(0); if (sinkCommitInfo.isSetIs_overwrite()) { isOverwrite = sinkCommitInfo.is_overwrite; } } List<TIcebergDataFile> dataFiles = commitInfos.stream() .map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList()); IcebergTable table = (IcebergTable) getTable(dbName, tableName); org.apache.iceberg.Table nativeTbl = table.getNativeTable(); Transaction transaction = nativeTbl.newTransaction(); BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite); PartitionSpec partitionSpec = nativeTbl.spec(); for (TIcebergDataFile dataFile : dataFiles) { Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile); DataFiles.Builder builder = DataFiles.builder(partitionSpec) .withMetrics(metrics) .withPath(dataFile.path) .withFormat(dataFile.format) .withRecordCount(dataFile.record_count) .withFileSizeInBytes(dataFile.file_size_in_bytes) .withSplitOffsets(dataFile.split_offsets); if (partitionSpec.isPartitioned()) { String relativePartitionLocation = getIcebergRelativePartitionPath( nativeTbl.location(), dataFile.partition_path); PartitionData partitionData = partitionDataFromPath( relativePartitionLocation, partitionSpec); builder.withPartition(partitionData); } batchWrite.addFile(builder.build()); } try { batchWrite.commit(); transaction.commitTransaction(); asyncRefreshOthersFeMetadataCache(dbName, tableName); } catch (Exception e) { List<String> toDeleteFiles = dataFiles.stream() .map(TIcebergDataFile::getPath) .collect(Collectors.toList()); icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles); LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } finally { icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) { refreshOtherFeExecutor.execute(() -> { LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); try { GlobalStateMgr.getCurrentState().refreshOthersFeTable( new TableName(catalogName, dbName, tableName), new ArrayList<>(), false); } catch (DdlException e) { LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", 
catalogName, dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); }); } public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) { return isOverwrite ? new DynamicOverwrite(transaction) : new Append(transaction); } public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) { PartitionData data = new PartitionData(spec.fields().size()); String[] partitions = relativePartitionPath.split("/", -1); List<PartitionField> partitionFields = spec.fields(); for (int i = 0; i < partitions.length; i++) { PartitionField field = partitionFields.get(i); String[] parts = partitions[i].split("=", 2); Preconditions.checkArgument(parts.length == 2 && parts[0] != null && field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]); org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type(); data.set(i, Conversions.fromPartitionString(sourceType, parts[1])); } return data; } public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) { tableLocation = tableLocation.endsWith("/") ? tableLocation.substring(0, tableLocation.length() - 1) : tableLocation; String tableLocationWithData = tableLocation + "/data/"; String path = PartitionUtil.getSuffixName(tableLocationWithData, partitionLocation); if (path.startsWith("/")) { path = path.substring(1); } if (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } return path; } public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) { if (predicate == null) { return true; } List<ColumnRefOperator> columnRefOperators = predicate.getColumnRefs(); List<String> partitionColNames = table.getPartitionColumnNames(); for (ColumnRefOperator c : columnRefOperators) { if (!partitionColNames.contains(c.getName())) { return false; } } return true; } private boolean enablePruneManifest() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().isEnablePruneIcebergManifest(); } private boolean enableCollectColumnStatistics() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().enableIcebergColumnStatistics(); } @Override public void clear() { splitTasks.clear(); tables.clear(); databases.clear(); scannedTables.clear(); IcebergMetricsReporter.remove(); } interface BatchWrite { void addFile(DataFile file); void commit(); } static class Append implements BatchWrite { private final AppendFiles append; public Append(Transaction txn) { append = txn.newAppend(); } @Override public void addFile(DataFile file) { append.appendFile(file); } @Override public void commit() { append.commit(); } } static class DynamicOverwrite implements BatchWrite { private final ReplacePartitions replace; public DynamicOverwrite(Transaction txn) { replace = txn.newReplacePartitions(); } @Override public void addFile(DataFile file) { replace.addFile(file); } @Override public void commit() { replace.commit(); } } public static class PartitionData implements StructLike { private final Object[] values; private PartitionData(int size) { this.values = new Object[size]; } @Override public int size() { return values.length; } @Override public <T> T get(int pos, Class<T> 
javaClass) { return javaClass.cast(values[pos]); } @Override public <T> void set(int pos, T value) { if (value instanceof ByteBuffer) { ByteBuffer buffer = (ByteBuffer) value; byte[] bytes = new byte[buffer.remaining()]; buffer.duplicate().get(bytes); values[pos] = bytes; } else { values[pos] = value; } } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } PartitionData that = (PartitionData) other; return Arrays.equals(values, that.values); } @Override public int hashCode() { return Arrays.hashCode(values); } } @Override public CloudConfiguration getCloudConfiguration() { return hdfsEnvironment.getCloudConfiguration(); } private static class FileScanTaskSchema { private final String dbName; private final String tableName; private final int schemaId; private final int specId; public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) { this.dbName = dbName; this.tableName = tableName; this.schemaId = schemaId; this.specId = specId; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } FileScanTaskSchema that = (FileScanTaskSchema) o; return schemaId == that.schemaId && specId == that.specId && Objects.equals(dbName, that.dbName) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { return Objects.hash(dbName, tableName, schemaId, specId); } } }
class IcebergMetadata implements ConnectorMetadata { private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class); private final String catalogName; private final HdfsEnvironment hdfsEnvironment; private final IcebergCatalog icebergCatalog; private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider(); private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>(); private final Map<String, Database> databases = new ConcurrentHashMap<>(); private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>(); private final Set<IcebergFilter> scannedTables = new HashSet<>(); private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>(); private final ExecutorService jobPlanningExecutor; private final ExecutorService refreshOtherFeExecutor; public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog, ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) { this.catalogName = catalogName; this.hdfsEnvironment = hdfsEnvironment; this.icebergCatalog = icebergCatalog; new IcebergMetricsReporter().setThreadLocalReporter(); this.jobPlanningExecutor = jobPlanningExecutor; this.refreshOtherFeExecutor = refreshOtherFeExecutor; } @Override public List<String> listDbNames() { return icebergCatalog.listAllDatabases(); } @Override public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException { if (dbExists(dbName)) { throw new AlreadyExistsException("Database Already Exists"); } icebergCatalog.createDb(dbName, properties); } @Override public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException { if (listTableNames(dbName).size() != 0) { throw new StarRocksConnectorException("Database %s not empty", dbName); } icebergCatalog.dropDb(dbName); databases.remove(dbName); } @Override public Database getDb(String dbName) { if (databases.containsKey(dbName)) { return databases.get(dbName); } Database db; try { db = icebergCatalog.getDB(dbName); } catch (NoSuchNamespaceException e) { LOG.error("Database {} not found", dbName, e); return null; } databases.put(dbName, db); return db; } @Override public List<String> listTableNames(String dbName) { return icebergCatalog.listTables(dbName); } @Override public boolean createTable(CreateTableStmt stmt) throws DdlException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); Schema schema = toIcebergApiSchema(stmt.getColumns()); PartitionDesc partitionDesc = stmt.getPartitionDesc(); List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() : ((ListPartitionDesc) partitionDesc).getPartitionColNames(); PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames); Map<String, String> properties = stmt.getProperties() == null ? 
new HashMap<>() : stmt.getProperties(); String tableLocation = properties.get(LOCATION_PROPERTY); Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties); return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation, createTableProperties); } @Override private void commitAlterTable(org.apache.iceberg.Table table, List<AlterClause> schemaChanges, List<AlterClause> tableChanges) { Transaction transaction = table.newTransaction(); if (!tableChanges.isEmpty()) { throw new StarRocksConnectorException( "Unsupported alter operation for iceberg connector"); } if (!schemaChanges.isEmpty()) { IcebergApiConverter.applySchemaChanges(transaction.updateSchema(), schemaChanges); } transaction.commitTransaction(); } @Override public void dropTable(DropTableStmt stmt) { Table icebergTable = getTable(stmt.getDbName(), stmt.getTableName()); if (icebergTable == null) { return; } icebergCatalog.dropTable(stmt.getDbName(), stmt.getTableName(), stmt.isForceDrop()); tables.remove(TableIdentifier.of(stmt.getDbName(), stmt.getTableName())); StatisticUtils.dropStatisticsAfterDropTable(icebergTable); asyncRefreshOthersFeMetadataCache(stmt.getDbName(), stmt.getTableName()); } @Override public Table getTable(String dbName, String tblName) { TableIdentifier identifier = TableIdentifier.of(dbName, tblName); if (tables.containsKey(identifier)) { return tables.get(identifier); } try { IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType(); org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName); Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name()); tables.put(identifier, table); return table; } catch (StarRocksConnectorException | NoSuchTableException e) { LOG.error("Failed to get iceberg table {}", identifier, e); return null; } } @Override public boolean tableExists(String dbName, String tblName) { return icebergCatalog.tableExists(dbName, tblName); } @Override public List<String> listPartitionNames(String dbName, String tblName) { IcebergCatalogType nativeType = icebergCatalog.getIcebergCatalogType(); if (nativeType != HIVE_CATALOG && nativeType != REST_CATALOG && nativeType != GLUE_CATALOG) { throw new StarRocksConnectorException( "Do not support get partitions from catalog type: " + nativeType); } return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor); } @Override public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys, long snapshotId, ScalarOperator predicate, List<String> fieldNames, long limit) { return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit); } private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId, ScalarOperator predicate, long limit) { RemoteFileInfo remoteFileInfo = new RemoteFileInfo(); String dbName = table.getRemoteDbName(); String tableName = table.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit); List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", dbName, tableName, predicate); } List<RemoteFileDesc> remoteFileDescs = Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(icebergScanTasks)); remoteFileInfo.setFiles(remoteFileDescs); return Lists.newArrayList(remoteFileInfo); } @Override public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { Map<String, Partition> partitionMap = Maps.newHashMap(); IcebergTable icebergTable = (IcebergTable) table; PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils. createMetadataTableInstance(icebergTable.getNativeTable(), MetadataTableType.PARTITIONS); if (icebergTable.isUnPartitioned()) { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { long lastUpdated = row.get(7, Long.class); Partition partition = new Partition(lastUpdated); return ImmutableList.of(partition); } } } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } else { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { StructProjection partitionData = row.get(0, StructProjection.class); int specId = row.get(1, Integer.class); long lastUpdated = row.get(9, Long.class); PartitionSpec spec = icebergTable.getNativeTable().specs().get(specId); Partition partition = new Partition(lastUpdated); String partitionName = PartitionUtil.convertIcebergPartitionToPartitionName(spec, partitionData); partitionMap.put(partitionName, partition); } } } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } ImmutableList.Builder<PartitionInfo> partitions = ImmutableList.builder(); partitionNames.forEach(partitionName -> partitions.add(partitionMap.get(partitionName))); return partitions.build(); } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) { if (!scannedTables.contains(key)) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." + key)) { collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit); } } } public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return new ArrayList<>(); } IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); List<PartitionKey> partitionKeys = new ArrayList<>(); List<FileScanTask> icebergSplitTasks = splitTasks.get(key); if (icebergSplitTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", dbName, tableName, predicate); } Set<List<String>> scannedPartitions = new HashSet<>(); PartitionSpec spec = icebergTable.getNativeTable().spec(); List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed(); for (FileScanTask fileScanTask : icebergSplitTasks) { org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition(); List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData); if (values.size() != partitionColumns.size()) { continue; } if (scannedPartitions.contains(values)) { continue; } else { scannedPartitions.add(values); } try { List<com.starrocks.catalog.Type> srTypes = new ArrayList<>(); for (PartitionField partitionField : spec.fields()) { if (partitionField.transform().isVoid()) { continue; } if (!partitionField.transform().isIdentity()) { Type sourceType = spec.schema().findType(partitionField.sourceId()); Type resultType = partitionField.transform().getResultType(sourceType); if (resultType == Types.DateType.get()) { resultType = Types.IntegerType.get(); } srTypes.add(fromIcebergType(resultType)); continue; } srTypes.add(icebergTable.getColumn(partitionField.name()).getType()); } if (icebergTable.hasPartitionTransformedEvolution()) { srTypes = partitionColumns.stream() .map(Column::getType) .collect(Collectors.toList()); } partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType())); } catch (Exception e) { LOG.error("create partition key failed.", e); throw new StarRocksConnectorException(e.getMessage()); } } return partitionKeys; } private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return; } long snapshotId = snapshot.get().snapshotId(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable(); Types.StructType schema = nativeTbl.schema().asStruct(); List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate); ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema); Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext); TableScan scan = nativeTbl.newScan().useSnapshot(snapshotId); if (enableCollectColumnStatistics()) { scan = scan.includeColumnStats(); } scan = scan.planWith(jobPlanningExecutor); if (icebergPredicate.op() != Expression.Operation.TRUE) { scan = scan.filter(icebergPredicate); } CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles( scan.planFiles(), scan.targetSplitSize()); CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator(); Iterator<FileScanTask> fileScanTasks; boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate) && limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest(); if (canPruneManifests) { fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit); } else { fileScanTasks = fileScanTaskIterator; } List<FileScanTask> icebergScanTasks = Lists.newArrayList(); long totalReadCount = 0; Set<String> filePaths = new HashSet<>(); while (fileScanTasks.hasNext()) { 
FileScanTask scanTask = fileScanTasks.next(); FileScanTask icebergSplitScanTask = scanTask; if (enableCollectColumnStatistics()) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) { icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key); } List<Types.NestedField> fullColumns = nativeTbl.schema().columns(); Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream() .filter(column -> column.type().isPrimitiveType()) .collect(Collectors.toMap(Types.NestedField::fieldId, column -> column.type().asPrimitiveType())); Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream() .filter(x -> x.transform().isIdentity()) .map(PartitionField::sourceId) .collect(Collectors.toSet()); List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream() .filter(column -> !identityPartitionIds.contains(column.fieldId()) && column.type().isPrimitiveType()) .collect(toImmutableList()); try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) { statisticProvider.updateIcebergFileStats( icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key); } } icebergScanTasks.add(icebergSplitScanTask); String filePath = icebergSplitScanTask.file().path().toString(); if (!filePaths.contains(filePath)) { filePaths.add(filePath); totalReadCount += scanTask.file().recordCount(); } if (canPruneManifests && totalReadCount >= limit) { break; } } try { fileScanTaskIterable.close(); fileScanTaskIterator.close(); } catch (IOException e) { } IcebergMetricsReporter.lastReport().ifPresent(scanReportWithCounter -> Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.ScanMetrics." + scanReportWithCounter.getScanReport().tableName() + " / No_" + scanReportWithCounter.getCount(), scanReportWithCounter.getScanReport().scanMetrics().toString())); splitTasks.put(key, icebergScanTasks); scannedTables.add(key); } @Override public Statistics getTableStatistics(OptimizerContext session, Table table, Map<ColumnRefOperator, Column> columns, List<PartitionKey> partitionKeys, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); long snapshotId; if (snapshot.isPresent()) { snapshotId = snapshot.get().snapshotId(); } else { Statistics.Builder statisticsBuilder = Statistics.builder(); statisticsBuilder.setOutputRowCount(1); statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet())); return statisticsBuilder.build(); } IcebergFilter key = IcebergFilter.of( icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); if (!session.getSessionVariable().enableIcebergColumnStatistics()) { List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate); } try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) { return statisticProvider.getCardinalityStats(columns, icebergScanTasks); } } else { return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate); } } private IcebergSplitScanTask buildIcebergSplitScanTask( FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) { long offset = fileScanTask.start(); long length = fileScanTask.length(); DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats(); DeleteFile[] deleteFiles = fileScanTask.deletes().stream() .map(DeleteFile::copyWithoutStats) .toArray(DeleteFile[]::new); PartitionSpec taskSpec = fileScanTask.spec(); Schema taskSchema = fileScanTask.spec().schema(); String schemaString; String partitionString; FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(), taskSchema.schemaId(), taskSpec.specId()); Pair<String, String> schema = fileScanTaskSchemas.get(schemaKey); if (schema == null) { schemaString = SchemaParser.toJson(fileScanTask.spec().schema()); partitionString = PartitionSpecParser.toJson(fileScanTask.spec()); fileScanTaskSchemas.put(schemaKey, Pair.create(schemaString, partitionString)); } else { schemaString = schema.first; partitionString = schema.second; } ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true); BaseFileScanTask baseFileScanTask = new BaseFileScanTask( dataFileWithoutStats, deleteFiles, schemaString, partitionString, residualEvaluator); return new IcebergSplitScanTask(offset, length, baseFileScanTask); } @Override public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) { if (isResourceMappingCatalog(catalogName)) { refreshTableWithResource(table); } else { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception e) { LOG.error("Failed to refresh table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } } private void refreshTableWithResource(Table table) { IcebergTable icebergTable = (IcebergTable) table; org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable(); try { if (nativeTable instanceof BaseTable) { BaseTable baseTable = (BaseTable) nativeTable; if (baseTable.operations().refresh() == null) { throw new NoSuchTableException("No such table: %s", nativeTable.name()); } } else { throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name()); } } catch (NoSuchTableException e) { throw new StarRocksConnectorException("No such table %s", nativeTable.name()); } catch (IllegalStateException ei) { throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" + " may have been dropped. You should re-create the external table. 
cause %s", nativeTable.name(), ei.getMessage()); } icebergTable.resetSnapshot(); } @Override public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) { boolean isOverwrite = false; if (!commitInfos.isEmpty()) { TSinkCommitInfo sinkCommitInfo = commitInfos.get(0); if (sinkCommitInfo.isSetIs_overwrite()) { isOverwrite = sinkCommitInfo.is_overwrite; } } List<TIcebergDataFile> dataFiles = commitInfos.stream() .map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList()); IcebergTable table = (IcebergTable) getTable(dbName, tableName); org.apache.iceberg.Table nativeTbl = table.getNativeTable(); Transaction transaction = nativeTbl.newTransaction(); BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite); PartitionSpec partitionSpec = nativeTbl.spec(); for (TIcebergDataFile dataFile : dataFiles) { Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile); DataFiles.Builder builder = DataFiles.builder(partitionSpec) .withMetrics(metrics) .withPath(dataFile.path) .withFormat(dataFile.format) .withRecordCount(dataFile.record_count) .withFileSizeInBytes(dataFile.file_size_in_bytes) .withSplitOffsets(dataFile.split_offsets); if (partitionSpec.isPartitioned()) { String relativePartitionLocation = getIcebergRelativePartitionPath( nativeTbl.location(), dataFile.partition_path); PartitionData partitionData = partitionDataFromPath( relativePartitionLocation, partitionSpec); builder.withPartition(partitionData); } batchWrite.addFile(builder.build()); } try { batchWrite.commit(); transaction.commitTransaction(); asyncRefreshOthersFeMetadataCache(dbName, tableName); } catch (Exception e) { List<String> toDeleteFiles = dataFiles.stream() .map(TIcebergDataFile::getPath) .collect(Collectors.toList()); icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles); LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } finally { icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) { refreshOtherFeExecutor.execute(() -> { LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); try { GlobalStateMgr.getCurrentState().refreshOthersFeTable( new TableName(catalogName, dbName, tableName), new ArrayList<>(), false); } catch (DdlException e) { LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", catalogName, dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); }); } public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) { return isOverwrite ? 
new DynamicOverwrite(transaction) : new Append(transaction); } public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) { PartitionData data = new PartitionData(spec.fields().size()); String[] partitions = relativePartitionPath.split("/", -1); List<PartitionField> partitionFields = spec.fields(); for (int i = 0; i < partitions.length; i++) { PartitionField field = partitionFields.get(i); String[] parts = partitions[i].split("=", 2); Preconditions.checkArgument(parts.length == 2 && parts[0] != null && field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]); org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type(); data.set(i, Conversions.fromPartitionString(sourceType, parts[1])); } return data; } public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) { tableLocation = tableLocation.endsWith("/") ? tableLocation.substring(0, tableLocation.length() - 1) : tableLocation; String tableLocationWithData = tableLocation + "/data/"; String path = PartitionUtil.getSuffixName(tableLocationWithData, partitionLocation); if (path.startsWith("/")) { path = path.substring(1); } if (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } return path; } public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) { if (predicate == null) { return true; } List<ColumnRefOperator> columnRefOperators = predicate.getColumnRefs(); List<String> partitionColNames = table.getPartitionColumnNames(); for (ColumnRefOperator c : columnRefOperators) { if (!partitionColNames.contains(c.getName())) { return false; } } return true; } private boolean enablePruneManifest() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().isEnablePruneIcebergManifest(); } private boolean enableCollectColumnStatistics() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().enableIcebergColumnStatistics(); } @Override public void clear() { splitTasks.clear(); tables.clear(); databases.clear(); scannedTables.clear(); IcebergMetricsReporter.remove(); } interface BatchWrite { void addFile(DataFile file); void commit(); } static class Append implements BatchWrite { private final AppendFiles append; public Append(Transaction txn) { append = txn.newAppend(); } @Override public void addFile(DataFile file) { append.appendFile(file); } @Override public void commit() { append.commit(); } } static class DynamicOverwrite implements BatchWrite { private final ReplacePartitions replace; public DynamicOverwrite(Transaction txn) { replace = txn.newReplacePartitions(); } @Override public void addFile(DataFile file) { replace.addFile(file); } @Override public void commit() { replace.commit(); } } public static class PartitionData implements StructLike { private final Object[] values; private PartitionData(int size) { this.values = new Object[size]; } @Override public int size() { return values.length; } @Override public <T> T get(int pos, Class<T> javaClass) { return javaClass.cast(values[pos]); } @Override public <T> void set(int pos, T value) { if (value instanceof ByteBuffer) { ByteBuffer buffer = (ByteBuffer) value; byte[] bytes = new byte[buffer.remaining()]; buffer.duplicate().get(bytes); values[pos] = bytes; } else { values[pos] = value; } } 
@Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } PartitionData that = (PartitionData) other; return Arrays.equals(values, that.values); } @Override public int hashCode() { return Arrays.hashCode(values); } } @Override public CloudConfiguration getCloudConfiguration() { return hdfsEnvironment.getCloudConfiguration(); } private static class FileScanTaskSchema { private final String dbName; private final String tableName; private final int schemaId; private final int specId; public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) { this.dbName = dbName; this.tableName = tableName; this.schemaId = schemaId; this.specId = specId; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } FileScanTaskSchema that = (FileScanTaskSchema) o; return schemaId == that.schemaId && specId == that.specId && Objects.equals(dbName, that.dbName) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { return Objects.hash(dbName, tableName, schemaId, specId); } } }
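The fix in this record follows a best-effort refresh pattern: try to refresh the cached table metadata and, if the refresh throws, invalidate the entry so a stale copy is never served. Below is a minimal, generic sketch of that pattern using hypothetical names (MetadataCache, Loader) rather than the real CachingIcebergCatalog API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class MetadataCache<K, V> {
    private final Map<K, V> cache = new ConcurrentHashMap<>();

    interface Loader<K, V> {
        V load(K key) throws Exception;
    }

    /** Best-effort refresh: reload the entry, and on failure drop it from the cache. */
    void refreshOrInvalidate(K key, Loader<K, V> loader) {
        try {
            cache.put(key, loader.load(key));
        } catch (Exception e) {
            // Refresh failed: serving a stale entry is worse than a cache miss,
            // so invalidate and let the next lookup reload from the source.
            cache.remove(key);
        }
    }
}

In the alterTable change above, the same idea appears concretely: refreshTable runs inside the try, and the catch calls invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)).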
That is true, except I thought (maybe incorrectly) that Quarkus shades Caffeine, so it would only apply to your extension if you disable the logger at the shaded path? I might be completely wrong, in which case you're very much right.
public CompletableFuture<Object> get(Object key, Function<Object, Object> valueLoader) { if (key == null) { throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); } CompletableFuture<Object> newCacheValue = new CompletableFuture<Object>(); CompletableFuture<Object> existingCacheValue = cache.asMap().putIfAbsent(key, newCacheValue); if (existingCacheValue == null) { try { Object value = valueLoader.apply(key); newCacheValue.complete(NullValueConverter.toCacheValue(value)); return unwrapCacheValue(newCacheValue); } catch (Throwable t) { cache.asMap().remove(key, newCacheValue); newCacheValue.completeExceptionally(t); return newCacheValue; } } else { return unwrapCacheValue(existingCacheValue); } }
newCacheValue.completeExceptionally(t);
public CompletableFuture<Object> get(Object key, Function<Object, Object> valueLoader) { if (key == null) { throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); } CompletableFuture<Object> newCacheValue = new CompletableFuture<Object>(); CompletableFuture<Object> existingCacheValue = cache.asMap().putIfAbsent(key, newCacheValue); if (existingCacheValue == null) { try { Object value = valueLoader.apply(key); newCacheValue.complete(NullValueConverter.toCacheValue(value)); } catch (Throwable t) { cache.asMap().remove(key, newCacheValue); newCacheValue.complete(new CaffeineComputationThrowable(t)); } return unwrapCacheValueOrThrowable(key, newCacheValue); } else { return unwrapCacheValueOrThrowable(key, existingCacheValue); } }
class CaffeineCache { public static final String NULL_KEYS_NOT_SUPPORTED_MSG = "Null keys are not supported by the Quarkus application data cache"; private AsyncCache<Object, Object> cache; private String name; private Integer initialCapacity; private Long maximumSize; private Duration expireAfterWrite; private Duration expireAfterAccess; private Object defaultKey; public CaffeineCache(CaffeineCacheInfo cacheInfo) { this.name = cacheInfo.name; Caffeine<Object, Object> builder = Caffeine.newBuilder(); if (cacheInfo.initialCapacity != null) { this.initialCapacity = cacheInfo.initialCapacity; builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { this.maximumSize = cacheInfo.maximumSize; builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { this.expireAfterWrite = cacheInfo.expireAfterWrite; builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { this.expireAfterAccess = cacheInfo.expireAfterAccess; builder.expireAfterAccess(cacheInfo.expireAfterAccess); } cache = builder.buildAsync(); } /** * Returns a {@link CompletableFuture} holding the cache value identified by {@code key}, obtaining that value from * {@code valueLoader} if necessary. The value computation is done synchronously on the calling thread and the * {@link CompletableFuture} is immediately completed before being returned. * * @param key cache key * @param valueLoader function used to compute the cache value if {@code key} is not already associated with a value * @return a {@link CompletableFuture} holding the cache value * @throws CacheException if an exception is thrown during the cache value computation */ private CompletableFuture<Object> unwrapCacheValue(CompletableFuture<Object> cacheValue) { return cacheValue.thenApply(new Function<Object, Object>() { @Override public Object apply(Object value) { return NullValueConverter.fromCacheValue(value); } }); } public void invalidate(Object key) { if (key == null) { throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); } cache.synchronous().invalidate(key); } public void invalidateAll() { cache.synchronous().invalidateAll(); } public String getName() { return name; } public Integer getInitialCapacity() { return initialCapacity; } public Long getMaximumSize() { return maximumSize; } public Duration getExpireAfterWrite() { return expireAfterWrite; } public Duration getExpireAfterAccess() { return expireAfterAccess; } /** * Returns the unique and immutable default key for the current cache. This key is used by the annotations caching API when * a no-args method annotated with {@link io.quarkus.cache.CacheResult CacheResult} or * {@link io.quarkus.cache.CacheInvalidate CacheInvalidate} is invoked. * * @return default cache key */ public Object getDefaultKey() { if (defaultKey == null) { defaultKey = new DefaultCacheKey(getName()); } return defaultKey; } }
class CaffeineCache { public static final String NULL_KEYS_NOT_SUPPORTED_MSG = "Null keys are not supported by the Quarkus application data cache"; private AsyncCache<Object, Object> cache; private String name; private Integer initialCapacity; private Long maximumSize; private Duration expireAfterWrite; private Duration expireAfterAccess; private Object defaultKey; public CaffeineCache(CaffeineCacheInfo cacheInfo) { this.name = cacheInfo.name; Caffeine<Object, Object> builder = Caffeine.newBuilder(); if (cacheInfo.initialCapacity != null) { this.initialCapacity = cacheInfo.initialCapacity; builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { this.maximumSize = cacheInfo.maximumSize; builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { this.expireAfterWrite = cacheInfo.expireAfterWrite; builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { this.expireAfterAccess = cacheInfo.expireAfterAccess; builder.expireAfterAccess(cacheInfo.expireAfterAccess); } cache = builder.buildAsync(); } /** * Returns a {@link CompletableFuture} holding the cache value identified by {@code key}, obtaining that value from * {@code valueLoader} if necessary. The value computation is done synchronously on the calling thread and the * {@link CompletableFuture} is immediately completed before being returned. * * @param key cache key * @param valueLoader function used to compute the cache value if {@code key} is not already associated with a value * @return a {@link CompletableFuture} holding the cache value * @throws CacheException if an exception is thrown during the cache value computation */ private CompletableFuture<Object> unwrapCacheValueOrThrowable(Object key, CompletableFuture<Object> cacheValue) { return cacheValue.thenApply(new Function<Object, Object>() { @Override public Object apply(Object value) { if (value instanceof CaffeineComputationThrowable) { Throwable cause = ((CaffeineComputationThrowable) value).getCause(); if (cause instanceof RuntimeException) { throw (RuntimeException) cause; } else { throw new CacheException(cause); } } else { return NullValueConverter.fromCacheValue(value); } } }); } public void invalidate(Object key) { if (key == null) { throw new NullPointerException(NULL_KEYS_NOT_SUPPORTED_MSG); } cache.synchronous().invalidate(key); } public void invalidateAll() { cache.synchronous().invalidateAll(); } public String getName() { return name; } public Integer getInitialCapacity() { return initialCapacity; } public Long getMaximumSize() { return maximumSize; } public Duration getExpireAfterWrite() { return expireAfterWrite; } public Duration getExpireAfterAccess() { return expireAfterAccess; } /** * Returns the unique and immutable default key for the current cache. This key is used by the annotations caching API when * a no-args method annotated with {@link io.quarkus.cache.CacheResult CacheResult} or * {@link io.quarkus.cache.CacheInvalidate CacheInvalidate} is invoked. * * @return default cache key */ public Object getDefaultKey() { if (defaultKey == null) { defaultKey = new DefaultCacheKey(getName()); } return defaultKey; } }
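A note on the pattern in the `get` method above: the fresh `CompletableFuture` is registered via `putIfAbsent` before the value is computed, so concurrent callers for the same key share a single computation, and a failed computation is removed so the failure is not cached. Below is a minimal, self-contained sketch of the same compute-once idiom over a plain `ConcurrentHashMap`; the class and method names are hypothetical, and it completes the future exceptionally rather than storing a `CaffeineComputationThrowable` wrapper as the Quarkus code does.

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

// Hypothetical stand-in for the compute-once caching idiom used in CaffeineCache.get().
class ComputeOnceCache<K, V> {
    private final ConcurrentMap<K, CompletableFuture<V>> map = new ConcurrentHashMap<>();

    CompletableFuture<V> get(K key, Function<K, V> loader) {
        CompletableFuture<V> fresh = new CompletableFuture<>();
        // Register the future before computing: losers of the race reuse the winner's future.
        CompletableFuture<V> existing = map.putIfAbsent(key, fresh);
        if (existing != null) {
            return existing;
        }
        try {
            fresh.complete(loader.apply(key));
        } catch (Throwable t) {
            map.remove(key, fresh); // drop the entry so a failure is not cached
            fresh.completeExceptionally(t);
        }
        return fresh;
    }
}
```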
Were we getting `null` for the `returnTypeSymbol` here earlier? Asking for my own understanding of this change.
public void visit(StartActionNode startActionNode) { startActionNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUTURE) { FutureTypeSymbol futureTypeSymbol = (FutureTypeSymbol) returnTypeSymbol; TypeSymbol typeSymbol = futureTypeSymbol.typeParameter().orElse(null); checkAndSetTypeResult(typeSymbol); } else { TypeSymbol nilTypeSymbol = semanticModel.types().NIL; checkAndSetTypeResult(nilTypeSymbol); } }
TypeSymbol nilTypeSymbol = semanticModel.types().NIL;
public void visit(StartActionNode startActionNode) { startActionNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUTURE) { FutureTypeSymbol futureTypeSymbol = (FutureTypeSymbol) returnTypeSymbol; TypeSymbol typeSymbol = futureTypeSymbol.typeParameter().orElse(null); checkAndSetTypeResult(typeSymbol); } else { TypeSymbol nilTypeSymbol = semanticModel.types().NIL; checkAndSetTypeResult(nilTypeSymbol); } }
class FunctionCallExpressionTypeFinder extends NodeVisitor { private final SemanticModel semanticModel; private FunctionCallExpressionNode functionCallExpr; private TypeSymbol returnTypeSymbol; private boolean resultFound = false; public FunctionCallExpressionTypeFinder(SemanticModel semanticModel, FunctionCallExpressionNode functionCallExpr) { this.semanticModel = semanticModel; this.functionCallExpr = functionCallExpr; } @Override public void visit(ObjectFieldNode objectFieldNode) { Symbol symbol = semanticModel.symbol(objectFieldNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) { Symbol symbol = semanticModel.symbol(recordFieldWithDefaultValueNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { Symbol symbol = semanticModel.symbol(moduleVariableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(AssignmentStatementNode assignmentStatementNode) { Symbol symbol = semanticModel.symbol(assignmentStatementNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); if (resultFound) { return; } assignmentStatementNode.varRef().accept(this); } @Override public void visit(VariableDeclarationNode variableDeclarationNode) { Symbol symbol = semanticModel.symbol(variableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(SpecificFieldNode specificFieldNode) { semanticModel.symbol(specificFieldNode) .map(symbol -> (RecordFieldSymbol) symbol) .ifPresent(recordFieldSymbol -> checkAndSetTypeResult(recordFieldSymbol.typeDescriptor())); } @Override public void visit(BinaryExpressionNode binaryExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(binaryExpressionNode.lhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } typeSymbol = semanticModel.typeOf(binaryExpressionNode.rhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(LetExpressionNode letExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(letExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } letExpressionNode.parent().accept(this); } @Override public void visit(LetVariableDeclarationNode letVariableDeclarationNode) { Optional<Symbol> symbol1 = semanticModel.symbol(letVariableDeclarationNode); symbol1.map(symbol -> (VariableSymbol) symbol) .map(VariableSymbol::typeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionCallExpressionNode fnCallExprNode) { fnCallExprNode.functionName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(fnCallExprNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } fnCallExprNode.parent().accept(this); } @Override public void visit(MethodCallExpressionNode methodCallExpressionNode) { methodCallExpressionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = 
semanticModel.typeOf(methodCallExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) { remoteMethodCallActionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(remoteMethodCallActionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { semanticModel.symbol(simpleNameReferenceNode) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(ErrorConstructorExpressionNode errorConstructorExpressionNode) { semanticModel.typeOf(errorConstructorExpressionNode) .map(CommonUtil::getRawType) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(PositionalArgumentNode positionalArgumentNode) { positionalArgumentNode.parent().accept(this); if (!resultFound) { return; } if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { checkAndSetTypeResult(semanticModel.types().STRING); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty() || params.get().isEmpty()) { return; } SeparatedNodeList<FunctionArgumentNode> arguments; switch (positionalArgumentNode.parent().kind()) { case METHOD_CALL: MethodCallExpressionNode methodCallExpressionNode = (MethodCallExpressionNode) positionalArgumentNode.parent(); arguments = methodCallExpressionNode.arguments(); break; case FUNCTION_CALL: FunctionCallExpressionNode functionCallExpressionNode = (FunctionCallExpressionNode) positionalArgumentNode.parent(); arguments = functionCallExpressionNode.arguments(); break; case REMOTE_METHOD_CALL_ACTION: RemoteMethodCallActionNode remoteMethodCallActionNode = (RemoteMethodCallActionNode) positionalArgumentNode.parent(); arguments = remoteMethodCallActionNode.arguments(); break; case PARENTHESIZED_ARG_LIST: ParenthesizedArgList parenthesizedArgList = (ParenthesizedArgList) positionalArgumentNode.parent(); arguments = parenthesizedArgList.arguments(); break; default: return; } if (arguments != null) { int argIndex = -1; for (int i = 0; i < arguments.size(); i++) { if (arguments.get(i).equals(positionalArgumentNode)) { argIndex = i; break; } } if (argIndex < 0 || params.get().size() < argIndex + 1) { return; } ParameterSymbol parameterSymbol = params.get().get(argIndex); checkAndSetTypeResult(parameterSymbol.typeDescriptor()); } } @Override public void visit(NamedArgumentNode namedArgumentNode) { namedArgumentNode.parent().accept(this); if (!resultFound) { return; } if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { ErrorTypeSymbol errorTypeSymbol = (ErrorTypeSymbol) returnTypeSymbol; TypeSymbol detailType = CommonUtil.getRawType(errorTypeSymbol.detailTypeDescriptor()); if (detailType.typeKind() != TypeDescKind.RECORD) { checkAndSetTypeResult(semanticModel.types().ANYDATA); return; } RecordTypeSymbol recordTypeSymbol = (RecordTypeSymbol) detailType; RecordFieldSymbol fieldSymbol = recordTypeSymbol.fieldDescriptors() .get(namedArgumentNode.argumentName().name().text()); if (fieldSymbol == null) { resetResult(); return; } checkAndSetTypeResult(fieldSymbol.typeDescriptor()); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty()) { return; } params.get().stream().filter(param -> param.getName().isPresent() && param.getName().get().equals(namedArgumentNode.argumentName().name().text())).findFirst() .ifPresent(parameterSymbol -> 
this.checkAndSetTypeResult(parameterSymbol.typeDescriptor())); } /** * Returns the parameter symbols once the {@link * * @return Optional parameter symbol list */ private Optional<List<ParameterSymbol>> getParameterSymbols() { FunctionTypeSymbol functionTypeSymbol; if (returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; } else if (returnTypeSymbol.kind() == SymbolKind.CLASS) { Optional<MethodSymbol> methodSymbol = ((ClassSymbol) returnTypeSymbol).initMethod(); if (methodSymbol.isEmpty()) { return Optional.empty(); } functionTypeSymbol = methodSymbol.get().typeDescriptor(); } else { return Optional.empty(); } return functionTypeSymbol.params(); } @Override public void visit(ParenthesizedArgList parenthesizedArgList) { parenthesizedArgList.parent().accept(this); } @Override public void visit(ExplicitNewExpressionNode explicitNewExpressionNode) { semanticModel.typeOf(explicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) .stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(ImplicitNewExpressionNode implicitNewExpressionNode) { semanticModel.typeOf(implicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) .stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionDefinitionNode node) { semanticModel.symbol(node) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionBodyBlockNode node) { node.parent().accept(this); } @Override public void visit(BlockStatementNode node) { node.parent().accept(this); } @Override public void visit(ReturnStatementNode returnStatementNode) { this.semanticModel.typeOf(returnStatementNode).ifPresent(this::checkAndSetTypeResult); if (resultFound) { return; } returnStatementNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { FunctionTypeSymbol functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; functionTypeSymbol.returnTypeDescriptor().ifPresentOrElse(this::checkAndSetTypeResult, this::resetResult); } else { resetResult(); } } @Override public void visit(UnaryExpressionNode unaryExpressionNode) { semanticModel.typeOf(unaryExpressionNode).ifPresent(this::checkAndSetTypeResult); if (!resultFound) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); } } @Override public void visit(IfElseStatementNode node) { if (PositionUtil.isWithinLineRange(functionCallExpr.lineRange(), node.condition().lineRange())) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); return; } node.parent().accept(this); } @Override public void visit(FailStatementNode failStatementNode) { checkAndSetTypeResult(semanticModel.types().ERROR); } @Override public void visit(WhileStatementNode whileStatementNode) { if (PositionUtil.isWithinLineRange(functionCallExpr.lineRange(), whileStatementNode.condition().lineRange())) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); return; } whileStatementNode.parent().accept(this); } @Override public void visit(ConditionalExpressionNode conditionalExpressionNode) { Optional<TypeSymbol> typeSymbol = semanticModel.typeOf(conditionalExpressionNode.middleExpression()) .filter(type -> type.typeKind() != TypeDescKind.COMPILATION_ERROR) .or(() -> semanticModel.typeOf(conditionalExpressionNode.endExpression()) .filter(type -> type.typeKind() != TypeDescKind.COMPILATION_ERROR)); if (typeSymbol.isPresent()) { 
checkAndSetTypeResult(typeSymbol.get()); } else { conditionalExpressionNode.parent().accept(this); } } @Override public void visit(CheckExpressionNode checkExpressionNode) { if (checkExpressionNode.parent().kind() == SyntaxKind.CALL_STATEMENT) { checkAndSetTypeResult(semanticModel.types().ERROR); } else { checkExpressionNode.parent().accept(this); } } @Override public void visit(PanicStatementNode panicStatementNode) { checkAndSetTypeResult(semanticModel.types().ERROR); } @Override protected void visitSyntaxNode(Node node) { } private void checkAndSetTypeResult(TypeSymbol typeSymbol) { if (typeSymbol == null) { return; } this.returnTypeSymbol = typeSymbol; if (typeSymbol.typeKind() != TypeDescKind.COMPILATION_ERROR) { resultFound = true; } } private void resetResult() { this.returnTypeSymbol = null; this.resultFound = false; } /** * Get the type symbol of the return type of the function call expression provided to this instance. * * @return Optional type symbol of the return type of function call expression */ public Optional<TypeSymbol> getReturnTypeSymbol() { return Optional.ofNullable(returnTypeSymbol); } }
class FunctionCallExpressionTypeFinder extends NodeVisitor { private final SemanticModel semanticModel; private FunctionCallExpressionNode functionCallExpr; private TypeSymbol returnTypeSymbol; private boolean resultFound = false; public FunctionCallExpressionTypeFinder(SemanticModel semanticModel, FunctionCallExpressionNode functionCallExpr) { this.semanticModel = semanticModel; this.functionCallExpr = functionCallExpr; } @Override public void visit(ObjectFieldNode objectFieldNode) { Symbol symbol = semanticModel.symbol(objectFieldNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) { Symbol symbol = semanticModel.symbol(recordFieldWithDefaultValueNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { Symbol symbol = semanticModel.symbol(moduleVariableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(AssignmentStatementNode assignmentStatementNode) { Symbol symbol = semanticModel.symbol(assignmentStatementNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); if (resultFound) { return; } assignmentStatementNode.varRef().accept(this); } @Override public void visit(VariableDeclarationNode variableDeclarationNode) { Symbol symbol = semanticModel.symbol(variableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(SpecificFieldNode specificFieldNode) { semanticModel.symbol(specificFieldNode) .map(symbol -> (RecordFieldSymbol) symbol) .ifPresent(recordFieldSymbol -> checkAndSetTypeResult(recordFieldSymbol.typeDescriptor())); } @Override public void visit(BinaryExpressionNode binaryExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(binaryExpressionNode.lhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } typeSymbol = semanticModel.typeOf(binaryExpressionNode.rhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(LetExpressionNode letExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(letExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } letExpressionNode.parent().accept(this); } @Override public void visit(LetVariableDeclarationNode letVariableDeclarationNode) { Optional<Symbol> symbol1 = semanticModel.symbol(letVariableDeclarationNode); symbol1.map(symbol -> (VariableSymbol) symbol) .map(VariableSymbol::typeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionCallExpressionNode fnCallExprNode) { fnCallExprNode.functionName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(fnCallExprNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } fnCallExprNode.parent().accept(this); } @Override public void visit(MethodCallExpressionNode methodCallExpressionNode) { methodCallExpressionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = 
semanticModel.typeOf(methodCallExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) { remoteMethodCallActionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(remoteMethodCallActionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { semanticModel.symbol(simpleNameReferenceNode) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(ErrorConstructorExpressionNode errorConstructorExpressionNode) { semanticModel.typeOf(errorConstructorExpressionNode) .map(CommonUtil::getRawType) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(PositionalArgumentNode positionalArgumentNode) { positionalArgumentNode.parent().accept(this); if (!resultFound) { return; } if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { checkAndSetTypeResult(semanticModel.types().STRING); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty() || params.get().isEmpty()) { return; } SeparatedNodeList<FunctionArgumentNode> arguments; switch (positionalArgumentNode.parent().kind()) { case METHOD_CALL: MethodCallExpressionNode methodCallExpressionNode = (MethodCallExpressionNode) positionalArgumentNode.parent(); arguments = methodCallExpressionNode.arguments(); break; case FUNCTION_CALL: FunctionCallExpressionNode functionCallExpressionNode = (FunctionCallExpressionNode) positionalArgumentNode.parent(); arguments = functionCallExpressionNode.arguments(); break; case REMOTE_METHOD_CALL_ACTION: RemoteMethodCallActionNode remoteMethodCallActionNode = (RemoteMethodCallActionNode) positionalArgumentNode.parent(); arguments = remoteMethodCallActionNode.arguments(); break; case PARENTHESIZED_ARG_LIST: ParenthesizedArgList parenthesizedArgList = (ParenthesizedArgList) positionalArgumentNode.parent(); arguments = parenthesizedArgList.arguments(); break; default: return; } if (arguments != null) { int argIndex = -1; for (int i = 0; i < arguments.size(); i++) { if (arguments.get(i).equals(positionalArgumentNode)) { argIndex = i; break; } } if (argIndex < 0 || params.get().size() < argIndex + 1) { return; } ParameterSymbol parameterSymbol = params.get().get(argIndex); checkAndSetTypeResult(parameterSymbol.typeDescriptor()); } } @Override public void visit(NamedArgumentNode namedArgumentNode) { namedArgumentNode.parent().accept(this); if (!resultFound) { return; } if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { ErrorTypeSymbol errorTypeSymbol = (ErrorTypeSymbol) returnTypeSymbol; TypeSymbol detailType = CommonUtil.getRawType(errorTypeSymbol.detailTypeDescriptor()); if (detailType.typeKind() != TypeDescKind.RECORD) { checkAndSetTypeResult(semanticModel.types().ANYDATA); return; } RecordTypeSymbol recordTypeSymbol = (RecordTypeSymbol) detailType; RecordFieldSymbol fieldSymbol = recordTypeSymbol.fieldDescriptors() .get(namedArgumentNode.argumentName().name().text()); if (fieldSymbol == null) { resetResult(); return; } checkAndSetTypeResult(fieldSymbol.typeDescriptor()); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty()) { return; } params.get().stream().filter(param -> param.getName().isPresent() && param.getName().get().equals(namedArgumentNode.argumentName().name().text())).findFirst() .ifPresent(parameterSymbol -> 
this.checkAndSetTypeResult(parameterSymbol.typeDescriptor())); } /** * Returns the parameter symbols once the {@link * * @return Optional parameter symbol list */ private Optional<List<ParameterSymbol>> getParameterSymbols() { FunctionTypeSymbol functionTypeSymbol; if (returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; } else if (returnTypeSymbol.kind() == SymbolKind.CLASS) { Optional<MethodSymbol> methodSymbol = ((ClassSymbol) returnTypeSymbol).initMethod(); if (methodSymbol.isEmpty()) { return Optional.empty(); } functionTypeSymbol = methodSymbol.get().typeDescriptor(); } else { return Optional.empty(); } return functionTypeSymbol.params(); } @Override public void visit(ParenthesizedArgList parenthesizedArgList) { parenthesizedArgList.parent().accept(this); } @Override public void visit(ExplicitNewExpressionNode explicitNewExpressionNode) { semanticModel.typeOf(explicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) .stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(ImplicitNewExpressionNode implicitNewExpressionNode) { semanticModel.typeOf(implicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) .stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionDefinitionNode node) { semanticModel.symbol(node) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionBodyBlockNode node) { node.parent().accept(this); } @Override public void visit(BlockStatementNode node) { node.parent().accept(this); } @Override public void visit(ReturnStatementNode returnStatementNode) { this.semanticModel.typeOf(returnStatementNode).ifPresent(this::checkAndSetTypeResult); if (resultFound) { return; } returnStatementNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { FunctionTypeSymbol functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; functionTypeSymbol.returnTypeDescriptor().ifPresentOrElse(this::checkAndSetTypeResult, this::resetResult); } else { resetResult(); } } @Override public void visit(UnaryExpressionNode unaryExpressionNode) { semanticModel.typeOf(unaryExpressionNode).ifPresent(this::checkAndSetTypeResult); if (!resultFound) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); } } @Override public void visit(IfElseStatementNode node) { if (PositionUtil.isWithinLineRange(functionCallExpr.lineRange(), node.condition().lineRange())) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); return; } node.parent().accept(this); } @Override public void visit(FailStatementNode failStatementNode) { checkAndSetTypeResult(semanticModel.types().ERROR); } @Override public void visit(WhileStatementNode whileStatementNode) { if (PositionUtil.isWithinLineRange(functionCallExpr.lineRange(), whileStatementNode.condition().lineRange())) { checkAndSetTypeResult(semanticModel.types().BOOLEAN); return; } whileStatementNode.parent().accept(this); } @Override public void visit(ConditionalExpressionNode conditionalExpressionNode) { Optional<TypeSymbol> typeSymbol = semanticModel.typeOf(conditionalExpressionNode.middleExpression()) .filter(type -> type.typeKind() != TypeDescKind.COMPILATION_ERROR) .or(() -> semanticModel.typeOf(conditionalExpressionNode.endExpression()) .filter(type -> type.typeKind() != TypeDescKind.COMPILATION_ERROR)); if (typeSymbol.isPresent()) { 
checkAndSetTypeResult(typeSymbol.get()); } else { conditionalExpressionNode.parent().accept(this); } } @Override public void visit(CheckExpressionNode checkExpressionNode) { if (checkExpressionNode.parent().kind() == SyntaxKind.CALL_STATEMENT) { checkAndSetTypeResult(semanticModel.types().ERROR); } else { checkExpressionNode.parent().accept(this); } } @Override public void visit(PanicStatementNode panicStatementNode) { checkAndSetTypeResult(semanticModel.types().ERROR); } @Override protected void visitSyntaxNode(Node node) { } private void checkAndSetTypeResult(TypeSymbol typeSymbol) { if (typeSymbol == null) { return; } this.returnTypeSymbol = typeSymbol; if (typeSymbol.typeKind() != TypeDescKind.COMPILATION_ERROR) { resultFound = true; } } private void resetResult() { this.returnTypeSymbol = null; this.resultFound = false; } /** * Get the type symbol of the return type of the function call expression provided to this instance. * * @return Optional type symbol of the return type of function call expression */ public Optional<TypeSymbol> getReturnTypeSymbol() { return Optional.ofNullable(returnTypeSymbol); } }
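On the review comment above: before this change the visitor could leave `returnTypeSymbol` as `null` when no ancestor resolved to a future type; the `else` branch now falls back to nil explicitly. The typing rule it implements is that `start foo()` produces a `future<T>`, so the call's contextual type is `T` when a future is expected and nil otherwise. A small self-contained illustration of that rule with hypothetical stand-in classes (this is not the Ballerina API):

```java
// Hypothetical stand-ins illustrating how a start action's contextual type is derived.
enum TypeKind { FUTURE, NIL, STRING }

class Type {
    final TypeKind kind;
    final Type typeParameter; // only meaningful when kind == FUTURE

    Type(TypeKind kind, Type typeParameter) {
        this.kind = kind;
        this.typeParameter = typeParameter;
    }
}

class StartActionTyping {
    // `start foo()` is typed future<T>; the call itself is contextually typed T,
    // and defaults to nil when the surrounding context expects no future.
    static Type contextualTypeOfCall(Type expectedByParent) {
        if (expectedByParent != null && expectedByParent.kind == TypeKind.FUTURE
                && expectedByParent.typeParameter != null) {
            return expectedByParent.typeParameter;
        }
        return new Type(TypeKind.NIL, null);
    }

    public static void main(String[] args) {
        Type futureOfString = new Type(TypeKind.FUTURE, new Type(TypeKind.STRING, null));
        System.out.println(contextualTypeOfCall(futureOfString).kind); // STRING
        System.out.println(contextualTypeOfCall(null).kind);           // NIL
    }
}
```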
This should be removed too.
public void createContainer(ContainerName containerName, NodeSpec node, ContainerData containerData) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.info("Creating container " + containerName); Inet6Address ipV6Address = environment.getIpAddresses().getIPv6Address(node.getHostname()).orElseThrow( () -> new RuntimeException("Unable to find a valid IPv6 address for " + node.getHostname() + ". Missing an AAAA DNS entry?")); String configServers = String.join(",", environment.getConfigServerHostNames()); Docker.CreateContainerCommand command = docker.createContainerCommand( node.getWantedDockerImage().get(), ContainerResources.from(node.getMinCpuCores(), node.getMinMainMemoryAvailableGb()), containerName, node.getHostname()) .withManagedBy(MANAGER_NAME) .withEnvironment("VESPA_CONFIGSERVERS", configServers) .withEnvironment("CONTAINER_ENVIRONMENT_SETTINGS", environment.getContainerEnvironmentResolver().createSettings(environment, node)) .withUlimit("nofile", 262_144, 262_144) .withUlimit("nproc", 32_768, 409_600) .withUlimit("core", -1, -1) .withAddCapability("SYS_PTRACE") .withAddCapability("SYS_ADMIN"); if (environment.getNodeType() == NodeType.confighost || environment.getNodeType() == NodeType.proxyhost) { command.withVolume("/var/lib/sia", "/var/lib/sia"); } if (environment.getNodeType() == NodeType.proxyhost) { command.withVolume("/opt/yahoo/share/ssl/certs/", "/opt/yahoo/share/ssl/certs/"); } if (environment.getNodeType() == NodeType.host) { Path zpePathInNode = environment.pathInNodeUnderVespaHome("var/zpe"); if (environment.isRunningOnHost()) { command.withSharedVolume("/var/zpe", zpePathInNode.toString()); } else { command.withVolume(environment.pathInHostFromPathInNode(containerName, zpePathInNode).toString(), zpePathInNode.toString()); } } DockerNetworking networking = environment.getDockerNetworking(); command.withNetworkMode(networking.getDockerNetworkMode()); if (networking == DockerNetworking.MACVLAN) { command.withIpAddress(ipV6Address); command.withNetworkMode(DockerImpl.DOCKER_CUSTOM_MACVLAN_NETWORK_NAME); command.withSharedVolume("/etc/hosts", "/etc/hosts"); } else if (networking == DockerNetworking.NPT) { InetAddress ipV6Prefix = InetAddresses.forString(IPV6_NPT_PREFIX); InetAddress ipV6Local = IPAddresses.prefixTranslate(ipV6Address, ipV6Prefix, 8); command.withIpAddress(ipV6Local); Optional<InetAddress> ipV4Local = environment.getIpAddresses().getIPv4Address(node.getHostname()) .map(ipV4Address -> { InetAddress ipV4Prefix = InetAddresses.forString(IPV4_NPT_PREFIX); return IPAddresses.prefixTranslate(ipV4Address, ipV4Prefix, 2); }); ipV4Local.ifPresent(command::withIpAddress); addEtcHosts(containerData, node.getHostname(), ipV4Local, ipV6Local); } for (Path pathInNode : directoriesToMount.keySet()) { String pathInHost = environment.pathInHostFromPathInNode(containerName, pathInNode).toString(); command.withVolume(pathInHost, pathInNode.toString()); } long minMainMemoryAvailableMb = (long) (node.getMinMainMemoryAvailableGb() * 1024); if (minMainMemoryAvailableMb > 0) { command.withEnvironment("VESPA_TOTAL_MEMORY_MB", Long.toString(minMainMemoryAvailableMb)); } logger.info("Creating new container with args: " + command); command.create(); docker.createContainer(command); }
command.withNetworkMode(DockerImpl.DOCKER_CUSTOM_MACVLAN_NETWORK_NAME);
public void createContainer(ContainerName containerName, NodeSpec node, ContainerData containerData) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.info("Creating container " + containerName); Inet6Address ipV6Address = environment.getIpAddresses().getIPv6Address(node.getHostname()).orElseThrow( () -> new RuntimeException("Unable to find a valid IPv6 address for " + node.getHostname() + ". Missing an AAAA DNS entry?")); String configServers = String.join(",", environment.getConfigServerHostNames()); Docker.CreateContainerCommand command = docker.createContainerCommand( node.getWantedDockerImage().get(), ContainerResources.from(node.getMinCpuCores(), node.getMinMainMemoryAvailableGb()), containerName, node.getHostname()) .withManagedBy(MANAGER_NAME) .withEnvironment("VESPA_CONFIGSERVERS", configServers) .withEnvironment("CONTAINER_ENVIRONMENT_SETTINGS", environment.getContainerEnvironmentResolver().createSettings(environment, node)) .withUlimit("nofile", 262_144, 262_144) .withUlimit("nproc", 32_768, 409_600) .withUlimit("core", -1, -1) .withAddCapability("SYS_PTRACE") .withAddCapability("SYS_ADMIN"); if (environment.getNodeType() == NodeType.confighost || environment.getNodeType() == NodeType.proxyhost) { command.withVolume("/var/lib/sia", "/var/lib/sia"); } if (environment.getNodeType() == NodeType.proxyhost) { command.withVolume("/opt/yahoo/share/ssl/certs/", "/opt/yahoo/share/ssl/certs/"); } if (environment.getNodeType() == NodeType.host) { Path zpePathInNode = environment.pathInNodeUnderVespaHome("var/zpe"); if (environment.isRunningOnHost()) { command.withSharedVolume("/var/zpe", zpePathInNode.toString()); } else { command.withVolume(environment.pathInHostFromPathInNode(containerName, zpePathInNode).toString(), zpePathInNode.toString()); } } DockerNetworking networking = environment.getDockerNetworking(); command.withNetworkMode(networking.getDockerNetworkMode()); if (networking == DockerNetworking.MACVLAN) { command.withIpAddress(ipV6Address); command.withSharedVolume("/etc/hosts", "/etc/hosts"); } else if (networking == DockerNetworking.NPT) { InetAddress ipV6Prefix = InetAddresses.forString(IPV6_NPT_PREFIX); InetAddress ipV6Local = IPAddresses.prefixTranslate(ipV6Address, ipV6Prefix, 8); command.withIpAddress(ipV6Local); Optional<InetAddress> ipV4Local = environment.getIpAddresses().getIPv4Address(node.getHostname()) .map(ipV4Address -> { InetAddress ipV4Prefix = InetAddresses.forString(IPV4_NPT_PREFIX); return IPAddresses.prefixTranslate(ipV4Address, ipV4Prefix, 2); }); ipV4Local.ifPresent(command::withIpAddress); addEtcHosts(containerData, node.getHostname(), ipV4Local, ipV6Local); } for (Path pathInNode : directoriesToMount.keySet()) { String pathInHost = environment.pathInHostFromPathInNode(containerName, pathInNode).toString(); command.withVolume(pathInHost, pathInNode.toString()); } long minMainMemoryAvailableMb = (long) (node.getMinMainMemoryAvailableGb() * 1024); if (minMainMemoryAvailableMb > 0) { command.withEnvironment("VESPA_TOTAL_MEMORY_MB", Long.toString(minMainMemoryAvailableMb)); } logger.info("Creating new container with args: " + command); command.create(); docker.createContainer(command); }
class DockerOperationsImpl implements DockerOperations { private static final String MANAGER_NAME = "node-admin"; private static final String IPV6_NPT_PREFIX = "fd00::"; private static final String IPV4_NPT_PREFIX = "172.17.0.0"; private final Docker docker; private final Environment environment; private final ProcessExecuter processExecuter; private final String nodeProgram; private final Map<Path, Boolean> directoriesToMount; public DockerOperationsImpl(Docker docker, Environment environment, ProcessExecuter processExecuter) { this.docker = docker; this.environment = environment; this.processExecuter = processExecuter; this.nodeProgram = environment.pathInNodeUnderVespaHome("bin/vespa-nodectl").toString(); this.directoriesToMount = getDirectoriesToMount(environment); } @Override void addEtcHosts(ContainerData containerData, String hostname, Optional<InetAddress> ipV4Local, InetAddress ipV6Local) { StringBuilder etcHosts = new StringBuilder( "127.0.0.1\tlocalhost\n" + "::1\tlocalhost ip6-localhost ip6-loopback\n" + "fe00::0\tip6-localnet\n" + "ff00::0\tip6-mcastprefix\n" + "ff02::1\tip6-allnodes\n" + "ff02::2\tip6-allrouters\n" + ipV6Local.getHostAddress() + '\t' + hostname + '\n'); ipV4Local.ifPresent(ipv4 -> etcHosts.append(ipv4.getHostAddress() + '\t' + hostname + '\n')); containerData.addFile(Paths.get("/etc/hosts"), etcHosts.toString()); } @Override public void startContainer(ContainerName containerName) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.info("Starting container " + containerName); if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) { docker.connectContainerToNetwork(containerName, "bridge"); } docker.startContainer(containerName); if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) { setupContainerNetworkConnectivity(containerName); } directoriesToMount.entrySet().stream() .filter(Map.Entry::getValue) .map(Map.Entry::getKey) .forEach(path -> docker.executeInContainerAsRoot(containerName, "chmod", "-R", "a+w", path.toString())); } @Override public void removeContainer(Container existingContainer) { final ContainerName containerName = existingContainer.name; PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); if (existingContainer.state.isRunning()) { logger.info("Stopping container " + containerName.asString()); docker.stopContainer(containerName); } logger.info("Deleting container " + containerName.asString()); docker.deleteContainer(containerName); } @Override public Optional<Container> getContainer(ContainerName containerName) { return docker.getContainer(containerName); } /** * Try to suspend node. Suspending a node means the node should be taken offline, * such that maintenance can be done of the node (upgrading, rebooting, etc), * and such that we will start serving again as soon as possible afterwards. * <p> * Any failures are logged and ignored. 
*/ @Override public void trySuspendNode(ContainerName containerName) { try { executeCommandInContainer(containerName, nodeProgram, "suspend"); } catch (RuntimeException e) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.warning("Failed trying to suspend container " + containerName.asString(), e); } } /** * For macvlan: * <p> * Due to a bug in docker (https: * IPv6 gateway in containers connected to more than one docker network */ private void setupContainerNetworkConnectivity(ContainerName containerName) { InetAddress hostDefaultGateway = uncheck(() -> DockerNetworkCreator.getDefaultGatewayLinux(true)); executeCommandInNetworkNamespace(containerName, "route", "-A", "inet6", "add", "default", "gw", hostDefaultGateway.getHostAddress(), "dev", "eth1"); } @Override public boolean pullImageAsyncIfNeeded(DockerImage dockerImage) { return docker.pullImageAsyncIfNeeded(dockerImage); } ProcessResult executeCommandInContainer(ContainerName containerName, String... command) { ProcessResult result = docker.executeInContainerAsRoot(containerName, command); if (!result.isSuccess()) { throw new RuntimeException("Container " + containerName.asString() + ": command " + Arrays.toString(command) + " failed: " + result); } return result; } @Override public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, Long timeoutSeconds, String... command) { return docker.executeInContainerAsRoot(containerName, timeoutSeconds, command); } @Override public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, String... command) { return docker.executeInContainerAsRoot(containerName, command); } @Override public ProcessResult executeCommandInNetworkNamespace(ContainerName containerName, String... 
command) { final PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); final Integer containerPid = docker.getContainer(containerName) .filter(container -> container.state.isRunning()) .map(container -> container.pid) .orElseThrow(() -> new RuntimeException("PID not found for container with name: " + containerName.asString())); Path procPath = environment.getPathResolver().getPathToRootOfHost().resolve("proc"); final String[] wrappedCommand = Stream.concat( Stream.of("sudo", "nsenter", String.format("--net=%s/%d/ns/net", procPath, containerPid), "--"), Stream.of(command)) .toArray(String[]::new); try { Pair<Integer, String> result = processExecuter.exec(wrappedCommand); if (result.getFirst() != 0) { String msg = String.format( "Failed to execute %s in network namespace for %s (PID = %d), exit code: %d, output: %s", Arrays.toString(wrappedCommand), containerName.asString(), containerPid, result.getFirst(), result.getSecond()); logger.error(msg); throw new RuntimeException(msg); } return new ProcessResult(0, result.getSecond(), ""); } catch (IOException e) { logger.warning(String.format("IOException while executing %s in network namespace for %s (PID = %d)", Arrays.toString(wrappedCommand), containerName.asString(), containerPid), e); throw new RuntimeException(e); } } @Override public void resumeNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "resume"); } @Override public void restartVespaOnNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "restart-vespa"); } @Override public void stopServicesOnNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "stop"); } @Override public Optional<Docker.ContainerStats> getContainerStats(ContainerName containerName) { return docker.getContainerStats(containerName); } @Override public List<Container> getAllManagedContainers() { return docker.getAllContainersManagedBy(MANAGER_NAME); } @Override public void deleteUnusedDockerImages() { docker.deleteUnusedDockerImages(); } /** * Returns map of directories to mount and whether they should be writable by everyone */ private static Map<Path, Boolean> getDirectoriesToMount(Environment environment) { final Map<Path, Boolean> directoriesToMount = new HashMap<>(); directoriesToMount.put(Paths.get("/etc/yamas-agent"), true); directoriesToMount.put(Paths.get("/etc/filebeat"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/daemontools_y"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/jdisc_core"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/langdetect/"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yca"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yck"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yell"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykey"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykeyd"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yms_agent"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ysar"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ystatus"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/zpu"), 
false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/cache"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/crash"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/jdisc"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_container"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_core"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/maven"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/run"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/scoreboards"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/service"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/share"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/spool"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/yca"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/ycore++"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/zookeeper"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("tmp"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/container-data"), false); if (environment.getNodeType() == NodeType.proxyhost) directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa-hosted/routing"), true); if (environment.getNodeType() == NodeType.host) directoriesToMount.put(Paths.get("/var/lib/sia"), true); return Collections.unmodifiableMap(directoriesToMount); } }
class DockerOperationsImpl implements DockerOperations { private static final String MANAGER_NAME = "node-admin"; private static final String IPV6_NPT_PREFIX = "fd00::"; private static final String IPV4_NPT_PREFIX = "172.17.0.0"; private final Docker docker; private final Environment environment; private final ProcessExecuter processExecuter; private final String nodeProgram; private final Map<Path, Boolean> directoriesToMount; public DockerOperationsImpl(Docker docker, Environment environment, ProcessExecuter processExecuter) { this.docker = docker; this.environment = environment; this.processExecuter = processExecuter; this.nodeProgram = environment.pathInNodeUnderVespaHome("bin/vespa-nodectl").toString(); this.directoriesToMount = getDirectoriesToMount(environment); } @Override void addEtcHosts(ContainerData containerData, String hostname, Optional<InetAddress> ipV4Local, InetAddress ipV6Local) { StringBuilder etcHosts = new StringBuilder( "127.0.0.1\tlocalhost\n" + "::1\tlocalhost ip6-localhost ip6-loopback\n" + "fe00::0\tip6-localnet\n" + "ff00::0\tip6-mcastprefix\n" + "ff02::1\tip6-allnodes\n" + "ff02::2\tip6-allrouters\n" + ipV6Local.getHostAddress() + '\t' + hostname + '\n'); ipV4Local.ifPresent(ipv4 -> etcHosts.append(ipv4.getHostAddress() + '\t' + hostname + '\n')); containerData.addFile(Paths.get("/etc/hosts"), etcHosts.toString()); } @Override public void startContainer(ContainerName containerName) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.info("Starting container " + containerName); if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) { docker.connectContainerToNetwork(containerName, "bridge"); } docker.startContainer(containerName); if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) { setupContainerNetworkConnectivity(containerName); } directoriesToMount.entrySet().stream() .filter(Map.Entry::getValue) .map(Map.Entry::getKey) .forEach(path -> docker.executeInContainerAsRoot(containerName, "chmod", "-R", "a+w", path.toString())); } @Override public void removeContainer(Container existingContainer) { final ContainerName containerName = existingContainer.name; PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); if (existingContainer.state.isRunning()) { logger.info("Stopping container " + containerName.asString()); docker.stopContainer(containerName); } logger.info("Deleting container " + containerName.asString()); docker.deleteContainer(containerName); } @Override public Optional<Container> getContainer(ContainerName containerName) { return docker.getContainer(containerName); } /** * Try to suspend node. Suspending a node means the node should be taken offline, * such that maintenance can be done of the node (upgrading, rebooting, etc), * and such that we will start serving again as soon as possible afterwards. * <p> * Any failures are logged and ignored. 
*/ @Override public void trySuspendNode(ContainerName containerName) { try { executeCommandInContainer(containerName, nodeProgram, "suspend"); } catch (RuntimeException e) { PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); logger.warning("Failed trying to suspend container " + containerName.asString(), e); } } /** * For macvlan: * <p> * Due to a bug in docker (https: * IPv6 gateway in containers connected to more than one docker network */ private void setupContainerNetworkConnectivity(ContainerName containerName) { InetAddress hostDefaultGateway = uncheck(() -> DockerNetworkCreator.getDefaultGatewayLinux(true)); executeCommandInNetworkNamespace(containerName, "route", "-A", "inet6", "add", "default", "gw", hostDefaultGateway.getHostAddress(), "dev", "eth1"); } @Override public boolean pullImageAsyncIfNeeded(DockerImage dockerImage) { return docker.pullImageAsyncIfNeeded(dockerImage); } ProcessResult executeCommandInContainer(ContainerName containerName, String... command) { ProcessResult result = docker.executeInContainerAsRoot(containerName, command); if (!result.isSuccess()) { throw new RuntimeException("Container " + containerName.asString() + ": command " + Arrays.toString(command) + " failed: " + result); } return result; } @Override public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, Long timeoutSeconds, String... command) { return docker.executeInContainerAsRoot(containerName, timeoutSeconds, command); } @Override public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, String... command) { return docker.executeInContainerAsRoot(containerName, command); } @Override public ProcessResult executeCommandInNetworkNamespace(ContainerName containerName, String... 
command) { final PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName); final Integer containerPid = docker.getContainer(containerName) .filter(container -> container.state.isRunning()) .map(container -> container.pid) .orElseThrow(() -> new RuntimeException("PID not found for container with name: " + containerName.asString())); Path procPath = environment.getPathResolver().getPathToRootOfHost().resolve("proc"); final String[] wrappedCommand = Stream.concat( Stream.of("sudo", "nsenter", String.format("--net=%s/%d/ns/net", procPath, containerPid), "--"), Stream.of(command)) .toArray(String[]::new); try { Pair<Integer, String> result = processExecuter.exec(wrappedCommand); if (result.getFirst() != 0) { String msg = String.format( "Failed to execute %s in network namespace for %s (PID = %d), exit code: %d, output: %s", Arrays.toString(wrappedCommand), containerName.asString(), containerPid, result.getFirst(), result.getSecond()); logger.error(msg); throw new RuntimeException(msg); } return new ProcessResult(0, result.getSecond(), ""); } catch (IOException e) { logger.warning(String.format("IOException while executing %s in network namespace for %s (PID = %d)", Arrays.toString(wrappedCommand), containerName.asString(), containerPid), e); throw new RuntimeException(e); } } @Override public void resumeNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "resume"); } @Override public void restartVespaOnNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "restart-vespa"); } @Override public void stopServicesOnNode(ContainerName containerName) { executeCommandInContainer(containerName, nodeProgram, "stop"); } @Override public Optional<Docker.ContainerStats> getContainerStats(ContainerName containerName) { return docker.getContainerStats(containerName); } @Override public List<Container> getAllManagedContainers() { return docker.getAllContainersManagedBy(MANAGER_NAME); } @Override public void deleteUnusedDockerImages() { docker.deleteUnusedDockerImages(); } /** * Returns map of directories to mount and whether they should be writable by everyone */ private static Map<Path, Boolean> getDirectoriesToMount(Environment environment) { final Map<Path, Boolean> directoriesToMount = new HashMap<>(); directoriesToMount.put(Paths.get("/etc/yamas-agent"), true); directoriesToMount.put(Paths.get("/etc/filebeat"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/daemontools_y"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/jdisc_core"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/langdetect/"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yca"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yck"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yell"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykey"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykeyd"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yms_agent"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ysar"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ystatus"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/zpu"), 
false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/cache"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/crash"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/jdisc"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_container"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_core"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/maven"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/run"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/scoreboards"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/service"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/share"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/spool"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/yca"), true); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/ycore++"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/zookeeper"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("tmp"), false); directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/container-data"), false); if (environment.getNodeType() == NodeType.proxyhost) directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa-hosted/routing"), true); if (environment.getNodeType() == NodeType.host) directoriesToMount.put(Paths.get("/var/lib/sia"), true); return Collections.unmodifiableMap(directoriesToMount); } }
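On the NPT branch in `createContainer` above: the node's routable IPv6 address is translated to a container-local address by swapping in the `fd00::` prefix while keeping the host suffix (and similarly for IPv4 under `172.17.0.0`). A rough sketch of what such a prefix translation does; this mirrors the `IPAddresses.prefixTranslate` call site and is an assumption about its behavior, not the actual library code:

```java
import java.net.InetAddress;
import java.net.UnknownHostException;

class PrefixTranslateSketch {
    // Copy the first prefixBytes bytes from the new prefix, keep the host suffix.
    static InetAddress prefixTranslate(InetAddress address, InetAddress prefix, int prefixBytes)
            throws UnknownHostException {
        byte[] translated = address.getAddress();
        System.arraycopy(prefix.getAddress(), 0, translated, 0, prefixBytes);
        return InetAddress.getByAddress(translated);
    }

    public static void main(String[] args) throws Exception {
        InetAddress nodeAddress = InetAddress.getByName("2001:db8::1:2:3:4");
        InetAddress nptPrefix = InetAddress.getByName("fd00::");
        // The low 8 bytes survive; the address is re-homed under the fd00:: prefix,
        // matching the prefixTranslate(ipV6Address, ipV6Prefix, 8) call above.
        System.out.println(prefixTranslate(nodeAddress, nptPrefix, 8).getHostAddress());
    }
}
```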
One option we have is to auto-accept them; the other is to do what you did here and stop the algorithm, asking the user to select/confirm the extension explicitly. I was fine with option 1, but I can be convinced otherwise. If we go with option 2, we need clearer text describing what is going on, as the generic non-matching message is confusing.
public AddExtensionResult addExtensions(final Set<String> extensions) throws IOException { if (extensions == null || extensions.isEmpty()) { return new AddExtensionResult(false, true); } boolean updated = false; boolean success = true; List<Dependency> dependenciesFromBom = getDependenciesFromBom(); for (String query : extensions) { List<Extension> registry = MojoUtils.loadExtensions(); SelectionResult result = select(query, registry); if (!result.matches()) { StringBuilder sb = new StringBuilder(); Set<Extension> candidates = result.getExtensions(); if (candidates.isEmpty() && query.contains(":")) { updated = addExtensionAsGAV(query) || updated; } else if (candidates.isEmpty()) { print(NOK + " Cannot find a dependency matching '" + query + "', maybe a typo?"); success = false; } else if (candidates.size() == 1) { sb.append(NOK).append(" One extension matching '").append(query).append("'"); sb.append(System.lineSeparator()).append(" * ").append(candidates.iterator().next().managementKey()); sb.append(System.lineSeparator()).append(" Use the exact name or the full GAV to add the extension"); print(sb.toString()); success = false; } else { sb.append(NOK).append(" Multiple extensions matching '").append(query).append("'"); result.getExtensions() .forEach(extension -> sb.append(System.lineSeparator()).append(" * ") .append(extension.managementKey())); sb.append(System.lineSeparator()).append(" Be more specific e.g using the exact name or the full GAV."); print(sb.toString()); success = false; } } else { final Extension extension = result.getMatch(); updated = addDependency(dependenciesFromBom, extension) || updated; } } if (updated) { ByteArrayOutputStream pomOutputStream = new ByteArrayOutputStream(); MojoUtils.write(model, pomOutputStream); writer.write(pom, pomOutputStream.toString("UTF-8")); } return new AddExtensionResult(updated, success); }
sb.append(NOK).append(" One extension matching '").append(query).append("'");
public AddExtensionResult addExtensions(final Set<String> extensions) throws IOException { if (extensions == null || extensions.isEmpty()) { return new AddExtensionResult(false, true); } boolean updated = false; boolean success = true; List<Dependency> dependenciesFromBom = getDependenciesFromBom(); List<Extension> registry = MojoUtils.loadExtensions(); for (String query : extensions) { if (query.contains(":")) { updated = addExtensionAsGAV(query) || updated; } else { SelectionResult result = select(query, registry, false); if (!result.matches()) { StringBuilder sb = new StringBuilder(); Set<Extension> candidates = result.getExtensions(); if (candidates.isEmpty()) { print(NOK + " Cannot find a dependency matching '" + query + "', maybe a typo?"); success = false; } else { sb.append(NOK).append(" Multiple extensions matching '").append(query).append("'"); result.getExtensions() .forEach(extension -> sb.append(System.lineSeparator()).append(" * ") .append(extension.managementKey())); sb.append(System.lineSeparator()) .append(" Be more specific e.g using the exact name or the full GAV."); print(sb.toString()); success = false; } } else { final Extension extension = result.getMatch(); updated = addDependency(dependenciesFromBom, extension) || updated; } } } if (updated) { ByteArrayOutputStream pomOutputStream = new ByteArrayOutputStream(); MojoUtils.write(model, pomOutputStream); writer.write(pom, pomOutputStream.toString("UTF-8")); } return new AddExtensionResult(updated, success); }
class AddExtensions { private static final String OK = "\u2705"; private static final String NOK = "\u274c"; private static final String NOOP = "\uD83D\uDC4D"; private Model model; private String pom; private ProjectWriter writer; public AddExtensions(final ProjectWriter writer, final String pom) throws IOException { this.model = MojoUtils.readPom(new ByteArrayInputStream(writer.getContent(pom))); this.writer = writer; this.pom = pom; } /** * Selection algorithm. * * @param query the query * @param extensions the extension list * @return the list of matching candidates and whether or not a match has been found. */ static SelectionResult select(String query, List<Extension> extensions) { String q = query.trim().toLowerCase(); Set<Extension> matchesNameOrArtifactId = extensions.stream().filter(extension -> extension.getName().equalsIgnoreCase(q) || matchesArtifactId(extension.getArtifactId(), q)).collect(Collectors.toSet()); if (matchesNameOrArtifactId.size() == 1) { return new SelectionResult(matchesNameOrArtifactId, true); } Set<Extension> matchesShortName = extensions.stream().filter(extension -> matchesShortName(extension, q)) .collect(Collectors.toSet()); if (matchesShortName.size() == 1) { return new SelectionResult(matchesShortName, true); } List<Extension> matchesLabels = extensions.stream() .filter(extension -> extension.labels().contains(q)).collect(Collectors.toList()); Set<Extension> candidates = new LinkedHashSet<>(); candidates.addAll(matchesNameOrArtifactId); candidates.addAll(matchesShortName); candidates.addAll(matchesLabels); return new SelectionResult(candidates, false); } private static boolean matchesShortName(Extension extension, String q) { return q.equalsIgnoreCase(extension.getShortName()); } private static boolean matchesArtifactId(String artifactId, String q) { return (artifactId.equalsIgnoreCase(q) || artifactId.equalsIgnoreCase("quarkus-" + q) || artifactId.equalsIgnoreCase("quarkus-smallrye-" + q)); } private boolean addDependency(List<Dependency> dependenciesFromBom, Extension extension) { if (!MojoUtils.hasDependency(model, extension.getGroupId(), extension.getArtifactId())) { print(OK + " Adding extension " + extension.managementKey()); model.addDependency(extension .toDependency(containsBOM(model) && isDefinedInBom(dependenciesFromBom, extension))); return true; } else { print(NOOP + " Skipping extension " + extension.managementKey() + ": already present"); return false; } } private boolean addExtensionAsGAV(String query) { Dependency parsed = MojoUtils.parse(query.trim().toLowerCase()); print(OK + " Adding dependency " + parsed.getManagementKey()); model.addDependency(parsed); return true; } private void print(String message) { System.out.println(message); } private List<Dependency> getDependenciesFromBom() { try { return readPom(getClass().getResourceAsStream("/quarkus-bom/pom.xml")) .getDependencyManagement() .getDependencies(); } catch (IOException e) { throw new IllegalStateException("Unable to read the BOM file: " + e.getMessage(), e); } } private boolean containsBOM(Model model) { if (model.getDependencyManagement() == null) { return false; } List<Dependency> dependencies = model.getDependencyManagement().getDependencies(); return dependencies.stream() .filter(dependency -> "import".equalsIgnoreCase(dependency.getScope())) .filter(dependency -> "pom".equalsIgnoreCase(dependency.getType())) .anyMatch(dependency -> dependency.getArtifactId().equalsIgnoreCase(getBomArtifactId())); } private boolean isDefinedInBom(List<Dependency> dependencies, Extension 
extension) { return dependencies.stream().anyMatch(dependency -> dependency.getGroupId().equalsIgnoreCase(extension.getGroupId()) && dependency.getArtifactId().equalsIgnoreCase(extension.getArtifactId())); } }
class AddExtensions { private static final String OK = "\u2705"; private static final String NOK = "\u274c"; private static final String NOOP = "\uD83D\uDC4D"; private Model model; private String pom; private ProjectWriter writer; public AddExtensions(final ProjectWriter writer, final String pom) throws IOException { this.model = MojoUtils.readPom(new ByteArrayInputStream(writer.getContent(pom))); this.writer = writer; this.pom = pom; } /** * Selection algorithm. * * @param query the query * @param extensions the extension list * @param labelLookup whether or not the query must be tested against the labels of the extensions. Should * be {@code false} by default. * @return the list of matching candidates and whether or not a match has been found. */ static SelectionResult select(String query, List<Extension> extensions, boolean labelLookup) { String q = query.trim().toLowerCase(); Set<Extension> matchesNameOrArtifactId = extensions.stream() .filter(extension -> extension.getName().equalsIgnoreCase(q) || matchesArtifactId(extension.getArtifactId(), q)) .collect(Collectors.toSet()); if (matchesNameOrArtifactId.size() == 1) { return new SelectionResult(matchesNameOrArtifactId, true); } Set<Extension> matchesShortName = extensions.stream().filter(extension -> matchesShortName(extension, q)) .collect(Collectors.toSet()); if (matchesShortName.size() == 1 && matchesNameOrArtifactId.isEmpty()) { return new SelectionResult(matchesShortName, true); } Set<Extension> partialMatches = extensions.stream().filter(extension -> extension.getName().toLowerCase().contains(q) || extension.getArtifactId().toLowerCase().contains(q) || extension.getShortName().toLowerCase().contains(q)).collect(Collectors.toSet()); if (partialMatches.size() == 1 && matchesNameOrArtifactId.isEmpty() && matchesShortName.isEmpty()) { return new SelectionResult(partialMatches, true); } List<Extension> matchesLabels; if (labelLookup) { matchesLabels = extensions.stream() .filter(extension -> extension.labels().contains(q)).collect(Collectors.toList()); } else { matchesLabels = new ArrayList<>(); } Set<Extension> candidates = new LinkedHashSet<>(); candidates.addAll(matchesNameOrArtifactId); candidates.addAll(matchesShortName); candidates.addAll(partialMatches); candidates.addAll(matchesLabels); return new SelectionResult(candidates, false); } private static boolean matchesShortName(Extension extension, String q) { return q.equalsIgnoreCase(extension.getShortName()); } private static boolean matchesArtifactId(String artifactId, String q) { return artifactId.equalsIgnoreCase(q) || artifactId.equalsIgnoreCase("quarkus-" + q); } private boolean addDependency(List<Dependency> dependenciesFromBom, Extension extension) { if (!MojoUtils.hasDependency(model, extension.getGroupId(), extension.getArtifactId())) { print(OK + " Adding extension " + extension.managementKey()); model.addDependency(extension .toDependency(containsBOM(model) && isDefinedInBom(dependenciesFromBom, extension))); return true; } else { print(NOOP + " Skipping extension " + extension.managementKey() + ": already present"); return false; } } private boolean addExtensionAsGAV(String query) { Dependency parsed = MojoUtils.parse(query.trim().toLowerCase()); boolean alreadyThere = model.getDependencies().stream() .anyMatch(d -> d.getManagementKey().equalsIgnoreCase(parsed.getManagementKey())); if (!alreadyThere) { print(OK + " Adding dependency " + parsed.getManagementKey()); model.addDependency(parsed); return true; } else { print(NOOP + " Dependency " + 
parsed.getManagementKey() + " already in the pom.xml file - skipping"); return false; } } private void print(String message) { System.out.println(message); } private List<Dependency> getDependenciesFromBom() { try { return readPom(getClass().getResourceAsStream("/quarkus-bom/pom.xml")) .getDependencyManagement() .getDependencies(); } catch (IOException e) { throw new IllegalStateException("Unable to read the BOM file: " + e.getMessage(), e); } } private boolean containsBOM(Model model) { if (model.getDependencyManagement() == null) { return false; } List<Dependency> dependencies = model.getDependencyManagement().getDependencies(); return dependencies.stream() .filter(dependency -> "import".equalsIgnoreCase(dependency.getScope())) .filter(dependency -> "pom".equalsIgnoreCase(dependency.getType())) .anyMatch(dependency -> dependency.getArtifactId().equalsIgnoreCase(getBomArtifactId())); } private boolean isDefinedInBom(List<Dependency> dependencies, Extension extension) { return dependencies.stream().anyMatch(dependency -> dependency.getGroupId().equalsIgnoreCase(extension.getGroupId()) && dependency.getArtifactId().equalsIgnoreCase(extension.getArtifactId())); } }
Actually, we should probably also add `@Nullable` to `resourceManagerId` and `resourceActions`.
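The suggested change is small; a sketch of the annotated fields, assuming javax.annotation.Nullable (the import is not shown in this record, but it is the annotation the updated context below uses):

import javax.annotation.Nullable;

/** ResourceManager's id; null until start() is called, and again after suspend(). */
@Nullable
private ResourceManagerId resourceManagerId;

/** Callbacks for resource (de-)allocations; null until start() is called, and again after suspend(). */
@Nullable
private ResourceActions resourceActions;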
public void suspend() { LOG.info("Suspending the slot manager."); resourceTracker.clear(); if (taskExecutorManager != null) { taskExecutorManager.close(); for (InstanceID registeredTaskManager : taskExecutorManager.getTaskExecutors()) { unregisterTaskManager(registeredTaskManager, new SlotManagerException("The slot manager is being suspended.")); } } taskExecutorManager = null; resourceManagerId = null; resourceActions = null; started = false; }
if (taskExecutorManager != null) {
public void suspend() { if (!started) { return; } LOG.info("Suspending the slot manager."); resourceTracker.clear(); if (taskExecutorManager != null) { taskExecutorManager.close(); for (InstanceID registeredTaskManager : taskExecutorManager.getTaskExecutors()) { unregisterTaskManager(registeredTaskManager, new SlotManagerException("The slot manager is being suspended.")); } } taskExecutorManager = null; resourceManagerId = null; resourceActions = null; started = false; }
class DeclarativeSlotManager implements SlotManager { private static final Logger LOG = LoggerFactory.getLogger(DeclarativeSlotManager.class); private final SlotTracker slotTracker; private final ResourceTracker resourceTracker; private final BiFunction<Executor, ResourceActions, TaskExecutorManager> taskExecutorManagerFactory; private TaskExecutorManager taskExecutorManager; /** Timeout for slot requests to the task manager. */ private final Time taskManagerRequestTimeout; private final SlotMatchingStrategy slotMatchingStrategy; private final SlotManagerMetricGroup slotManagerMetricGroup; private final Map<JobID, String> jobMasterTargetAddresses = new HashMap<>(); private final HashMap<SlotID, CompletableFuture<Acknowledge>> pendingSlotAllocationFutures; /** ResourceManager's id. */ private ResourceManagerId resourceManagerId; /** Executor for future callbacks which have to be "synchronized". */ private Executor mainThreadExecutor; /** Callbacks for resource (de-)allocations. */ private ResourceActions resourceActions; /** True iff the component has been started. */ private boolean started; public DeclarativeSlotManager( ScheduledExecutor scheduledExecutor, SlotManagerConfiguration slotManagerConfiguration, SlotManagerMetricGroup slotManagerMetricGroup, ResourceTracker resourceTracker, SlotTracker slotTracker) { Preconditions.checkNotNull(slotManagerConfiguration); this.taskManagerRequestTimeout = slotManagerConfiguration.getTaskManagerRequestTimeout(); this.slotManagerMetricGroup = Preconditions.checkNotNull(slotManagerMetricGroup); this.resourceTracker = Preconditions.checkNotNull(resourceTracker); pendingSlotAllocationFutures = new HashMap<>(16); this.slotTracker = Preconditions.checkNotNull(slotTracker); slotTracker.registerSlotStatusUpdateListener(createSlotStatusUpdateListener()); slotMatchingStrategy = slotManagerConfiguration.getSlotMatchingStrategy(); taskExecutorManagerFactory = (executor, resourceActions) -> new TaskExecutorManager( slotManagerConfiguration.getDefaultWorkerResourceSpec(), slotManagerConfiguration.getNumSlotsPerWorker(), slotManagerConfiguration.getMaxSlotNum(), slotManagerConfiguration.isWaitResultConsumedBeforeRelease(), slotManagerConfiguration.getRedundantTaskManagerNum(), slotManagerConfiguration.getTaskManagerTimeout(), scheduledExecutor, executor, resourceActions); resourceManagerId = null; resourceActions = null; mainThreadExecutor = null; taskExecutorManager = null; started = false; } private SlotStatusUpdateListener createSlotStatusUpdateListener() { return (taskManagerSlot, previous, current, jobId) -> { if (previous == SlotState.PENDING) { cancelAllocationFuture(taskManagerSlot.getSlotId()); } if (current == SlotState.PENDING) { resourceTracker.notifyAcquiredResource(jobId, taskManagerSlot.getResourceProfile()); } if (current == SlotState.FREE) { resourceTracker.notifyLostResource(jobId, taskManagerSlot.getResourceProfile()); } if (current == SlotState.ALLOCATED) { taskExecutorManager.occupySlot(taskManagerSlot.getInstanceId()); } if (previous == SlotState.ALLOCATED && current == SlotState.FREE) { taskExecutorManager.freeSlot(taskManagerSlot.getInstanceId()); } }; } private void cancelAllocationFuture(SlotID slotId) { final CompletableFuture<Acknowledge> acknowledgeCompletableFuture = pendingSlotAllocationFutures.remove(slotId); if (acknowledgeCompletableFuture != null) { acknowledgeCompletableFuture.cancel(false); } } /** * Starts the slot manager with the given leader id and resource manager actions. 
* * @param newResourceManagerId to use for communication with the task managers * @param newMainThreadExecutor to use to run code in the ResourceManager's main thread * @param newResourceActions to use for resource (de-)allocations */ @Override public void start(ResourceManagerId newResourceManagerId, Executor newMainThreadExecutor, ResourceActions newResourceActions) { LOG.info("Starting the slot manager."); this.resourceManagerId = Preconditions.checkNotNull(newResourceManagerId); mainThreadExecutor = Preconditions.checkNotNull(newMainThreadExecutor); resourceActions = Preconditions.checkNotNull(newResourceActions); taskExecutorManager = taskExecutorManagerFactory.apply(newMainThreadExecutor, newResourceActions); started = true; registerSlotManagerMetrics(); } private void registerSlotManagerMetrics() { slotManagerMetricGroup.gauge( MetricNames.TASK_SLOTS_AVAILABLE, () -> (long) getNumberFreeSlots()); slotManagerMetricGroup.gauge( MetricNames.TASK_SLOTS_TOTAL, () -> (long) getNumberRegisteredSlots()); } /** * Suspends the component. This clears the internal state of the slot manager. */ @Override /** * Closes the slot manager. * * @throws Exception if the close operation fails */ @Override public void close() throws Exception { LOG.info("Closing the slot manager."); suspend(); slotManagerMetricGroup.close(); } @Override public void processResourceRequirements(ResourceRequirements resourceRequirements) { checkInit(); LOG.debug("Received resource requirements from job {}: {}", resourceRequirements.getJobId(), resourceRequirements.getResourceRequirements()); if (resourceRequirements.getResourceRequirements().isEmpty()) { jobMasterTargetAddresses.remove(resourceRequirements.getJobId()); } else { jobMasterTargetAddresses.put(resourceRequirements.getJobId(), resourceRequirements.getTargetAddress()); } resourceTracker.notifyResourceRequirements(resourceRequirements.getJobId(), resourceRequirements.getResourceRequirements()); checkResourceRequirements(); } /** * Registers a new task manager at the slot manager. This will make the task managers slots * known and, thus, available for allocation. 
* * @param taskExecutorConnection for the new task manager * @param initialSlotReport for the new task manager * @return True if the task manager has not been registered before and is registered successfully; otherwise false */ @Override public boolean registerTaskManager(final TaskExecutorConnection taskExecutorConnection, SlotReport initialSlotReport) { checkInit(); LOG.debug("Registering task executor {} under {} at the slot manager.", taskExecutorConnection.getResourceID(), taskExecutorConnection.getInstanceID()); if (taskExecutorManager.isTaskManagerRegistered(taskExecutorConnection.getInstanceID())) { LOG.debug("Task executor {} was already registered.", taskExecutorConnection.getResourceID()); reportSlotStatus(taskExecutorConnection.getInstanceID(), initialSlotReport); return false; } else { if (!taskExecutorManager.registerTaskManager(taskExecutorConnection, initialSlotReport)) { LOG.debug("Task executor {} could not be registered.", taskExecutorConnection.getResourceID()); return false; } for (SlotStatus slotStatus : initialSlotReport) { slotTracker.addSlot( slotStatus.getSlotID(), slotStatus.getResourceProfile(), taskExecutorConnection, slotStatus.getJobID()); } checkResourceRequirements(); return true; } } @Override public boolean unregisterTaskManager(InstanceID instanceId, Exception cause) { checkInit(); LOG.debug("Unregistering task executor {} from the slot manager.", instanceId); if (taskExecutorManager.isTaskManagerRegistered(instanceId)) { slotTracker.removeSlots(taskExecutorManager.getSlotsOf(instanceId)); taskExecutorManager.unregisterTaskExecutor(instanceId); checkResourceRequirements(); return true; } else { LOG.debug("There is no task executor registered with instance ID {}. Ignoring this message.", instanceId); return false; } } /** * Reports the current slot allocations for a task manager identified by the given instance id. * * @param instanceId identifying the task manager for which to report the slot status * @param slotReport containing the status for all of its slots * @return true if the slot status has been updated successfully, otherwise false */ @Override public boolean reportSlotStatus(InstanceID instanceId, SlotReport slotReport) { checkInit(); LOG.debug("Received slot report from instance {}: {}.", instanceId, slotReport); if (taskExecutorManager.isTaskManagerRegistered(instanceId)) { slotTracker.notifySlotStatus(slotReport); checkResourceRequirements(); return true; } else { LOG.debug("Received slot report for unknown task manager with instance id {}. Ignoring this report.", instanceId); return false; } } /** * Free the given slot from the given allocation. If the slot is still allocated by the given * allocation id, then the slot will be marked as free and will be subject to new slot requests. * * @param slotId identifying the slot to free * @param allocationId with which the slot is presumably allocated */ @Override public void freeSlot(SlotID slotId, AllocationID allocationId) { checkInit(); LOG.debug("Freeing slot {}.", slotId); slotTracker.notifyFree(slotId); checkResourceRequirements(); } /** * Matches resource requirements against available resources. In a first round requirements are matched against free * slot, and any match results in a slot allocation. * The remaining unfulfilled requirements are matched against pending slots, allocating more workers if no matching * pending slot could be found. * If the requirements for a job could not be fulfilled then a notification is sent to the job master informing it * as such. 
* * <p>Performance notes: At it's core this method loops, for each job, over all free/pending slots for each required slot, trying to * find a matching slot. One should generally go in with the assumption that this runs in * numberOfJobsRequiringResources * numberOfRequiredSlots * numberOfFreeOrPendingSlots. * This is especially important when dealing with pending slots, as matches between requirements and pending slots * are not persisted and recomputed on each call. * This may required further refinements in the future; e.g., persisting the matches between requirements and pending slots, * or not matching against pending slots at all. * * <p>When dealing with unspecific resource profiles (i.e., {@link ResourceProfile * then the number of free/pending slots is not relevant because we only need exactly 1 comparison to determine whether * a slot can be fulfilled or not, since they are all the same anyway. * * <p>When dealing with specific resource profiles things can be a lot worse, with the classical cases * where either no matches are found, or only at the very end of the iteration. * In the absolute worst case, with J jobs, requiring R slots each with a unique resource profile such each pair * of these profiles is not matching, and S free/pending slots that don't fulfill any requirement, then this method * does a total of J*R*S resource profile comparisons. */ private void checkResourceRequirements() { final Map<JobID, Collection<ResourceRequirement>> missingResources = resourceTracker.getMissingResources(); if (missingResources.isEmpty()) { return; } final Map<JobID, ResourceCounter> unfulfilledRequirements = new LinkedHashMap<>(); for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements : missingResources.entrySet()) { final JobID jobId = resourceRequirements.getKey(); final ResourceCounter unfulfilledJobRequirements = tryAllocateSlotsForJob(jobId, resourceRequirements.getValue()); if (!unfulfilledJobRequirements.isEmpty()) { unfulfilledRequirements.put(jobId, unfulfilledJobRequirements); } } if (unfulfilledRequirements.isEmpty()) { return; } final ResourceCounter pendingSlots = new ResourceCounter(taskExecutorManager.getPendingTaskManagerSlots().stream().collect( Collectors.groupingBy( PendingTaskManagerSlot::getResourceProfile, Collectors.summingInt(x -> 1)))); for (Map.Entry<JobID, ResourceCounter> unfulfilledRequirement : unfulfilledRequirements.entrySet()) { tryFulfillRequirementsWithPendingSlots( unfulfilledRequirement.getKey(), unfulfilledRequirement.getValue().getResourceProfilesWithCount(), pendingSlots); } } private ResourceCounter tryAllocateSlotsForJob(JobID jobId, Collection<ResourceRequirement> missingResources) { final ResourceCounter outstandingRequirements = new ResourceCounter(); for (ResourceRequirement resourceRequirement : missingResources) { int numMissingSlots = internalTryAllocateSlots(jobId, jobMasterTargetAddresses.get(jobId), resourceRequirement); if (numMissingSlots > 0) { outstandingRequirements.incrementCount(resourceRequirement.getResourceProfile(), numMissingSlots); } } return outstandingRequirements; } /** * Tries to allocate slots for the given requirement. If there are not enough slots available, the * resource manager is informed to allocate more resources. 
* * @param jobId job to allocate slots for * @param targetAddress address of the jobmaster * @param resourceRequirement required slots * @return the number of missing slots */ private int internalTryAllocateSlots(JobID jobId, String targetAddress, ResourceRequirement resourceRequirement) { final ResourceProfile requiredResource = resourceRequirement.getResourceProfile(); Collection<TaskManagerSlotInformation> freeSlots = slotTracker.getFreeSlots(); int numUnfulfilled = 0; for (int x = 0; x < resourceRequirement.getNumberOfRequiredSlots(); x++) { final Optional<TaskManagerSlotInformation> reservedSlot = slotMatchingStrategy.findMatchingSlot(requiredResource, freeSlots, this::getNumberRegisteredSlotsOf); if (reservedSlot.isPresent()) { allocateSlot(reservedSlot.get(), jobId, targetAddress, requiredResource); } else { int numRemaining = resourceRequirement.getNumberOfRequiredSlots() - x; numUnfulfilled += numRemaining; break; } } return numUnfulfilled; } /** * Allocates the given slot. This entails sending a registration message to the task manager and treating failures. * * @param taskManagerSlot slot to allocate * @param jobId job for which the slot should be allocated for * @param targetAddress address of the job master * @param resourceProfile resource profile for the requirement for which the slot is used */ private void allocateSlot(TaskManagerSlotInformation taskManagerSlot, JobID jobId, String targetAddress, ResourceProfile resourceProfile) { final SlotID slotId = taskManagerSlot.getSlotId(); LOG.debug("Starting allocation of slot {} for job {} with resource profile {}.", slotId, jobId, resourceProfile); final InstanceID instanceId = taskManagerSlot.getInstanceId(); if (!taskExecutorManager.isTaskManagerRegistered(instanceId)) { throw new IllegalStateException("Could not find a registered task manager for instance id " + instanceId + '.'); } final TaskExecutorConnection taskExecutorConnection = taskManagerSlot.getTaskManagerConnection(); final TaskExecutorGateway gateway = taskExecutorConnection.getTaskExecutorGateway(); final CompletableFuture<Acknowledge> completableFuture = new CompletableFuture<>(); slotTracker.notifyAllocationStart(slotId, jobId); taskExecutorManager.markUsed(instanceId); pendingSlotAllocationFutures.put(slotId, completableFuture); CompletableFuture<Acknowledge> requestFuture = gateway.requestSlot( slotId, jobId, new AllocationID(), resourceProfile, targetAddress, resourceManagerId, taskManagerRequestTimeout); requestFuture.whenComplete( (Acknowledge acknowledge, Throwable throwable) -> { if (acknowledge != null) { completableFuture.complete(acknowledge); } else { completableFuture.completeExceptionally(throwable); } }); CompletableFuture<Void> slotAllocationResponseProcessingFuture = completableFuture.handleAsync( (Acknowledge acknowledge, Throwable throwable) -> { if (acknowledge != null) { LOG.trace("Completed allocation of slot {} for job {}.", slotId, jobId); slotTracker.notifyAllocationComplete(slotId, jobId); } else { if (throwable instanceof SlotOccupiedException) { SlotOccupiedException exception = (SlotOccupiedException) throwable; LOG.debug("Tried allocating slot {} for job {}, but it was already allocated for job {}.", slotId, jobId, exception.getJobId()); slotTracker.notifySlotStatus(Collections.singleton(new SlotStatus(slotId, taskManagerSlot.getResourceProfile(), exception.getJobId(), exception.getAllocationId()))); } else { if (throwable instanceof CancellationException) { LOG.debug("Cancelled allocation of slot {} for job {}.", slotId, jobId, 
throwable); } else { LOG.warn("Slot allocation for slot {} for job {} failed.", slotId, jobId, throwable); slotTracker.notifyFree(slotId); } } checkResourceRequirements(); } return null; }, mainThreadExecutor); FutureUtils.assertNoException(slotAllocationResponseProcessingFuture); } private void tryFulfillRequirementsWithPendingSlots(JobID jobId, Map<ResourceProfile, Integer> missingResources, ResourceCounter pendingSlots) { for (Map.Entry<ResourceProfile, Integer> missingResource : missingResources.entrySet()) { ResourceProfile profile = missingResource.getKey(); for (int i = 0; i < missingResource.getValue(); i++) { if (!tryFulfillWithPendingSlots(profile, pendingSlots)) { boolean couldAllocateWorkerAndReserveSlot = tryAllocateWorkerAndReserveSlot(profile, pendingSlots); if (!couldAllocateWorkerAndReserveSlot) { LOG.warn("Could not fulfill resource requirements of job {}.", jobId); resourceActions.notifyNotEnoughResourcesAvailable(jobId, resourceTracker.getAcquiredResources(jobId)); return; } } } } } private boolean tryFulfillWithPendingSlots(ResourceProfile resourceProfile, ResourceCounter pendingSlots) { Set<ResourceProfile> pendingSlotProfiles = pendingSlots.getResourceProfiles(); if (pendingSlotProfiles.contains(resourceProfile)) { pendingSlots.decrementCount(resourceProfile, 1); return true; } for (ResourceProfile pendingSlotProfile : pendingSlotProfiles) { if (pendingSlotProfile.isMatching(resourceProfile)) { pendingSlots.decrementCount(pendingSlotProfile, 1); return true; } } return false; } private boolean tryAllocateWorkerAndReserveSlot(ResourceProfile profile, ResourceCounter pendingSlots) { Optional<ResourceRequirement> newlyFulfillableRequirements = taskExecutorManager.allocateWorker(profile); if (newlyFulfillableRequirements.isPresent()) { ResourceRequirement newSlots = newlyFulfillableRequirements.get(); if (newSlots.getNumberOfRequiredSlots() > 1) { pendingSlots.incrementCount(newSlots.getResourceProfile(), newSlots.getNumberOfRequiredSlots() - 1); } return true; } else { return false; } } @Override public int getNumberRegisteredSlots() { return taskExecutorManager.getNumberRegisteredSlots(); } @Override public int getNumberRegisteredSlotsOf(InstanceID instanceId) { return taskExecutorManager.getNumberRegisteredSlotsOf(instanceId); } @Override public int getNumberFreeSlots() { return taskExecutorManager.getNumberFreeSlots(); } @Override public int getNumberFreeSlotsOf(InstanceID instanceId) { return taskExecutorManager.getNumberFreeSlotsOf(instanceId); } @Override public Map<WorkerResourceSpec, Integer> getRequiredResources() { return taskExecutorManager.getRequiredWorkers(); } @Override public ResourceProfile getRegisteredResource() { return taskExecutorManager.getTotalRegisteredResources(); } @Override public ResourceProfile getRegisteredResourceOf(InstanceID instanceID) { return taskExecutorManager.getTotalRegisteredResourcesOf(instanceID); } @Override public ResourceProfile getFreeResource() { return taskExecutorManager.getTotalFreeResources(); } @Override public ResourceProfile getFreeResourceOf(InstanceID instanceID) { return taskExecutorManager.getTotalFreeResourcesOf(instanceID); } @Override public void setFailUnfulfillableRequest(boolean failUnfulfillableRequest) { } @Override public int getNumberPendingSlotRequests() { throw new UnsupportedOperationException(); } private void checkInit() { Preconditions.checkState(started, "The slot manager has not been started."); } }
class DeclarativeSlotManager implements SlotManager { private static final Logger LOG = LoggerFactory.getLogger(DeclarativeSlotManager.class); private final SlotTracker slotTracker; private final ResourceTracker resourceTracker; private final BiFunction<Executor, ResourceActions, TaskExecutorManager> taskExecutorManagerFactory; @Nullable private TaskExecutorManager taskExecutorManager; /** Timeout for slot requests to the task manager. */ private final Time taskManagerRequestTimeout; private final SlotMatchingStrategy slotMatchingStrategy; private final SlotManagerMetricGroup slotManagerMetricGroup; private final Map<JobID, String> jobMasterTargetAddresses = new HashMap<>(); private final HashMap<SlotID, CompletableFuture<Acknowledge>> pendingSlotAllocationFutures; /** ResourceManager's id. */ @Nullable private ResourceManagerId resourceManagerId; /** Executor for future callbacks which have to be "synchronized". */ @Nullable private Executor mainThreadExecutor; /** Callbacks for resource (de-)allocations. */ @Nullable private ResourceActions resourceActions; /** True iff the component has been started. */ private boolean started; public DeclarativeSlotManager( ScheduledExecutor scheduledExecutor, SlotManagerConfiguration slotManagerConfiguration, SlotManagerMetricGroup slotManagerMetricGroup, ResourceTracker resourceTracker, SlotTracker slotTracker) { Preconditions.checkNotNull(slotManagerConfiguration); this.taskManagerRequestTimeout = slotManagerConfiguration.getTaskManagerRequestTimeout(); this.slotManagerMetricGroup = Preconditions.checkNotNull(slotManagerMetricGroup); this.resourceTracker = Preconditions.checkNotNull(resourceTracker); pendingSlotAllocationFutures = new HashMap<>(16); this.slotTracker = Preconditions.checkNotNull(slotTracker); slotTracker.registerSlotStatusUpdateListener(createSlotStatusUpdateListener()); slotMatchingStrategy = slotManagerConfiguration.getSlotMatchingStrategy(); taskExecutorManagerFactory = (executor, resourceActions) -> new TaskExecutorManager( slotManagerConfiguration.getDefaultWorkerResourceSpec(), slotManagerConfiguration.getNumSlotsPerWorker(), slotManagerConfiguration.getMaxSlotNum(), slotManagerConfiguration.isWaitResultConsumedBeforeRelease(), slotManagerConfiguration.getRedundantTaskManagerNum(), slotManagerConfiguration.getTaskManagerTimeout(), scheduledExecutor, executor, resourceActions); resourceManagerId = null; resourceActions = null; mainThreadExecutor = null; taskExecutorManager = null; started = false; } private SlotStatusUpdateListener createSlotStatusUpdateListener() { return (taskManagerSlot, previous, current, jobId) -> { if (previous == SlotState.PENDING) { cancelAllocationFuture(taskManagerSlot.getSlotId()); } if (current == SlotState.PENDING) { resourceTracker.notifyAcquiredResource(jobId, taskManagerSlot.getResourceProfile()); } if (current == SlotState.FREE) { resourceTracker.notifyLostResource(jobId, taskManagerSlot.getResourceProfile()); } if (current == SlotState.ALLOCATED) { taskExecutorManager.occupySlot(taskManagerSlot.getInstanceId()); } if (previous == SlotState.ALLOCATED && current == SlotState.FREE) { taskExecutorManager.freeSlot(taskManagerSlot.getInstanceId()); } }; } private void cancelAllocationFuture(SlotID slotId) { final CompletableFuture<Acknowledge> acknowledgeCompletableFuture = pendingSlotAllocationFutures.remove(slotId); if (acknowledgeCompletableFuture != null) { acknowledgeCompletableFuture.cancel(false); } } /** * Starts the slot manager with the given leader id and resource manager actions. 
* * @param newResourceManagerId to use for communication with the task managers * @param newMainThreadExecutor to use to run code in the ResourceManager's main thread * @param newResourceActions to use for resource (de-)allocations */ @Override public void start(ResourceManagerId newResourceManagerId, Executor newMainThreadExecutor, ResourceActions newResourceActions) { LOG.info("Starting the slot manager."); this.resourceManagerId = Preconditions.checkNotNull(newResourceManagerId); mainThreadExecutor = Preconditions.checkNotNull(newMainThreadExecutor); resourceActions = Preconditions.checkNotNull(newResourceActions); taskExecutorManager = taskExecutorManagerFactory.apply(newMainThreadExecutor, newResourceActions); started = true; registerSlotManagerMetrics(); } private void registerSlotManagerMetrics() { slotManagerMetricGroup.gauge( MetricNames.TASK_SLOTS_AVAILABLE, () -> (long) getNumberFreeSlots()); slotManagerMetricGroup.gauge( MetricNames.TASK_SLOTS_TOTAL, () -> (long) getNumberRegisteredSlots()); } /** * Suspends the component. This clears the internal state of the slot manager. */ @Override /** * Closes the slot manager. * * @throws Exception if the close operation fails */ @Override public void close() throws Exception { LOG.info("Closing the slot manager."); suspend(); slotManagerMetricGroup.close(); } @Override public void processResourceRequirements(ResourceRequirements resourceRequirements) { checkInit(); LOG.debug("Received resource requirements from job {}: {}", resourceRequirements.getJobId(), resourceRequirements.getResourceRequirements()); if (resourceRequirements.getResourceRequirements().isEmpty()) { jobMasterTargetAddresses.remove(resourceRequirements.getJobId()); } else { jobMasterTargetAddresses.put(resourceRequirements.getJobId(), resourceRequirements.getTargetAddress()); } resourceTracker.notifyResourceRequirements(resourceRequirements.getJobId(), resourceRequirements.getResourceRequirements()); checkResourceRequirements(); } /** * Registers a new task manager at the slot manager. This will make the task managers slots * known and, thus, available for allocation. 
* * @param taskExecutorConnection for the new task manager * @param initialSlotReport for the new task manager * @return True if the task manager has not been registered before and is registered successfully; otherwise false */ @Override public boolean registerTaskManager(final TaskExecutorConnection taskExecutorConnection, SlotReport initialSlotReport) { checkInit(); LOG.debug("Registering task executor {} under {} at the slot manager.", taskExecutorConnection.getResourceID(), taskExecutorConnection.getInstanceID()); if (taskExecutorManager.isTaskManagerRegistered(taskExecutorConnection.getInstanceID())) { LOG.debug("Task executor {} was already registered.", taskExecutorConnection.getResourceID()); reportSlotStatus(taskExecutorConnection.getInstanceID(), initialSlotReport); return false; } else { if (!taskExecutorManager.registerTaskManager(taskExecutorConnection, initialSlotReport)) { LOG.debug("Task executor {} could not be registered.", taskExecutorConnection.getResourceID()); return false; } for (SlotStatus slotStatus : initialSlotReport) { slotTracker.addSlot( slotStatus.getSlotID(), slotStatus.getResourceProfile(), taskExecutorConnection, slotStatus.getJobID()); } checkResourceRequirements(); return true; } } @Override public boolean unregisterTaskManager(InstanceID instanceId, Exception cause) { checkInit(); LOG.debug("Unregistering task executor {} from the slot manager.", instanceId); if (taskExecutorManager.isTaskManagerRegistered(instanceId)) { slotTracker.removeSlots(taskExecutorManager.getSlotsOf(instanceId)); taskExecutorManager.unregisterTaskExecutor(instanceId); checkResourceRequirements(); return true; } else { LOG.debug("There is no task executor registered with instance ID {}. Ignoring this message.", instanceId); return false; } } /** * Reports the current slot allocations for a task manager identified by the given instance id. * * @param instanceId identifying the task manager for which to report the slot status * @param slotReport containing the status for all of its slots * @return true if the slot status has been updated successfully, otherwise false */ @Override public boolean reportSlotStatus(InstanceID instanceId, SlotReport slotReport) { checkInit(); LOG.debug("Received slot report from instance {}: {}.", instanceId, slotReport); if (taskExecutorManager.isTaskManagerRegistered(instanceId)) { slotTracker.notifySlotStatus(slotReport); checkResourceRequirements(); return true; } else { LOG.debug("Received slot report for unknown task manager with instance id {}. Ignoring this report.", instanceId); return false; } } /** * Free the given slot from the given allocation. If the slot is still allocated by the given * allocation id, then the slot will be marked as free and will be subject to new slot requests. * * @param slotId identifying the slot to free * @param allocationId with which the slot is presumably allocated */ @Override public void freeSlot(SlotID slotId, AllocationID allocationId) { checkInit(); LOG.debug("Freeing slot {}.", slotId); slotTracker.notifyFree(slotId); checkResourceRequirements(); } /** * Matches resource requirements against available resources. In a first round requirements are matched against free * slot, and any match results in a slot allocation. * The remaining unfulfilled requirements are matched against pending slots, allocating more workers if no matching * pending slot could be found. * If the requirements for a job could not be fulfilled then a notification is sent to the job master informing it * as such. 
* * <p>Performance notes: At it's core this method loops, for each job, over all free/pending slots for each required slot, trying to * find a matching slot. One should generally go in with the assumption that this runs in * numberOfJobsRequiringResources * numberOfRequiredSlots * numberOfFreeOrPendingSlots. * This is especially important when dealing with pending slots, as matches between requirements and pending slots * are not persisted and recomputed on each call. * This may required further refinements in the future; e.g., persisting the matches between requirements and pending slots, * or not matching against pending slots at all. * * <p>When dealing with unspecific resource profiles (i.e., {@link ResourceProfile * then the number of free/pending slots is not relevant because we only need exactly 1 comparison to determine whether * a slot can be fulfilled or not, since they are all the same anyway. * * <p>When dealing with specific resource profiles things can be a lot worse, with the classical cases * where either no matches are found, or only at the very end of the iteration. * In the absolute worst case, with J jobs, requiring R slots each with a unique resource profile such each pair * of these profiles is not matching, and S free/pending slots that don't fulfill any requirement, then this method * does a total of J*R*S resource profile comparisons. */ private void checkResourceRequirements() { final Map<JobID, Collection<ResourceRequirement>> missingResources = resourceTracker.getMissingResources(); if (missingResources.isEmpty()) { return; } final Map<JobID, ResourceCounter> unfulfilledRequirements = new LinkedHashMap<>(); for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements : missingResources.entrySet()) { final JobID jobId = resourceRequirements.getKey(); final ResourceCounter unfulfilledJobRequirements = tryAllocateSlotsForJob(jobId, resourceRequirements.getValue()); if (!unfulfilledJobRequirements.isEmpty()) { unfulfilledRequirements.put(jobId, unfulfilledJobRequirements); } } if (unfulfilledRequirements.isEmpty()) { return; } final ResourceCounter pendingSlots = new ResourceCounter(taskExecutorManager.getPendingTaskManagerSlots().stream().collect( Collectors.groupingBy( PendingTaskManagerSlot::getResourceProfile, Collectors.summingInt(x -> 1)))); for (Map.Entry<JobID, ResourceCounter> unfulfilledRequirement : unfulfilledRequirements.entrySet()) { tryFulfillRequirementsWithPendingSlots( unfulfilledRequirement.getKey(), unfulfilledRequirement.getValue().getResourceProfilesWithCount(), pendingSlots); } } private ResourceCounter tryAllocateSlotsForJob(JobID jobId, Collection<ResourceRequirement> missingResources) { final ResourceCounter outstandingRequirements = new ResourceCounter(); for (ResourceRequirement resourceRequirement : missingResources) { int numMissingSlots = internalTryAllocateSlots(jobId, jobMasterTargetAddresses.get(jobId), resourceRequirement); if (numMissingSlots > 0) { outstandingRequirements.incrementCount(resourceRequirement.getResourceProfile(), numMissingSlots); } } return outstandingRequirements; } /** * Tries to allocate slots for the given requirement. If there are not enough slots available, the * resource manager is informed to allocate more resources. 
* * @param jobId job to allocate slots for * @param targetAddress address of the jobmaster * @param resourceRequirement required slots * @return the number of missing slots */ private int internalTryAllocateSlots(JobID jobId, String targetAddress, ResourceRequirement resourceRequirement) { final ResourceProfile requiredResource = resourceRequirement.getResourceProfile(); Collection<TaskManagerSlotInformation> freeSlots = slotTracker.getFreeSlots(); int numUnfulfilled = 0; for (int x = 0; x < resourceRequirement.getNumberOfRequiredSlots(); x++) { final Optional<TaskManagerSlotInformation> reservedSlot = slotMatchingStrategy.findMatchingSlot(requiredResource, freeSlots, this::getNumberRegisteredSlotsOf); if (reservedSlot.isPresent()) { allocateSlot(reservedSlot.get(), jobId, targetAddress, requiredResource); } else { int numRemaining = resourceRequirement.getNumberOfRequiredSlots() - x; numUnfulfilled += numRemaining; break; } } return numUnfulfilled; } /** * Allocates the given slot. This entails sending a registration message to the task manager and treating failures. * * @param taskManagerSlot slot to allocate * @param jobId job for which the slot should be allocated for * @param targetAddress address of the job master * @param resourceProfile resource profile for the requirement for which the slot is used */ private void allocateSlot(TaskManagerSlotInformation taskManagerSlot, JobID jobId, String targetAddress, ResourceProfile resourceProfile) { final SlotID slotId = taskManagerSlot.getSlotId(); LOG.debug("Starting allocation of slot {} for job {} with resource profile {}.", slotId, jobId, resourceProfile); final InstanceID instanceId = taskManagerSlot.getInstanceId(); if (!taskExecutorManager.isTaskManagerRegistered(instanceId)) { throw new IllegalStateException("Could not find a registered task manager for instance id " + instanceId + '.'); } final TaskExecutorConnection taskExecutorConnection = taskManagerSlot.getTaskManagerConnection(); final TaskExecutorGateway gateway = taskExecutorConnection.getTaskExecutorGateway(); final CompletableFuture<Acknowledge> completableFuture = new CompletableFuture<>(); slotTracker.notifyAllocationStart(slotId, jobId); taskExecutorManager.markUsed(instanceId); pendingSlotAllocationFutures.put(slotId, completableFuture); CompletableFuture<Acknowledge> requestFuture = gateway.requestSlot( slotId, jobId, new AllocationID(), resourceProfile, targetAddress, resourceManagerId, taskManagerRequestTimeout); requestFuture.whenComplete( (Acknowledge acknowledge, Throwable throwable) -> { if (acknowledge != null) { completableFuture.complete(acknowledge); } else { completableFuture.completeExceptionally(throwable); } }); CompletableFuture<Void> slotAllocationResponseProcessingFuture = completableFuture.handleAsync( (Acknowledge acknowledge, Throwable throwable) -> { if (acknowledge != null) { LOG.trace("Completed allocation of slot {} for job {}.", slotId, jobId); slotTracker.notifyAllocationComplete(slotId, jobId); } else { if (throwable instanceof SlotOccupiedException) { SlotOccupiedException exception = (SlotOccupiedException) throwable; LOG.debug("Tried allocating slot {} for job {}, but it was already allocated for job {}.", slotId, jobId, exception.getJobId()); slotTracker.notifySlotStatus(Collections.singleton(new SlotStatus(slotId, taskManagerSlot.getResourceProfile(), exception.getJobId(), exception.getAllocationId()))); } else { if (throwable instanceof CancellationException) { LOG.debug("Cancelled allocation of slot {} for job {}.", slotId, jobId, 
throwable); } else { LOG.warn("Slot allocation for slot {} for job {} failed.", slotId, jobId, throwable); slotTracker.notifyFree(slotId); } } checkResourceRequirements(); } return null; }, mainThreadExecutor); FutureUtils.assertNoException(slotAllocationResponseProcessingFuture); } private void tryFulfillRequirementsWithPendingSlots(JobID jobId, Map<ResourceProfile, Integer> missingResources, ResourceCounter pendingSlots) { for (Map.Entry<ResourceProfile, Integer> missingResource : missingResources.entrySet()) { ResourceProfile profile = missingResource.getKey(); for (int i = 0; i < missingResource.getValue(); i++) { if (!tryFulfillWithPendingSlots(profile, pendingSlots)) { boolean couldAllocateWorkerAndReserveSlot = tryAllocateWorkerAndReserveSlot(profile, pendingSlots); if (!couldAllocateWorkerAndReserveSlot) { LOG.warn("Could not fulfill resource requirements of job {}.", jobId); resourceActions.notifyNotEnoughResourcesAvailable(jobId, resourceTracker.getAcquiredResources(jobId)); return; } } } } } private boolean tryFulfillWithPendingSlots(ResourceProfile resourceProfile, ResourceCounter pendingSlots) { Set<ResourceProfile> pendingSlotProfiles = pendingSlots.getResourceProfiles(); if (pendingSlotProfiles.contains(resourceProfile)) { pendingSlots.decrementCount(resourceProfile, 1); return true; } for (ResourceProfile pendingSlotProfile : pendingSlotProfiles) { if (pendingSlotProfile.isMatching(resourceProfile)) { pendingSlots.decrementCount(pendingSlotProfile, 1); return true; } } return false; } private boolean tryAllocateWorkerAndReserveSlot(ResourceProfile profile, ResourceCounter pendingSlots) { Optional<ResourceRequirement> newlyFulfillableRequirements = taskExecutorManager.allocateWorker(profile); if (newlyFulfillableRequirements.isPresent()) { ResourceRequirement newSlots = newlyFulfillableRequirements.get(); if (newSlots.getNumberOfRequiredSlots() > 1) { pendingSlots.incrementCount(newSlots.getResourceProfile(), newSlots.getNumberOfRequiredSlots() - 1); } return true; } else { return false; } } @Override public int getNumberRegisteredSlots() { return taskExecutorManager.getNumberRegisteredSlots(); } @Override public int getNumberRegisteredSlotsOf(InstanceID instanceId) { return taskExecutorManager.getNumberRegisteredSlotsOf(instanceId); } @Override public int getNumberFreeSlots() { return taskExecutorManager.getNumberFreeSlots(); } @Override public int getNumberFreeSlotsOf(InstanceID instanceId) { return taskExecutorManager.getNumberFreeSlotsOf(instanceId); } @Override public Map<WorkerResourceSpec, Integer> getRequiredResources() { return taskExecutorManager.getRequiredWorkers(); } @Override public ResourceProfile getRegisteredResource() { return taskExecutorManager.getTotalRegisteredResources(); } @Override public ResourceProfile getRegisteredResourceOf(InstanceID instanceID) { return taskExecutorManager.getTotalRegisteredResourcesOf(instanceID); } @Override public ResourceProfile getFreeResource() { return taskExecutorManager.getTotalFreeResources(); } @Override public ResourceProfile getFreeResourceOf(InstanceID instanceID) { return taskExecutorManager.getTotalFreeResourcesOf(instanceID); } @Override public void setFailUnfulfillableRequest(boolean failUnfulfillableRequest) { } @Override public int getNumberPendingSlotRequests() { throw new UnsupportedOperationException(); } private void checkInit() { Preconditions.checkState(started, "The slot manager has not been started."); } }
Let's switch these conditions so that we can avoid iterating the modules every time.
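The reordering being requested puts the cheap checks first: 'compile' is a plain flag and compilerPluginToml() is an already-loaded Optional, so isProjectEmpty(project), which iterates every module of the package, only runs when its result can actually matter. A sketch of the reordered guard, matching the updated method body in the fields that follow:

// Cheap boolean/Optional check first; the module iteration in
// isProjectEmpty(project) is short-circuited away in the
// compile-with-compiler-plugin case.
if (!(this.compile && project.currentPackage().compilerPluginToml().isPresent())) {
    if (isProjectEmpty(project)) {
        CommandUtil.printError(this.errStream,
                "package is empty. please add at least one .bal file",
                null, false);
        CommandUtil.exitError(this.exitWhenFinish);
        return;
    }
}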
public void execute() { if (this.helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); this.errStream.println(commandUsageInfo); return; } Project project; if (FileUtils.hasExtension(this.projectPath)) { if (coverage != null && coverage) { this.outStream.println("Code coverage is not yet supported with single bal files. Ignoring the flag " + "and continuing the test run...\n"); } coverage = false; } BuildOptions buildOptions = constructBuildOptions(); boolean isSingleFileBuild = false; if (FileUtils.hasExtension(this.projectPath)) { if (this.compile) { CommandUtil.printError(this.errStream, "'-c' or '--compile' can only be used with a Ballerina package.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } try { project = SingleFileProject.load(this.projectPath, buildOptions); } catch (ProjectException e) { CommandUtil.printError(this.errStream, e.getMessage(), null, false); CommandUtil.exitError(this.exitWhenFinish); return; } isSingleFileBuild = true; } else { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported when building a single Ballerina " + "file.", "bal build -o <output-file> <ballerina-file> ", true); CommandUtil.exitError(this.exitWhenFinish); return; } try { project = BuildProject.load(this.projectPath, buildOptions); } catch (ProjectException e) { CommandUtil.printError(this.errStream, e.getMessage(), null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } if (isProjectEmpty(project)) { if (!(this.compile && project.currentPackage().compilerPluginToml().isPresent())) { CommandUtil.printError(this.errStream, "package is empty. please add at least one .bal file", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } if (this.compile && project.currentPackage().ballerinaToml().get().tomlDocument().toml() .getTable("package").isEmpty()) { CommandUtil.printError(this.errStream, "'package' information not found in " + ProjectConstants.BALLERINA_TOML, null, true); CommandUtil.exitError(this.exitWhenFinish); return; } if (!project.buildOptions().skipTests() && this.debugPort != null) { System.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort); } if (project.buildOptions().codeCoverage()) { if (coverageFormat != null) { if (!coverageFormat.equals(JACOCO_XML_FORMAT)) { String errMsg = "unsupported coverage report format '" + coverageFormat + "' found. 
Only '" + JACOCO_XML_FORMAT + "' format is supported."; CommandUtil.printError(this.errStream, errMsg, null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } } else { if (includes != null) { this.outStream.println("warning: ignoring --includes flag since code coverage is not enabled"); } if (coverageFormat != null) { this.outStream.println("warning: ignoring --coverage-format flag since code coverage is not " + "enabled"); } } try { readSettings(); } catch (SettingsTomlException e) { this.outStream.println("warning: " + e.getMessage()); } TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder() .addTask(new CleanTargetDirTask(), isSingleFileBuild) .addTask(new ResolveMavenDependenciesTask(outStream)) .addTask(new CompileTask(outStream, errStream)) .addTask(new RunTestsTask(outStream, errStream, includes, coverageFormat), project.buildOptions().skipTests() || isSingleFileBuild) .addTask(new CreateBalaTask(outStream), isSingleFileBuild || !this.compile) .addTask(new CreateExecutableTask(outStream, this.output), this.compile) .build(); taskExecutor.executeTasks(project); if (this.exitWhenFinish) { Runtime.getRuntime().exit(0); } }
if (!(this.compile && project.currentPackage().compilerPluginToml().isPresent())) {
public void execute() { if (this.helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); this.errStream.println(commandUsageInfo); return; } Project project; if (FileUtils.hasExtension(this.projectPath)) { if (coverage != null && coverage) { this.outStream.println("Code coverage is not yet supported with single bal files. Ignoring the flag " + "and continuing the test run...\n"); } coverage = false; } BuildOptions buildOptions = constructBuildOptions(); boolean isSingleFileBuild = false; if (FileUtils.hasExtension(this.projectPath)) { if (this.compile) { CommandUtil.printError(this.errStream, "'-c' or '--compile' can only be used with a Ballerina package.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } try { project = SingleFileProject.load(this.projectPath, buildOptions); } catch (ProjectException e) { CommandUtil.printError(this.errStream, e.getMessage(), null, false); CommandUtil.exitError(this.exitWhenFinish); return; } isSingleFileBuild = true; } else { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported when building a single Ballerina " + "file.", "bal build -o <output-file> <ballerina-file> ", true); CommandUtil.exitError(this.exitWhenFinish); return; } try { project = BuildProject.load(this.projectPath, buildOptions); } catch (ProjectException e) { CommandUtil.printError(this.errStream, e.getMessage(), null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } if (!(this.compile && project.currentPackage().compilerPluginToml().isPresent())) { if (isProjectEmpty(project)) { CommandUtil.printError(this.errStream, "package is empty. please add at least one .bal file", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } if (this.compile && project.currentPackage().ballerinaToml().get().tomlDocument().toml() .getTable("package").isEmpty()) { CommandUtil.printError(this.errStream, "'package' information not found in " + ProjectConstants.BALLERINA_TOML, null, true); CommandUtil.exitError(this.exitWhenFinish); return; } if (!project.buildOptions().skipTests() && this.debugPort != null) { System.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort); } if (project.buildOptions().codeCoverage()) { if (coverageFormat != null) { if (!coverageFormat.equals(JACOCO_XML_FORMAT)) { String errMsg = "unsupported coverage report format '" + coverageFormat + "' found. 
Only '" + JACOCO_XML_FORMAT + "' format is supported."; CommandUtil.printError(this.errStream, errMsg, null, false); CommandUtil.exitError(this.exitWhenFinish); return; } } } else { if (includes != null) { this.outStream.println("warning: ignoring --includes flag since code coverage is not enabled"); } if (coverageFormat != null) { this.outStream.println("warning: ignoring --coverage-format flag since code coverage is not " + "enabled"); } } try { readSettings(); } catch (SettingsTomlException e) { this.outStream.println("warning: " + e.getMessage()); } TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder() .addTask(new CleanTargetDirTask(), isSingleFileBuild) .addTask(new ResolveMavenDependenciesTask(outStream)) .addTask(new CompileTask(outStream, errStream)) .addTask(new RunTestsTask(outStream, errStream, includes, coverageFormat), project.buildOptions().skipTests() || isSingleFileBuild) .addTask(new CreateBalaTask(outStream), isSingleFileBuild || !this.compile) .addTask(new CreateExecutableTask(outStream, this.output), this.compile) .build(); taskExecutor.executeTasks(project); if (this.exitWhenFinish) { Runtime.getRuntime().exit(0); } }
class BuildCommand implements BLauncherCmd { private final PrintStream outStream; private final PrintStream errStream; private boolean exitWhenFinish; private boolean skipCopyLibsFromDist; public BuildCommand() { this.projectPath = Paths.get(System.getProperty(ProjectConstants.USER_DIR)); this.outStream = System.out; this.errStream = System.err; this.exitWhenFinish = true; this.skipCopyLibsFromDist = false; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, boolean compile) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.compile = compile; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, Boolean skipTests, Boolean testReport, Boolean coverage, String coverageFormat) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.skipTests = skipTests; this.testReport = testReport; this.coverage = coverage; this.coverageFormat = coverageFormat; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, String output) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.output = output; } @CommandLine.Option(names = {"--compile", "-c"}, description = "Compile the source without generating " + "executable(s).") private boolean compile; @CommandLine.Option(names = {"--output", "-o"}, description = "Write the output to the given file. 
The provided " + "output file name may or may not contain the " + "'.jar' extension.") private String output; @CommandLine.Option(names = {"--offline"}, description = "Build/Compile offline without downloading " + "dependencies.") private Boolean offline; @CommandLine.Option(names = {"--skip-tests"}, description = "Skip test compilation and execution.") private Boolean skipTests; @CommandLine.Parameters (arity = "0..1") private final Path projectPath; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-bir-file", hidden = true) private String dumpBIRFile; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.") private Boolean experimentalFlag; @CommandLine.Option(names = "--debug", description = "run tests in remote debugging mode") private String debugPort; private static final String buildCmd = "bal build [-o <output>] [--offline] [--skip-tests] [--taint-check]\n" + " [<ballerina-file | package-path>]"; @CommandLine.Option(names = "--test-report", description = "enable test report generation") private Boolean testReport; @CommandLine.Option(names = "--code-coverage", description = "enable code coverage") private Boolean coverage; @CommandLine.Option(names = "--coverage-format", description = "list of supported coverage report formats") private String coverageFormat; @CommandLine.Option(names = "--observability-included", description = "package observability in the executable " + "JAR file(s).") private Boolean observabilityIncluded; @CommandLine.Option(names = "--cloud", description = "Enable cloud artifact generation") private String cloud; @CommandLine.Option(names = "--includes", hidden = true, description = "hidden option for code coverage to include all classes") private String includes; @CommandLine.Option(names = "--list-conflicted-classes", description = "list conflicted classes when generating executable") private Boolean listConflictedClasses; private boolean isProjectEmpty(Project project) { for (ModuleId moduleId : project.currentPackage().moduleIds()) { if (!project.currentPackage().module(moduleId).documentIds().isEmpty()) { return false; } } return true; } private BuildOptions constructBuildOptions() { return new BuildOptionsBuilder() .codeCoverage(coverage) .experimental(experimentalFlag) .offline(offline) .skipTests(skipTests) .testReport(testReport) .observabilityIncluded(observabilityIncluded) .cloud(cloud) .dumpBir(dumpBIR) .dumpBirFile(dumpBIRFile) .listConflictedClasses(listConflictedClasses) .build(); } @Override public String getName() { return BUILD_COMMAND; } @Override public void printLongDesc(StringBuilder out) { out.append("Build a Ballerina project and produce an executable JAR file. The \n"); out.append("executable \".jar\" file will be created in the <PROJECT-ROOT>/target/bin directory. \n"); out.append("\n"); out.append("Build a single Ballerina file. This creates an executable .jar file in the \n"); out.append("current directory. The name of the executable file will be \n"); out.append("<ballerina-file-name>.jar. \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to the given output file name. The -o flag will only \n"); out.append("work for single files. 
\n"); } @Override public void printUsage(StringBuilder out) { out.append(" bal build [-o <output>] [--offline] [--skip-tests]\\n\" +\n" + " \" [<ballerina-file | package-path>]"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } }
class BuildCommand implements BLauncherCmd { private final PrintStream outStream; private final PrintStream errStream; private boolean exitWhenFinish; private boolean skipCopyLibsFromDist; public BuildCommand() { this.projectPath = Paths.get(System.getProperty(ProjectConstants.USER_DIR)); this.outStream = System.out; this.errStream = System.err; this.exitWhenFinish = true; this.skipCopyLibsFromDist = false; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, boolean compile) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.compile = compile; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, Boolean skipTests, Boolean testReport, Boolean coverage, String coverageFormat) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.skipTests = skipTests; this.testReport = testReport; this.coverage = coverage; this.coverageFormat = coverageFormat; } public BuildCommand(Path projectPath, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, String output) { this.projectPath = projectPath; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.output = output; } @CommandLine.Option(names = {"--compile", "-c"}, description = "Compile the source without generating " + "executable(s).") private boolean compile; @CommandLine.Option(names = {"--output", "-o"}, description = "Write the output to the given file. 
The provided " + "output file name may or may not contain the " + "'.jar' extension.") private String output; @CommandLine.Option(names = {"--offline"}, description = "Build/Compile offline without downloading " + "dependencies.") private Boolean offline; @CommandLine.Option(names = {"--skip-tests"}, description = "Skip test compilation and execution.") private Boolean skipTests; @CommandLine.Parameters (arity = "0..1") private final Path projectPath; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-bir-file", hidden = true) private String dumpBIRFile; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.") private Boolean experimentalFlag; @CommandLine.Option(names = "--debug", description = "run tests in remote debugging mode") private String debugPort; private static final String buildCmd = "bal build [-o <output>] [--offline] [--skip-tests] [--taint-check]\n" + " [<ballerina-file | package-path>]"; @CommandLine.Option(names = "--test-report", description = "enable test report generation") private Boolean testReport; @CommandLine.Option(names = "--code-coverage", description = "enable code coverage") private Boolean coverage; @CommandLine.Option(names = "--coverage-format", description = "list of supported coverage report formats") private String coverageFormat; @CommandLine.Option(names = "--observability-included", description = "package observability in the executable " + "JAR file(s).") private Boolean observabilityIncluded; @CommandLine.Option(names = "--cloud", description = "Enable cloud artifact generation") private String cloud; @CommandLine.Option(names = "--includes", hidden = true, description = "hidden option for code coverage to include all classes") private String includes; @CommandLine.Option(names = "--list-conflicted-classes", description = "list conflicted classes when generating executable") private Boolean listConflictedClasses; private boolean isProjectEmpty(Project project) { for (ModuleId moduleId : project.currentPackage().moduleIds()) { Module module = project.currentPackage().module(moduleId); if (!module.documentIds().isEmpty() || !module.testDocumentIds().isEmpty()) { return false; } } return true; } private BuildOptions constructBuildOptions() { return new BuildOptionsBuilder() .codeCoverage(coverage) .experimental(experimentalFlag) .offline(offline) .skipTests(skipTests) .testReport(testReport) .observabilityIncluded(observabilityIncluded) .cloud(cloud) .dumpBir(dumpBIR) .dumpBirFile(dumpBIRFile) .listConflictedClasses(listConflictedClasses) .build(); } @Override public String getName() { return BUILD_COMMAND; } @Override public void printLongDesc(StringBuilder out) { out.append("Build a Ballerina project and produce an executable JAR file. The \n"); out.append("executable \".jar\" file will be created in the <PROJECT-ROOT>/target/bin directory. \n"); out.append("\n"); out.append("Build a single Ballerina file. This creates an executable .jar file in the \n"); out.append("current directory. The name of the executable file will be \n"); out.append("<ballerina-file-name>.jar. \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to the given output file name. The -o flag will only \n"); out.append("work for single files. 
\n"); } @Override public void printUsage(StringBuilder out) { out.append(" bal build [-o <output>] [--offline] [--skip-tests]\\n\" +\n" + " \" [<ballerina-file | package-path>]"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } }
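The only functional change between the context_before and context_after fields of the row above is in isProjectEmpty: the updated check also counts test documents, so a package containing only tests is no longer treated as empty. A minimal side-by-side sketch, using only names that appear in the row itself:

// Before: a module whose sources live only under tests/ was reported empty.
private boolean isProjectEmpty(Project project) {
    for (ModuleId moduleId : project.currentPackage().moduleIds()) {
        if (!project.currentPackage().module(moduleId).documentIds().isEmpty()) {
            return false;
        }
    }
    return true;
}

// After: test documents count as well, so 'bal build' accepts test-only packages.
private boolean isProjectEmpty(Project project) {
    for (ModuleId moduleId : project.currentPackage().moduleIds()) {
        Module module = project.currentPackage().module(moduleId);
        if (!module.documentIds().isEmpty() || !module.testDocumentIds().isEmpty()) {
            return false;
        }
    }
    return true;
}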
Again, this is not a change introduced by this PR, but shouldn't we handle other possible exceptions too? I'm not sure why we've handled (only) an NPE here.
private static BValue getRecord(BType entityBodyType, BValue bjson) { try { return JSONUtils.convertJSONToStruct(bjson, (BStructureType) entityBodyType); } catch (NullPointerException ex) { throw new BallerinaConnectorException("cannot convert payload to record type: " + entityBodyType.getName()); } }
} catch (NullPointerException ex) {
private static BValue getRecord(BType entityBodyType, BValue bjson) { try { return JSONUtils.convertJSONToStruct(bjson, (BStructureType) entityBodyType); } catch (NullPointerException ex) { throw new BallerinaConnectorException("cannot convert payload to record type: " + entityBodyType.getName()); } }
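The review question above points out that only a NullPointerException is caught, while JSONUtils.convertJSONToStruct can plausibly fail in other ways (for example a ClassCastException from the (BStructureType) cast, or a library-specific runtime exception). A hedged sketch of a wider handler, reusing the message-only BallerinaConnectorException constructor already used throughout this file; this is a suggestion, not the change actually recorded in this row:

private static BValue getRecord(BType entityBodyType, BValue bjson) {
    try {
        return JSONUtils.convertJSONToStruct(bjson, (BStructureType) entityBodyType);
    } catch (RuntimeException ex) {
        // NPE, ClassCastException, and other unchecked conversion failures all land here.
        throw new BallerinaConnectorException(
                "cannot convert payload to record type: " + entityBodyType.getName());
    }
}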
class HttpDispatcher { public static HttpService findService(HTTPServicesRegistry servicesRegistry, HttpCarbonMessage inboundReqMsg) { try { Map<String, HttpService> servicesOnInterface; List<String> sortedServiceURIs; String hostName = inboundReqMsg.getHeader(HttpHeaderNames.HOST.toString()); if (hostName != null && servicesRegistry.getServicesMapHolder(hostName) != null) { servicesOnInterface = servicesRegistry.getServicesByHost(hostName); sortedServiceURIs = servicesRegistry.getSortedServiceURIsByHost(hostName); } else { servicesOnInterface = servicesRegistry.getServicesByHost(DEFAULT_HOST); sortedServiceURIs = servicesRegistry.getSortedServiceURIsByHost(DEFAULT_HOST); } String rawUri = (String) inboundReqMsg.getProperty(HttpConstants.TO); inboundReqMsg.setProperty(HttpConstants.RAW_URI, rawUri); Map<String, Map<String, String>> matrixParams = new HashMap<>(); String uriWithoutMatrixParams = URIUtil.extractMatrixParams(rawUri, matrixParams); inboundReqMsg.setProperty(HttpConstants.TO, uriWithoutMatrixParams); inboundReqMsg.setProperty(HttpConstants.MATRIX_PARAMS, matrixParams); URI validatedUri = getValidatedURI(uriWithoutMatrixParams); String basePath = servicesRegistry.findTheMostSpecificBasePath(validatedUri.getPath(), servicesOnInterface, sortedServiceURIs); if (basePath == null) { inboundReqMsg.setProperty(HttpConstants.HTTP_STATUS_CODE, 404); throw new BallerinaConnectorException("no matching service found for path : " + validatedUri.getRawPath()); } HttpService service = servicesOnInterface.get(basePath); setInboundReqProperties(inboundReqMsg, validatedUri, basePath); return service; } catch (Exception e) { throw new BallerinaConnectorException(e.getMessage()); } } private static void setInboundReqProperties(HttpCarbonMessage inboundReqMsg, URI requestUri, String basePath) { String subPath = URIUtil.getSubPath(requestUri.getPath(), basePath); inboundReqMsg.setProperty(HttpConstants.BASE_PATH, basePath); inboundReqMsg.setProperty(HttpConstants.SUB_PATH, subPath); inboundReqMsg.setProperty(HttpConstants.QUERY_STR, requestUri.getQuery()); inboundReqMsg.setProperty(HttpConstants.RAW_QUERY_STR, requestUri.getRawQuery()); } public static URI getValidatedURI(String uriStr) { URI requestUri; try { requestUri = URI.create(uriStr); } catch (IllegalArgumentException e) { throw new BallerinaConnectorException(e.getMessage()); } return requestUri; } /** * This method finds the matching resource for the incoming request. * * @param servicesRegistry HTTP service registry * @param inboundMessage incoming message. * @return matching resource. 
*/ public static HttpResource findResource(HTTPServicesRegistry servicesRegistry, HttpCarbonMessage inboundMessage) { String protocol = (String) inboundMessage.getProperty(HttpConstants.PROTOCOL); if (protocol == null) { throw new BallerinaConnectorException("protocol not defined in the incoming request"); } try { HttpService service = HttpDispatcher.findService(servicesRegistry, inboundMessage); if (service == null) { throw new BallerinaConnectorException("no Service found to handle the service request"); } return HttpResourceDispatcher.findResource(service, inboundMessage); } catch (Exception e) { throw new BallerinaConnectorException(e.getMessage()); } } public static BValue[] getSignatureParameters(HttpResource httpResource, HttpCarbonMessage httpCarbonMessage, Struct endpointConfig) { ProgramFile programFile = httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(); BMap<String, BValue> serviceEndpoint = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, SERVICE_ENDPOINT); BMap<String, BValue> connection = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, CONNECTION); BMap<String, BValue> inRequest = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, REQUEST); BMap<String, BValue> inRequestEntity = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_MIME, ENTITY); BMap<String, BValue> mediaType = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_MIME, MEDIA_TYPE); HttpUtil.enrichServiceEndpointInfo(serviceEndpoint, httpCarbonMessage, httpResource, endpointConfig); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage, endpointConfig); serviceEndpoint.put(SERVICE_ENDPOINT_CONNECTION_FIELD, connection); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage, endpointConfig); HttpUtil.populateInboundRequest(inRequest, inRequestEntity, mediaType, httpCarbonMessage, programFile); SignatureParams signatureParams = httpResource.getSignatureParams(); BValue[] bValues = new BValue[signatureParams.getParamCount()]; bValues[0] = serviceEndpoint; bValues[1] = inRequest; if (signatureParams.getParamCount() == 2) { return bValues; } Map<String, String> resourceArgumentValues = (Map<String, String>) httpCarbonMessage.getProperty(HttpConstants.RESOURCE_ARGS); for (int i = 0; i < signatureParams.getPathParams().size(); i++) { String argumentValue = resourceArgumentValues.get(signatureParams.getPathParams().get(i).getVarName()); if (argumentValue != null) { try { argumentValue = URLDecoder.decode(argumentValue, "UTF-8"); } catch (UnsupportedEncodingException e) { } } bValues[i + 2] = new BString(argumentValue); } if (signatureParams.getEntityBody() == null) { return bValues; } try { bValues[bValues.length - 1] = populateAndGetEntityBody(inRequest, inRequestEntity, signatureParams.getEntityBody().getVarType()); } catch (BallerinaException ex) { httpCarbonMessage.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpConstants.HTTP_BAD_REQUEST); throw new BallerinaConnectorException("data binding failed: " + ex.getMessage()); } return bValues; } private static BValue populateAndGetEntityBody(BMap<String, BValue> inRequest, BMap<String, BValue> inRequestEntity, BType entityBodyType) { HttpUtil.populateEntityBody(null, inRequest, inRequestEntity, true); try { switch (entityBodyType.getTag()) { case TypeTags.STRING_TAG: BString stringDataSource = EntityBodyHandler.constructStringDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, 
stringDataSource); return stringDataSource; case TypeTags.JSON_TAG: BValue bjson = EntityBodyHandler.constructJsonDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bjson); return bjson; case TypeTags.XML_TAG: BXML bxml = EntityBodyHandler.constructXmlDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bxml); return bxml; case TypeTags.ARRAY_TAG: if (((BArrayType) entityBodyType).getElementType().getTag() == TypeTags.BYTE_TAG) { BByteArray blobDataSource = EntityBodyHandler.constructBlobDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, blobDataSource); return blobDataSource; } else if (((BArrayType) entityBodyType).getElementType().getTag() == TypeTags.RECORD_TYPE_TAG) { bjson = getBJsonValue(inRequestEntity); return getRecordArray(entityBodyType, bjson); } else { throw new BallerinaConnectorException("Incompatible Element type found inside an array " + ((BArrayType) entityBodyType).getElementType().getName()); } case TypeTags.RECORD_TYPE_TAG: bjson = getBJsonValue(inRequestEntity); return getRecord(entityBodyType, bjson); default: } } catch (Exception ex) { throw new BallerinaConnectorException("Error in reading payload : " + ex.getMessage()); } return null; } /** * Convert a json to the relevant record type. * * @param entityBodyType Represents entity body type * @param bjson Represents the json value that needs to be converted * @return the relevant ballerina record or object */ /** * Convert a json array to the relevant record array. * * @param entityBodyType Represents entity body type * @param bjson Represents the json array that needs to be converted * @return the relevant ballerina record or object array */ private static BValue getRecordArray(BType entityBodyType, BValue bjson) { try { return JSONUtils.convertJSONToBArray(bjson, (BArrayType) entityBodyType); } catch (NullPointerException ex) { throw new BallerinaConnectorException("cannot convert payload to an array of type: " + entityBodyType.getName()); } } /** * Given an inbound request entity construct the ballerina json. * * @param inRequestEntity Represents inbound request entity * @return a ballerina json value */ private static BValue getBJsonValue(BMap<String, BValue> inRequestEntity) { BValue bjson = EntityBodyHandler.constructJsonDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bjson); return bjson; } public static boolean shouldDiffer(HttpResource httpResource) { return (httpResource != null && httpResource.getSignatureParams().getEntityBody() != null); } private HttpDispatcher() { } }
class HttpDispatcher { public static HttpService findService(HTTPServicesRegistry servicesRegistry, HttpCarbonMessage inboundReqMsg) { try { Map<String, HttpService> servicesOnInterface; List<String> sortedServiceURIs; String hostName = inboundReqMsg.getHeader(HttpHeaderNames.HOST.toString()); if (hostName != null && servicesRegistry.getServicesMapHolder(hostName) != null) { servicesOnInterface = servicesRegistry.getServicesByHost(hostName); sortedServiceURIs = servicesRegistry.getSortedServiceURIsByHost(hostName); } else { servicesOnInterface = servicesRegistry.getServicesByHost(DEFAULT_HOST); sortedServiceURIs = servicesRegistry.getSortedServiceURIsByHost(DEFAULT_HOST); } String rawUri = (String) inboundReqMsg.getProperty(HttpConstants.TO); inboundReqMsg.setProperty(HttpConstants.RAW_URI, rawUri); Map<String, Map<String, String>> matrixParams = new HashMap<>(); String uriWithoutMatrixParams = URIUtil.extractMatrixParams(rawUri, matrixParams); inboundReqMsg.setProperty(HttpConstants.TO, uriWithoutMatrixParams); inboundReqMsg.setProperty(HttpConstants.MATRIX_PARAMS, matrixParams); URI validatedUri = getValidatedURI(uriWithoutMatrixParams); String basePath = servicesRegistry.findTheMostSpecificBasePath(validatedUri.getPath(), servicesOnInterface, sortedServiceURIs); if (basePath == null) { inboundReqMsg.setProperty(HttpConstants.HTTP_STATUS_CODE, 404); throw new BallerinaConnectorException("no matching service found for path : " + validatedUri.getRawPath()); } HttpService service = servicesOnInterface.get(basePath); setInboundReqProperties(inboundReqMsg, validatedUri, basePath); return service; } catch (Exception e) { throw new BallerinaConnectorException(e.getMessage()); } } private static void setInboundReqProperties(HttpCarbonMessage inboundReqMsg, URI requestUri, String basePath) { String subPath = URIUtil.getSubPath(requestUri.getPath(), basePath); inboundReqMsg.setProperty(HttpConstants.BASE_PATH, basePath); inboundReqMsg.setProperty(HttpConstants.SUB_PATH, subPath); inboundReqMsg.setProperty(HttpConstants.QUERY_STR, requestUri.getQuery()); inboundReqMsg.setProperty(HttpConstants.RAW_QUERY_STR, requestUri.getRawQuery()); } public static URI getValidatedURI(String uriStr) { URI requestUri; try { requestUri = URI.create(uriStr); } catch (IllegalArgumentException e) { throw new BallerinaConnectorException(e.getMessage()); } return requestUri; } /** * This method finds the matching resource for the incoming request. * * @param servicesRegistry HTTP service registry * @param inboundMessage incoming message. * @return matching resource. 
*/ public static HttpResource findResource(HTTPServicesRegistry servicesRegistry, HttpCarbonMessage inboundMessage) { String protocol = (String) inboundMessage.getProperty(HttpConstants.PROTOCOL); if (protocol == null) { throw new BallerinaConnectorException("protocol not defined in the incoming request"); } try { HttpService service = HttpDispatcher.findService(servicesRegistry, inboundMessage); if (service == null) { throw new BallerinaConnectorException("no Service found to handle the service request"); } return HttpResourceDispatcher.findResource(service, inboundMessage); } catch (Exception e) { throw new BallerinaConnectorException(e.getMessage()); } } public static BValue[] getSignatureParameters(HttpResource httpResource, HttpCarbonMessage httpCarbonMessage, Struct endpointConfig) { ProgramFile programFile = httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(); BMap<String, BValue> serviceEndpoint = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, SERVICE_ENDPOINT); BMap<String, BValue> connection = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, CONNECTION); BMap<String, BValue> inRequest = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_HTTP, REQUEST); BMap<String, BValue> inRequestEntity = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_MIME, ENTITY); BMap<String, BValue> mediaType = BLangConnectorSPIUtil.createBStruct(programFile, PROTOCOL_PACKAGE_MIME, MEDIA_TYPE); HttpUtil.enrichServiceEndpointInfo(serviceEndpoint, httpCarbonMessage, httpResource, endpointConfig); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage, endpointConfig); serviceEndpoint.put(SERVICE_ENDPOINT_CONNECTION_FIELD, connection); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage, endpointConfig); HttpUtil.populateInboundRequest(inRequest, inRequestEntity, mediaType, httpCarbonMessage, programFile); SignatureParams signatureParams = httpResource.getSignatureParams(); BValue[] bValues = new BValue[signatureParams.getParamCount()]; bValues[0] = serviceEndpoint; bValues[1] = inRequest; if (signatureParams.getParamCount() == 2) { return bValues; } Map<String, String> resourceArgumentValues = (Map<String, String>) httpCarbonMessage.getProperty(HttpConstants.RESOURCE_ARGS); for (int i = 0; i < signatureParams.getPathParams().size(); i++) { String argumentValue = resourceArgumentValues.get(signatureParams.getPathParams().get(i).getVarName()); if (argumentValue != null) { try { argumentValue = URLDecoder.decode(argumentValue, "UTF-8"); } catch (UnsupportedEncodingException e) { } } bValues[i + 2] = new BString(argumentValue); } if (signatureParams.getEntityBody() == null) { return bValues; } try { bValues[bValues.length - 1] = populateAndGetEntityBody(inRequest, inRequestEntity, signatureParams.getEntityBody().getVarType()); } catch (BallerinaException ex) { httpCarbonMessage.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpConstants.HTTP_BAD_REQUEST); throw new BallerinaConnectorException("data binding failed: " + ex.getMessage()); } return bValues; } private static BValue populateAndGetEntityBody(BMap<String, BValue> inRequest, BMap<String, BValue> inRequestEntity, BType entityBodyType) { HttpUtil.populateEntityBody(null, inRequest, inRequestEntity, true); try { switch (entityBodyType.getTag()) { case TypeTags.STRING_TAG: BString stringDataSource = EntityBodyHandler.constructStringDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, 
stringDataSource); return stringDataSource; case TypeTags.JSON_TAG: BValue bjson = EntityBodyHandler.constructJsonDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bjson); return bjson; case TypeTags.XML_TAG: BXML bxml = EntityBodyHandler.constructXmlDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bxml); return bxml; case TypeTags.ARRAY_TAG: if (((BArrayType) entityBodyType).getElementType().getTag() == TypeTags.BYTE_TAG) { BByteArray blobDataSource = EntityBodyHandler.constructBlobDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, blobDataSource); return blobDataSource; } else if (((BArrayType) entityBodyType).getElementType().getTag() == TypeTags.RECORD_TYPE_TAG) { bjson = getBJsonValue(inRequestEntity); return getRecordArray(entityBodyType, bjson); } else { throw new BallerinaConnectorException("Incompatible Element type found inside an array " + ((BArrayType) entityBodyType).getElementType().getName()); } case TypeTags.RECORD_TYPE_TAG: bjson = getBJsonValue(inRequestEntity); return getRecord(entityBodyType, bjson); default: } } catch (Exception ex) { throw new BallerinaConnectorException("Error in reading payload : " + ex.getMessage()); } return null; } /** * Convert a json to the relevant record type. * * @param entityBodyType Represents entity body type * @param bjson Represents the json value that needs to be converted * @return the relevant ballerina record or object */ /** * Convert a json array to the relevant record array. * * @param entityBodyType Represents entity body type * @param bjson Represents the json array that needs to be converted * @return the relevant ballerina record or object array */ private static BValue getRecordArray(BType entityBodyType, BValue bjson) { try { return JSONUtils.convertJSONToBArray(bjson, (BArrayType) entityBodyType); } catch (NullPointerException ex) { throw new BallerinaConnectorException("cannot convert payload to an array of type: " + entityBodyType.getName()); } } /** * Given an inbound request entity construct the ballerina json. * * @param inRequestEntity Represents inbound request entity * @return a ballerina json value */ private static BValue getBJsonValue(BMap<String, BValue> inRequestEntity) { BValue bjson = EntityBodyHandler.constructJsonDataSource(inRequestEntity); EntityBodyHandler.addMessageDataSource(inRequestEntity, bjson); return bjson; } public static boolean shouldDiffer(HttpResource httpResource) { return (httpResource != null && httpResource.getSignatureParams().getEntityBody() != null); } private HttpDispatcher() { } }
> No need to lowercase. "." lower-cased is still ".". Done.
private boolean isValidDirectory(FileStatus fileStatus) { if (!fileStatus.isDirectory()) { return false; } String lcFileName = fileStatus.getPath().getName().toLowerCase(); return !(lcFileName.startsWith(".")); }
String lcFileName = fileStatus.getPath().getName().toLowerCase();
private boolean isValidDirectory(FileStatus fileStatus) { if (!fileStatus.isDirectory()) { return false; } String dirName = fileStatus.getPath().getName(); return !(dirName.startsWith(".")); }
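The rationale is worth spelling out: toLowerCase() cannot change ".", so dropping it leaves the hidden-directory check behaviorally identical while avoiding a pointless allocation:

// ".".toLowerCase() is still "."; the prefix check needs no case folding.
String dirName = fileStatus.getPath().getName();
boolean hidden = dirName.startsWith(".");   // same result as the lower-cased variant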
class HiveRemoteFileIO implements RemoteFileIO { private static final Logger LOG = LogManager.getLogger(HiveRemoteFileIO.class); private final Configuration configuration; private FileSystem fileSystem; private final Map<String, Long> blockHostToId = new ConcurrentHashMap<>(); private final Map<Long, String> idToBlockHost = new ConcurrentHashMap<>(); private long hostId = 0; private static final int UNKNOWN_STORAGE_ID = -1; public HiveRemoteFileIO(Configuration configuration) { this.configuration = configuration; } public Map<RemotePathKey, List<RemoteFileDesc>> getRemoteFiles(RemotePathKey pathKey) { ImmutableMap.Builder<RemotePathKey, List<RemoteFileDesc>> resultPartitions = ImmutableMap.builder(); String path = ObjectStorageUtils.formatObjectStoragePath(pathKey.getPath()); List<RemoteFileDesc> fileDescs = Lists.newArrayList(); try { URI uri = new Path(path).toUri(); FileSystem fileSystem; if (!FeConstants.runningUnitTest) { fileSystem = FileSystem.get(uri, configuration); } else { fileSystem = this.fileSystem; } RemoteIterator<LocatedFileStatus> blockIterator; if (!pathKey.isRecursive()) { blockIterator = fileSystem.listLocatedStatus(new Path(uri.getPath())); } else { blockIterator = listFilesRecursive(fileSystem, new Path(uri.getPath())); } while (blockIterator.hasNext()) { LocatedFileStatus locatedFileStatus = blockIterator.next(); if (!isValidDataFile(locatedFileStatus)) { continue; } String locateName = locatedFileStatus.getPath().toUri().getPath(); String fileName = PartitionUtil.getSuffixName(uri.getPath(), locateName); BlockLocation[] blockLocations = locatedFileStatus.getBlockLocations(); List<RemoteFileBlockDesc> fileBlockDescs = getRemoteFileBlockDesc(blockLocations); fileDescs.add(new RemoteFileDesc(fileName, "", locatedFileStatus.getLen(), locatedFileStatus.getModificationTime(), ImmutableList.copyOf(fileBlockDescs), ImmutableList.of())); } } catch (FileNotFoundException e) { LOG.warn("Hive remote file on path: {} not existed, ignore it", path, e); } catch (Exception e) { LOG.error("Failed to get hive remote file's metadata on path: {}", path, e); throw new StarRocksConnectorException("Failed to get hive remote file's metadata on path: %s. 
msg: %s", pathKey, e.getMessage()); } return resultPartitions.put(pathKey, fileDescs).build(); } private RemoteIterator<LocatedFileStatus> listFilesRecursive(FileSystem fileSystem, Path f) throws FileNotFoundException, IOException { return new RemoteIterator<LocatedFileStatus>() { private Stack<RemoteIterator<LocatedFileStatus>> itors = new Stack<>(); private RemoteIterator<LocatedFileStatus> curItor = fileSystem.listLocatedStatus(f); private LocatedFileStatus curFile; @Override public boolean hasNext() throws IOException { while (curFile == null) { if (curItor.hasNext()) { handleFileStat(curItor.next()); } else if (!itors.empty()) { curItor = itors.pop(); } else { return false; } } return true; } private void handleFileStat(LocatedFileStatus stat) throws IOException { if (stat.isFile()) { curFile = stat; } else if (isValidDirectory(stat)) { try { RemoteIterator<LocatedFileStatus> newDirItor = fileSystem.listLocatedStatus(stat.getPath()); itors.push(curItor); curItor = newDirItor; } catch (FileNotFoundException ignored) { LOG.debug("Directory {} deleted while attempting for recursive listing", stat.getPath()); } } } @Override public LocatedFileStatus next() throws IOException { if (hasNext()) { LocatedFileStatus result = curFile; curFile = null; return result; } throw new java.util.NoSuchElementException("No more entry in " + f); } }; } private boolean isValidDataFile(FileStatus fileStatus) { if (!fileStatus.isFile()) { return false; } String lcFileName = fileStatus.getPath().getName().toLowerCase(); return !(lcFileName.startsWith(".") || lcFileName.startsWith("_") || lcFileName.endsWith(".copying") || lcFileName.endsWith(".tmp")); } protected List<RemoteFileBlockDesc> getRemoteFileBlockDesc(BlockLocation[] blockLocations) throws IOException { List<RemoteFileBlockDesc> fileBlockDescs = Lists.newArrayList(); for (BlockLocation blockLocation : blockLocations) { fileBlockDescs.add(buildRemoteFileBlockDesc( blockLocation.getOffset(), blockLocation.getLength(), getReplicaHostIds(blockLocation.getNames())) ); } return fileBlockDescs; } public RemoteFileBlockDesc buildRemoteFileBlockDesc(long offset, long length, long[] replicaHostIds) { return new RemoteFileBlockDesc(offset, length, replicaHostIds, new long[] {UNKNOWN_STORAGE_ID}, this); } public long[] getReplicaHostIds(String[] hostNames) { long[] replicaHostIds = new long[hostNames.length]; for (int j = 0; j < hostNames.length; j++) { String name = hostNames[j]; replicaHostIds[j] = getHostId(name); } return replicaHostIds; } public long getHostId(String hostName) { return blockHostToId.computeIfAbsent(hostName, k -> { long newId = hostId++; idToBlockHost.put(newId, hostName); return newId; }); } public String getHdfsDataNodeIp(long hostId) { String hostPort = idToBlockHost.get(hostId); return hostPort.split(":")[0]; } @VisibleForTesting public void setFileSystem(FileSystem fs) { this.fileSystem = fs; } }
class HiveRemoteFileIO implements RemoteFileIO { private static final Logger LOG = LogManager.getLogger(HiveRemoteFileIO.class); private final Configuration configuration; private FileSystem fileSystem; private final Map<String, Long> blockHostToId = new ConcurrentHashMap<>(); private final Map<Long, String> idToBlockHost = new ConcurrentHashMap<>(); private long hostId = 0; private static final int UNKNOWN_STORAGE_ID = -1; public HiveRemoteFileIO(Configuration configuration) { this.configuration = configuration; } public Map<RemotePathKey, List<RemoteFileDesc>> getRemoteFiles(RemotePathKey pathKey) { ImmutableMap.Builder<RemotePathKey, List<RemoteFileDesc>> resultPartitions = ImmutableMap.builder(); String path = ObjectStorageUtils.formatObjectStoragePath(pathKey.getPath()); List<RemoteFileDesc> fileDescs = Lists.newArrayList(); try { URI uri = new Path(path).toUri(); FileSystem fileSystem; if (!FeConstants.runningUnitTest) { fileSystem = FileSystem.get(uri, configuration); } else { fileSystem = this.fileSystem; } RemoteIterator<LocatedFileStatus> blockIterator; if (!pathKey.isRecursive()) { blockIterator = fileSystem.listLocatedStatus(new Path(uri.getPath())); } else { blockIterator = listFilesRecursive(fileSystem, new Path(uri.getPath())); } while (blockIterator.hasNext()) { LocatedFileStatus locatedFileStatus = blockIterator.next(); if (!isValidDataFile(locatedFileStatus)) { continue; } String locateName = locatedFileStatus.getPath().toUri().getPath(); String fileName = PartitionUtil.getSuffixName(uri.getPath(), locateName); BlockLocation[] blockLocations = locatedFileStatus.getBlockLocations(); List<RemoteFileBlockDesc> fileBlockDescs = getRemoteFileBlockDesc(blockLocations); fileDescs.add(new RemoteFileDesc(fileName, "", locatedFileStatus.getLen(), locatedFileStatus.getModificationTime(), ImmutableList.copyOf(fileBlockDescs), ImmutableList.of())); } } catch (FileNotFoundException e) { LOG.warn("Hive remote file on path: {} not existed, ignore it", path, e); } catch (Exception e) { LOG.error("Failed to get hive remote file's metadata on path: {}", path, e); throw new StarRocksConnectorException("Failed to get hive remote file's metadata on path: %s. 
msg: %s", pathKey, e.getMessage()); } return resultPartitions.put(pathKey, fileDescs).build(); } private RemoteIterator<LocatedFileStatus> listFilesRecursive(FileSystem fileSystem, Path f) throws FileNotFoundException, IOException { return new RemoteIterator<LocatedFileStatus>() { private Stack<RemoteIterator<LocatedFileStatus>> itors = new Stack<>(); private RemoteIterator<LocatedFileStatus> curItor = fileSystem.listLocatedStatus(f); private LocatedFileStatus curFile; @Override public boolean hasNext() throws IOException { while (curFile == null) { if (curItor.hasNext()) { handleFileStat(curItor.next()); } else if (!itors.empty()) { curItor = itors.pop(); } else { return false; } } return true; } private void handleFileStat(LocatedFileStatus stat) throws IOException { if (stat.isFile()) { curFile = stat; } else if (isValidDirectory(stat)) { try { RemoteIterator<LocatedFileStatus> newDirItor = fileSystem.listLocatedStatus(stat.getPath()); itors.push(curItor); curItor = newDirItor; } catch (FileNotFoundException ignored) { LOG.debug("Directory {} deleted while attempting for recursive listing", stat.getPath()); } } } @Override public LocatedFileStatus next() throws IOException { if (hasNext()) { LocatedFileStatus result = curFile; curFile = null; return result; } throw new java.util.NoSuchElementException("No more entry in " + f); } }; } private boolean isValidDataFile(FileStatus fileStatus) { if (!fileStatus.isFile()) { return false; } String lcFileName = fileStatus.getPath().getName().toLowerCase(); return !(lcFileName.startsWith(".") || lcFileName.startsWith("_") || lcFileName.endsWith(".copying") || lcFileName.endsWith(".tmp")); } protected List<RemoteFileBlockDesc> getRemoteFileBlockDesc(BlockLocation[] blockLocations) throws IOException { List<RemoteFileBlockDesc> fileBlockDescs = Lists.newArrayList(); for (BlockLocation blockLocation : blockLocations) { fileBlockDescs.add(buildRemoteFileBlockDesc( blockLocation.getOffset(), blockLocation.getLength(), getReplicaHostIds(blockLocation.getNames())) ); } return fileBlockDescs; } public RemoteFileBlockDesc buildRemoteFileBlockDesc(long offset, long length, long[] replicaHostIds) { return new RemoteFileBlockDesc(offset, length, replicaHostIds, new long[] {UNKNOWN_STORAGE_ID}, this); } public long[] getReplicaHostIds(String[] hostNames) { long[] replicaHostIds = new long[hostNames.length]; for (int j = 0; j < hostNames.length; j++) { String name = hostNames[j]; replicaHostIds[j] = getHostId(name); } return replicaHostIds; } public long getHostId(String hostName) { return blockHostToId.computeIfAbsent(hostName, k -> { long newId = hostId++; idToBlockHost.put(newId, hostName); return newId; }); } public String getHdfsDataNodeIp(long hostId) { String hostPort = idToBlockHost.get(hostId); return hostPort.split(":")[0]; } @VisibleForTesting public void setFileSystem(FileSystem fs) { this.fileSystem = fs; } }
Before: the time contained two parts: the first is the time spent waiting for all instances to report their profiles (i.e. `endProfile`); the second is the time spent merging the profiles (i.e. `buildMergedQueryProfile`). After: the time contains only the first part.
private void initProfile(long beginTimeInNanoSecond) { profile = buildTopLevelProfile(); if (coord != null) { if (coord.getQueryProfile() != null) { coord.getQueryProfile().getCounterTotalTime() .setValue(TimeUtils.getEstimatedTime(beginTimeInNanoSecond)); long profileCollectStartTime = System.currentTimeMillis(); coord.endProfile(); profile.getChild("Summary").addInfoString(ProfileManager.PROFILE_TIME, DebugUtil.getPrettyStringMs(System.currentTimeMillis() - profileCollectStartTime)); profile.addChild(coord.buildMergedQueryProfile()); } } profile.computeTimeInChildProfile(); }
coord.endProfile();
private void initProfile(long beginTimeInNanoSecond) { profile = buildTopLevelProfile(); if (coord != null) { if (coord.getQueryProfile() != null) { coord.getQueryProfile().getCounterTotalTime() .setValue(TimeUtils.getEstimatedTime(beginTimeInNanoSecond)); long profileCollectStartTime = System.currentTimeMillis(); coord.endProfile(); profile.getChild("Summary").addInfoString(ProfileManager.PROFILE_TIME, DebugUtil.getPrettyStringMs(System.currentTimeMillis() - profileCollectStartTime)); profile.addChild(coord.buildMergedQueryProfile()); } } profile.computeTimeInChildProfile(); }
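One thing this row does not make obvious: the method_body and method_body_after fields are shown identical, so the before/after difference described in the comment is only the position of the PROFILE_TIME measurement relative to buildMergedQueryProfile. A sketch of the two orderings, under the assumption that the "before" version measured both phases:

// Before (assumed): the measured window covered both waiting and merging.
long start = System.currentTimeMillis();
coord.endProfile();                                  // wait for instance profiles
profile.addChild(coord.buildMergedQueryProfile());   // merge them
profile.getChild("Summary").addInfoString(ProfileManager.PROFILE_TIME,
        DebugUtil.getPrettyStringMs(System.currentTimeMillis() - start));

// After: only the wait is measured; the merge runs outside the window.
long start2 = System.currentTimeMillis();
coord.endProfile();
profile.getChild("Summary").addInfoString(ProfileManager.PROFILE_TIME,
        DebugUtil.getPrettyStringMs(System.currentTimeMillis() - start2));
profile.addChild(coord.buildMergedQueryProfile());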
class StmtExecutor { private static final Logger LOG = LogManager.getLogger(StmtExecutor.class); private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0); private final ConnectContext context; private final MysqlSerializer serializer; private final OriginStatement originStmt; private StatementBase parsedStmt; private RuntimeProfile profile; private Coordinator coord = null; private LeaderOpExecutor leaderOpExecutor = null; private RedirectStatus redirectStatus = null; private final boolean isProxy; private List<ByteBuffer> proxyResultBuffer = null; private ShowResultSet proxyResultSet = null; private PQueryStatistics statisticsForAuditLog; private List<StmtExecutor> subStmtExecutors; private HttpResultSender httpResultSender; public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) { this.context = context; this.originStmt = originStmt; this.serializer = context.getSerializer(); this.isProxy = isProxy; if (isProxy) { proxyResultBuffer = new ArrayList<>(); } } @VisibleForTesting public StmtExecutor(ConnectContext context, String stmt) { this(context, new OriginStatement(stmt, 0), false); } public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) { this.context = ctx; this.parsedStmt = parsedStmt; this.originStmt = parsedStmt.getOrigStmt(); this.serializer = context.getSerializer(); this.isProxy = false; } public Coordinator getCoordinator() { return this.coord; } private RuntimeProfile buildTopLevelProfile() { RuntimeProfile profile = new RuntimeProfile("Query"); RuntimeProfile summaryProfile = new RuntimeProfile("Summary"); summaryProfile.addInfoString(ProfileManager.QUERY_ID, DebugUtil.printId(context.getExecutionId())); summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(context.getStartTime())); long currentTimestamp = System.currentTimeMillis(); long totalTimeMs = currentTimestamp - context.getStartTime(); summaryProfile.addInfoString(ProfileManager.END_TIME, TimeUtils.longToTimeString(currentTimestamp)); summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs)); summaryProfile.addInfoString(ProfileManager.QUERY_TYPE, "Query"); summaryProfile.addInfoString(ProfileManager.QUERY_STATE, context.getState().toProfileString()); summaryProfile.addInfoString("StarRocks Version", String.format("%s-%s", Version.STARROCKS_VERSION, Version.STARROCKS_COMMIT_HASH)); summaryProfile.addInfoString(ProfileManager.USER, context.getQualifiedUser()); summaryProfile.addInfoString(ProfileManager.DEFAULT_DB, context.getDatabase()); summaryProfile.addInfoString(ProfileManager.SQL_STATEMENT, originStmt.originStmt); SessionVariable variables = context.getSessionVariable(); if (variables != null) { StringBuilder sb = new StringBuilder(); sb.append(SessionVariable.PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM).append("=") .append(variables.getParallelExecInstanceNum()).append(","); sb.append(SessionVariable.MAX_PARALLEL_SCAN_INSTANCE_NUM).append("=") .append(variables.getMaxParallelScanInstanceNum()).append(","); sb.append(SessionVariable.PIPELINE_DOP).append("=").append(variables.getPipelineDop()).append(","); sb.append(SessionVariable.ENABLE_ADAPTIVE_SINK_DOP).append("=") .append(variables.getEnableAdaptiveSinkDop()) .append(","); sb.append(SessionVariable.ENABLE_RUNTIME_ADAPTIVE_DOP).append("=") .append(variables.isEnableRuntimeAdaptiveDop()) .append(","); sb.append(SessionVariable.RUNTIME_PROFILE_REPORT_INTERVAL).append("=") .append(variables.getRuntimeProfileReportInterval()) .append(","); if 
(context.getResourceGroup() != null) { sb.append(SessionVariable.RESOURCE_GROUP).append("=").append(context.getResourceGroup().getName()) .append(","); } sb.deleteCharAt(sb.length() - 1); summaryProfile.addInfoString(ProfileManager.VARIABLES, sb.toString()); summaryProfile.addInfoString("NonDefaultSessionVariables", variables.getNonDefaultVariablesJson()); } profile.addChild(summaryProfile); RuntimeProfile plannerProfile = new RuntimeProfile("Planner"); profile.addChild(plannerProfile); context.getPlannerProfile().build(plannerProfile); return profile; } public boolean isForwardToLeader() { if (GlobalStateMgr.getCurrentState().isLeader()) { return false; } if (parsedStmt instanceof QueryStatement && !GlobalStateMgr.getCurrentState().isLeader() && !GlobalStateMgr.getCurrentState().canRead()) { return true; } if (redirectStatus == null) { return false; } else { return redirectStatus.isForwardToLeader(); } } public ByteBuffer getOutputPacket() { if (leaderOpExecutor == null) { return null; } else { return leaderOpExecutor.getOutputPacket(); } } public ShowResultSet getProxyResultSet() { return proxyResultSet; } public ShowResultSet getShowResultSet() { if (leaderOpExecutor == null) { return null; } else { return leaderOpExecutor.getProxyResultSet(); } } public boolean sendResultToChannel(MysqlChannel channel) throws IOException { if (leaderOpExecutor == null) { return false; } else { return leaderOpExecutor.sendResultToChannel(channel); } } public StatementBase getParsedStmt() { return parsedStmt; } public void execute() throws Exception { long beginTimeInNanoSecond = TimeUtils.getStartTime(); context.setStmtId(STMT_ID_GENERATOR.incrementAndGet()); UUID uuid = context.getQueryId(); context.setExecutionId(UUIDUtil.toTUniqueId(uuid)); SessionVariable sessionVariableBackup = context.getSessionVariable(); if (context instanceof HttpConnectContext) { httpResultSender = new HttpResultSender((HttpConnectContext) context); } try { resolveParseStmtForForward(); if (parsedStmt != null) { Map<String, String> optHints = null; if (parsedStmt instanceof QueryStatement && ((QueryStatement) parsedStmt).getQueryRelation() instanceof SelectRelation) { SelectRelation selectRelation = (SelectRelation) ((QueryStatement) parsedStmt).getQueryRelation(); optHints = selectRelation.getSelectList().getOptHints(); } if (optHints != null) { SessionVariable sessionVariable = (SessionVariable) sessionVariableBackup.clone(); for (String key : optHints.keySet()) { VariableMgr.setSystemVariable(sessionVariable, new SystemVariable(key, new StringLiteral(optHints.get(key))), true); } context.setSessionVariable(sessionVariable); } if (parsedStmt.isExplain()) { context.setExplainLevel(parsedStmt.getExplainLevel()); } } ExecPlan execPlan = null; boolean execPlanBuildByNewPlanner = false; try (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer("Total")) { redirectStatus = parsedStmt.getRedirectStatus(); if (!isForwardToLeader()) { if (context.shouldDumpQuery()) { if (context.getDumpInfo() == null) { context.setDumpInfo(new QueryDumpInfo(context)); } else { context.getDumpInfo().reset(); } context.getDumpInfo().setOriginStmt(parsedStmt.getOrigStmt().originStmt); context.getDumpInfo().setStatement(parsedStmt); } if (parsedStmt instanceof ShowStmt) { com.starrocks.sql.analyzer.Analyzer.analyze(parsedStmt, context); Authorizer.check(parsedStmt, context); QueryStatement selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(); if (selectStmt != null) { parsedStmt = selectStmt; execPlan = StatementPlanner.plan(parsedStmt, 
context); } } else { execPlan = StatementPlanner.plan(parsedStmt, context); if (parsedStmt instanceof QueryStatement && context.shouldDumpQuery()) { context.getDumpInfo().setExplainInfo(execPlan.getExplainString(TExplainLevel.COSTS)); } } execPlanBuildByNewPlanner = true; } } catch (SemanticException e) { dumpException(e); throw new AnalysisException(e.getMessage()); } catch (StarRocksPlannerException e) { dumpException(e); if (e.getType().equals(ErrorType.USER_ERROR)) { throw e; } else if (e.getType().equals(ErrorType.UNSUPPORTED) && e.getMessage().contains("UDF function")) { LOG.warn("New planner not implement : " + originStmt.originStmt, e); analyze(context.getSessionVariable().toThrift()); } else { LOG.warn("New planner error: " + originStmt.originStmt, e); throw e; } } if (context.isHTTPQueryDump) { return; } if (isForwardToLeader()) { forwardToLeader(); return; } else { LOG.debug("no need to transfer to Leader. stmt: {}", context.getStmtId()); } if (parsedStmt instanceof QueryStatement) { context.getState().setIsQuery(true); final boolean isStatisticsJob = AnalyzerUtils.isStatisticsJob(context, parsedStmt); context.setStatisticsJob(isStatisticsJob); if (Config.enable_sql_blacklist && !parsedStmt.isExplain()) { OriginStatement origStmt = parsedStmt.getOrigStmt(); if (origStmt != null) { String originSql = origStmt.originStmt.trim() .toLowerCase().replaceAll(" +", " "); SqlBlackList.verifying(originSql); } } Preconditions.checkNotNull(execPlan, "query must has a plan"); int retryTime = Config.max_query_retry_time; for (int i = 0; i < retryTime; i++) { boolean needRetry = false; try { if (i > 0) { uuid = UUID.randomUUID(); LOG.info("transfer QueryId: {} to {}", DebugUtil.printId(context.getQueryId()), DebugUtil.printId(uuid)); context.setExecutionId( new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits())); } Preconditions.checkState(execPlanBuildByNewPlanner, "must use new planner"); handleQueryStmt(execPlan); break; } catch (RemoteFileNotFoundException e) { if (i == retryTime - 1) { throw e; } List<ScanNode> scanNodes = execPlan.getScanNodes(); boolean existExternalCatalog = false; for (ScanNode scanNode : scanNodes) { if (scanNode instanceof HdfsScanNode) { HiveTable hiveTable = ((HdfsScanNode) scanNode).getHiveTable(); String catalogName = hiveTable.getCatalogName(); if (CatalogMgr.isExternalCatalog(catalogName)) { existExternalCatalog = true; ConnectorMetadata metadata = GlobalStateMgr.getCurrentState().getMetadataMgr() .getOptionalMetadata(hiveTable.getCatalogName()).get(); metadata.refreshTable(hiveTable.getDbName(), hiveTable, new ArrayList<>(), true); metadata.clear(); } } } if (!existExternalCatalog) { throw e; } if (!context.getMysqlChannel().isSend()) { String originStmt; if (parsedStmt.getOrigStmt() != null) { originStmt = parsedStmt.getOrigStmt().originStmt; } else { originStmt = this.originStmt.originStmt; } needRetry = true; LOG.warn("retry {} times. stmt: {}", (i + 1), originStmt); } else { throw e; } PlannerProfile.addCustomProperties("HMS.RETRY", String.valueOf(i + 1)); } catch (RpcException e) { if (i == 0 && context.getQueryDetail() == null && Config.log_plan_cancelled_by_crash_be) { LOG.warn( "Query cancelled by crash of backends or RpcException, [QueryId={}] [SQL={}] [Plan={}]", DebugUtil.printId(context.getExecutionId()), originStmt == null ? 
"" : originStmt.originStmt, execPlan.getExplainString(TExplainLevel.COSTS), e); } if (i == retryTime - 1) { throw e; } if (!context.getMysqlChannel().isSend()) { String originStmt; if (parsedStmt.getOrigStmt() != null) { originStmt = parsedStmt.getOrigStmt().originStmt; } else { originStmt = this.originStmt.originStmt; } needRetry = true; LOG.warn("retry {} times. stmt: {}", (i + 1), originStmt); } else { throw e; } } finally { if (!needRetry && context.getSessionVariable().isEnableProfile()) { writeProfile(execPlan, beginTimeInNanoSecond); if (parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel())) { handleExplainStmt(ExplainAnalyzer.analyze( ProfilingExecPlan.buildFrom(execPlan), profile, null)); } } QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } } } else if (parsedStmt instanceof SetStmt) { handleSetStmt(); } else if (parsedStmt instanceof UseDbStmt) { handleUseDbStmt(); } else if (parsedStmt instanceof UseCatalogStmt) { handleUseCatalogStmt(); } else if (parsedStmt instanceof SetCatalogStmt) { handleSetCatalogStmt(); } else if (parsedStmt instanceof CreateTableAsSelectStmt) { if (execPlanBuildByNewPlanner) { handleCreateTableAsSelectStmt(beginTimeInNanoSecond); } else { throw new AnalysisException("old planner does not support CTAS statement"); } } else if (parsedStmt instanceof DmlStmt) { handleDMLStmtWithProfile(execPlan, (DmlStmt) parsedStmt, beginTimeInNanoSecond); } else if (parsedStmt instanceof DdlStmt) { handleDdlStmt(); } else if (parsedStmt instanceof ShowStmt) { handleShow(); } else if (parsedStmt instanceof KillStmt) { handleKill(); } else if (parsedStmt instanceof ExportStmt) { handleExportStmt(context.getQueryId()); } else if (parsedStmt instanceof UnsupportedStmt) { handleUnsupportedStmt(); } else if (parsedStmt instanceof AnalyzeStmt) { handleAnalyzeStmt(); } else if (parsedStmt instanceof AnalyzeProfileStmt) { handleAnalyzeProfileStmt(); } else if (parsedStmt instanceof DropHistogramStmt) { handleDropHistogramStmt(); } else if (parsedStmt instanceof DropStatsStmt) { handleDropStatsStmt(); } else if (parsedStmt instanceof KillAnalyzeStmt) { handleKillAnalyzeStmt(); } else if (parsedStmt instanceof AddSqlBlackListStmt) { handleAddSqlBlackListStmt(); } else if (parsedStmt instanceof DelSqlBlackListStmt) { handleDelSqlBlackListStmt(); } else if (parsedStmt instanceof ExecuteAsStmt) { handleExecAsStmt(); } else if (parsedStmt instanceof ExecuteScriptStmt) { handleExecScriptStmt(); } else if (parsedStmt instanceof SetRoleStmt) { handleSetRole(); } else if (parsedStmt instanceof SetDefaultRoleStmt) { handleSetDefaultRole(); } else if (parsedStmt instanceof UpdateFailPointStatusStatement) { handleUpdateFailPointStatusStmt(); } else { context.getState().setError("Do not support this query."); } } catch (IOException e) { LOG.warn("execute IOException ", e); context.getState().setError(e.getMessage()); throw e; } catch (UserException e) { String sql = originStmt != null ? originStmt.originStmt : ""; LOG.info("execute Exception, sql: {}, error: {}", sql, e.getMessage()); context.getState().setError(e.getMessage()); context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } catch (Throwable e) { String sql = originStmt != null ? 
originStmt.originStmt : ""; LOG.warn("execute Exception, sql " + sql, e); context.getState().setError(e.getMessage()); if (parsedStmt instanceof KillStmt) { context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } } finally { GlobalStateMgr.getCurrentState().getMetadataMgr().removeQueryMetadata(); if (context.getState().isError() && coord != null) { coord.cancel(); } if (parsedStmt instanceof InsertStmt && !parsedStmt.isExplain()) { if (Config.enable_sql_blacklist) { OriginStatement origStmt = parsedStmt.getOrigStmt(); if (origStmt != null) { String originSql = origStmt.originStmt.trim() .toLowerCase().replaceAll(" +", " "); SqlBlackList.verifying(originSql); } } } context.setSessionVariable(sessionVariableBackup); } } private void handleCreateTableAsSelectStmt(long beginTimeInNanoSecond) throws Exception { CreateTableAsSelectStmt createTableAsSelectStmt = (CreateTableAsSelectStmt) parsedStmt; if (!createTableAsSelectStmt.createTable(context)) { return; } try { InsertStmt insertStmt = createTableAsSelectStmt.getInsertStmt(); ExecPlan execPlan = new StatementPlanner().plan(insertStmt, context); handleDMLStmtWithProfile(execPlan, ((CreateTableAsSelectStmt) parsedStmt).getInsertStmt(), beginTimeInNanoSecond); if (context.getState().getStateType() == MysqlStateType.ERR) { ((CreateTableAsSelectStmt) parsedStmt).dropTable(context); } } catch (Throwable t) { LOG.warn("handle create table as select stmt fail", t); ((CreateTableAsSelectStmt) parsedStmt).dropTable(context); throw t; } } private void resolveParseStmtForForward() throws AnalysisException { if (parsedStmt == null) { List<StatementBase> stmts; try { stmts = com.starrocks.sql.parser.SqlParser.parse(originStmt.originStmt, context.getSessionVariable()); parsedStmt = stmts.get(originStmt.idx); parsedStmt.setOrigStmt(originStmt); } catch (ParsingException parsingException) { throw new AnalysisException(parsingException.getMessage()); } } } private void dumpException(Exception e) { if (context.isHTTPQueryDump()) { context.getDumpInfo().addException(ExceptionUtils.getStackTrace(e)); } else if (context.getSessionVariable().getEnableQueryDump()) { QueryDumpLog.getQueryDump().log(GsonUtils.GSON.toJson(context.getDumpInfo())); } } private void forwardToLeader() throws Exception { leaderOpExecutor = new LeaderOpExecutor(parsedStmt, originStmt, context, redirectStatus); LOG.debug("need to transfer to Leader. stmt: {}", context.getStmtId()); leaderOpExecutor.execute(); } private void writeProfile(ExecPlan plan, long beginTimeInNanoSecond) { initProfile(beginTimeInNanoSecond); ProfilingExecPlan profilingPlan = plan == null ? 
null : plan.getProfilingPlan(); String profileContent = ProfileManager.getInstance().pushProfile(profilingPlan, profile); if (context.getQueryDetail() != null) { context.getQueryDetail().setProfile(profileContent); } } public void analyze(TQueryOptions tQueryOptions) throws UserException { LOG.info("begin to analyze stmt: {}, forwarded stmt id: {}", context.getStmtId(), context.getForwardedStmtId()); resolveParseStmtForForward(); redirectStatus = parsedStmt.getRedirectStatus(); if (isForwardToLeader()) { return; } if (parsedStmt instanceof ShowStmt) { QueryStatement selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(); if (selectStmt != null) { Preconditions.checkState(false, "Shouldn't reach here"); } } try { parsedStmt.analyze(new Analyzer(context.getGlobalStateMgr(), context)); } catch (AnalysisException e) { throw e; } catch (Exception e) { LOG.warn("Analyze failed because ", e); throw new AnalysisException("Unexpected exception: " + e.getMessage()); } } public void registerSubStmtExecutor(StmtExecutor subStmtExecutor) { if (subStmtExecutors == null) { subStmtExecutors = Lists.newArrayList(); } subStmtExecutors.add(subStmtExecutor); } public void cancel() { if (parsedStmt instanceof DeleteStmt && ((DeleteStmt) parsedStmt).shouldHandledByDeleteHandler()) { DeleteStmt deleteStmt = (DeleteStmt) parsedStmt; long jobId = deleteStmt.getJobId(); if (jobId != -1) { GlobalStateMgr.getCurrentState().getDeleteMgr().killJob(jobId); } } else { if (subStmtExecutors != null && !subStmtExecutors.isEmpty()) { for (StmtExecutor sub : subStmtExecutors) { sub.cancel(); } } Coordinator coordRef = coord; if (coordRef != null) { coordRef.cancel(); } } } private void handleKill() throws DdlException { KillStmt killStmt = (KillStmt) parsedStmt; long id = killStmt.getConnectionId(); ConnectContext killCtx = context.getConnectScheduler().getContext(id); if (killCtx == null) { ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, id); } Preconditions.checkNotNull(killCtx); if (context == killCtx) { context.setKilled(); } else { if (!Objects.equals(killCtx.getQualifiedUser(), context.getQualifiedUser())) { Authorizer.checkSystemAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), PrivilegeType.OPERATE); } killCtx.kill(killStmt.isConnectionKill()); } context.getState().setOk(); } private void handleSetStmt() { try { SetStmt setStmt = (SetStmt) parsedStmt; SetExecutor executor = new SetExecutor(context, setStmt); executor.execute(); } catch (DdlException e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private Coordinator.Factory getCoordinatorFactory() { return new DefaultCoordinator.Factory(); } private void handleQueryStmt(ExecPlan execPlan) throws Exception { context.getMysqlChannel().reset(); boolean isExplainAnalyze = parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel()); boolean isSchedulerExplain = parsedStmt.isExplain() && StatementBase.ExplainLevel.SCHEDULER.equals(parsedStmt.getExplainLevel()); if (isExplainAnalyze) { context.getSessionVariable().setEnableProfile(true); context.getSessionVariable().setPipelineProfileLevel(1); context.getSessionVariable().setProfileLimitFold(false); } else if (isSchedulerExplain) { } else if (parsedStmt.isExplain()) { handleExplainStmt(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.SELECT)); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.SELECT)); } 
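// Query-dispatch path below: the plan fragments, scan nodes, and descriptor table are handed to a query coordinator, the query is registered with QeProcessorImpl so it can be tracked and killed, and result batches are then streamed to the client over the MySQL channel (or via the HTTP sender) until EOS.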
StatementBase queryStmt = parsedStmt; List<PlanFragment> fragments = execPlan.getFragments(); List<ScanNode> scanNodes = execPlan.getScanNodes(); TDescriptorTable descTable = execPlan.getDescTbl().toThrift(); List<String> colNames = execPlan.getColNames(); List<Expr> outputExprs = execPlan.getOutputExprs(); coord = getCoordinatorFactory().createQueryScheduler(context, fragments, scanNodes, descTable); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord)); if (isSchedulerExplain) { coord.startSchedulingWithoutDeploy(); handleExplainStmt(coord.getSchedulerExplain()); return; } coord.exec(); coord.setTopProfileSupplier(this::buildTopLevelProfile); coord.setExecPlanSupplier(() -> execPlan); RowBatch batch; boolean isOutfileQuery = false; if (queryStmt instanceof QueryStatement) { isOutfileQuery = ((QueryStatement) queryStmt).hasOutFileClause(); } if (context instanceof HttpConnectContext) { batch = httpResultSender.sendQueryResult(coord, execPlan); } else { MysqlChannel channel = context.getMysqlChannel(); boolean isSendFields = false; do { batch = coord.getNext(); if (batch.getBatch() != null && !isOutfileQuery && !isExplainAnalyze) { if (!isSendFields) { sendFields(colNames, outputExprs); isSendFields = true; } if (!isProxy && channel.isSendBufferNull()) { int bufferSize = 0; for (ByteBuffer row : batch.getBatch().getRows()) { bufferSize += (row.position() - row.limit()); } channel.initBuffer(bufferSize + 8); } for (ByteBuffer row : batch.getBatch().getRows()) { if (isProxy) { proxyResultBuffer.add(row); } else { channel.sendOnePacket(row); } } context.updateReturnRows(batch.getBatch().getRows().size()); } } while (!batch.isEos()); if (!isSendFields && !isOutfileQuery && !isExplainAnalyze) { sendFields(colNames, outputExprs); } } statisticsForAuditLog = batch.getQueryStatistics(); if (!isOutfileQuery) { context.getState().setEof(); } else { context.getState().setOk(statisticsForAuditLog.returnedRows, 0, ""); } if (null == statisticsForAuditLog || null == statisticsForAuditLog.statsItems || statisticsForAuditLog.statsItems.isEmpty()) { return; } Set<Long> tableIds = Sets.newHashSet(); for (QueryStatisticsItemPB item : statisticsForAuditLog.statsItems) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(item.tableId); entity.counterScanRowsTotal.increase(item.scanRows); entity.counterScanBytesTotal.increase(item.scanBytes); tableIds.add(item.tableId); } for (Long tableId : tableIds) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tableId); entity.counterScanFinishedTotal.increase(1L); } } private void handleAnalyzeStmt() throws IOException { AnalyzeStmt analyzeStmt = (AnalyzeStmt) parsedStmt; Database db = MetaUtils.getDatabase(context, analyzeStmt.getTableName()); Table table = MetaUtils.getTable(context, analyzeStmt.getTableName()); if (StatisticUtils.isEmptyTable(table)) { return; } StatsConstants.AnalyzeType analyzeType; if (analyzeStmt.getAnalyzeTypeDesc() instanceof AnalyzeHistogramDesc) { analyzeType = StatsConstants.AnalyzeType.HISTOGRAM; } else { if (analyzeStmt.isSample()) { analyzeType = StatsConstants.AnalyzeType.SAMPLE; } else { analyzeType = StatsConstants.AnalyzeType.FULL; } } AnalyzeStatus analyzeStatus; if (analyzeStmt.isExternal()) { String catalogName = analyzeStmt.getTableName().getCatalog(); analyzeStatus = new ExternalAnalyzeStatus(GlobalStateMgr.getCurrentState().getNextId(), catalogName, db.getOriginName(), table.getName(), 
table.getUUID(), analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties(), LocalDateTime.now()); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.PENDING); GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { analyzeStatus = new NativeAnalyzeStatus(GlobalStateMgr.getCurrentState().getNextId(), db.getId(), table.getId(), analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties(), LocalDateTime.now()); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.PENDING); GlobalStateMgr.getCurrentAnalyzeMgr().replayAddAnalyzeStatus(analyzeStatus); } int timeout = context.getSessionVariable().getQueryTimeoutS(); try { Future<?> future = GlobalStateMgr.getCurrentAnalyzeMgr().getAnalyzeTaskThreadPool() .submit(() -> executeAnalyze(analyzeStmt, analyzeStatus, db, table)); if (!analyzeStmt.isAsync()) { context.getSessionVariable().setQueryTimeoutS((int) Config.statistic_collect_query_timeout); future.get(); } } catch (RejectedExecutionException e) { analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); analyzeStatus.setReason("The statistics tasks running concurrently exceed the upper limit"); if (analyzeStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); } } catch (ExecutionException | InterruptedException e) { analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); analyzeStatus.setReason("The statistics tasks running failed"); if (analyzeStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); } } finally { context.getSessionVariable().setQueryTimeoutS(timeout); } ShowResultSet resultSet = analyzeStatus.toShowResult(); if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleAnalyzeProfileStmt() throws IOException { AnalyzeProfileStmt analyzeProfileStmt = (AnalyzeProfileStmt) parsedStmt; String queryId = analyzeProfileStmt.getQueryId(); List<Integer> planNodeIds = analyzeProfileStmt.getPlanNodeIds(); ProfileManager.ProfileElement profileElement = ProfileManager.getInstance().getProfileElement(queryId); Preconditions.checkNotNull(profileElement, "query not exists"); handleExplainStmt(ExplainAnalyzer.analyze(profileElement.plan, RuntimeProfileParser.parseFrom(CompressionUtils.gzipDecompressString(profileElement.profileContent)), planNodeIds)); } private void executeAnalyze(AnalyzeStmt analyzeStmt, AnalyzeStatus analyzeStatus, Database db, Table table) { ConnectContext statsConnectCtx = StatisticUtils.buildConnectContext(); statsConnectCtx.getSessionVariable().setStatisticCollectParallelism( context.getSessionVariable().getStatisticCollectParallelism()); statsConnectCtx.setThreadLocalInfo(); statsConnectCtx.setStatisticsConnection(true); executeAnalyze(statsConnectCtx, analyzeStmt, analyzeStatus, db, table); } private void executeAnalyze(ConnectContext statsConnectCtx, AnalyzeStmt analyzeStmt, AnalyzeStatus analyzeStatus, Database db, Table table) { StatisticExecutor statisticExecutor = new StatisticExecutor(); if (analyzeStmt.isExternal()) { StatsConstants.AnalyzeType analyzeType = analyzeStmt.isSample() ? 
StatsConstants.AnalyzeType.SAMPLE : StatsConstants.AnalyzeType.FULL; statisticExecutor.collectStatistics(statsConnectCtx, StatisticsCollectJobFactory.buildExternalStatisticsCollectJob( analyzeStmt.getTableName().getCatalog(), db, table, analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } else { if (analyzeStmt.getAnalyzeTypeDesc() instanceof AnalyzeHistogramDesc) { statisticExecutor.collectStatistics(statsConnectCtx, new HistogramStatisticsCollectJob(db, table, analyzeStmt.getColumnNames(), StatsConstants.AnalyzeType.HISTOGRAM, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } else { StatsConstants.AnalyzeType analyzeType = analyzeStmt.isSample() ? StatsConstants.AnalyzeType.SAMPLE : StatsConstants.AnalyzeType.FULL; statisticExecutor.collectStatistics(statsConnectCtx, StatisticsCollectJobFactory.buildStatisticsCollectJob(db, table, null, analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } } } private void handleDropStatsStmt() { DropStatsStmt dropStatsStmt = (DropStatsStmt) parsedStmt; Table table = MetaUtils.getTable(context, dropStatsStmt.getTableName()); if (dropStatsStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().dropExternalStats(table.getUUID()); List<String> columns = table.getBaseSchema().stream().map(Column::getName).collect(Collectors.toList()); GlobalStateMgr.getCurrentStatisticStorage().expireConnectorTableColumnStatistics(table, columns); } else { List<String> columns = table.getBaseSchema().stream().filter(d -> !d.isAggregated()).map(Column::getName) .collect(Collectors.toList()); GlobalStateMgr.getCurrentAnalyzeMgr().dropAnalyzeStatus(table.getId()); GlobalStateMgr.getCurrentAnalyzeMgr() .dropBasicStatsMetaAndData(StatisticUtils.buildConnectContext(), Sets.newHashSet(table.getId())); GlobalStateMgr.getCurrentStatisticStorage().expireTableAndColumnStatistics(table, columns); } } private void handleDropHistogramStmt() { DropHistogramStmt dropHistogramStmt = (DropHistogramStmt) parsedStmt; OlapTable table = (OlapTable) MetaUtils.getTable(context, dropHistogramStmt.getTableName()); List<String> columns = table.getBaseSchema().stream().filter(d -> !d.isAggregated()).map(Column::getName) .collect(Collectors.toList()); GlobalStateMgr.getCurrentAnalyzeMgr().dropAnalyzeStatus(table.getId()); GlobalStateMgr.getCurrentAnalyzeMgr() .dropHistogramStatsMetaAndData(StatisticUtils.buildConnectContext(), Sets.newHashSet(table.getId())); GlobalStateMgr.getCurrentStatisticStorage().expireHistogramStatistics(table.getId(), columns); } private void handleKillAnalyzeStmt() { KillAnalyzeStmt killAnalyzeStmt = (KillAnalyzeStmt) parsedStmt; long analyzeId = killAnalyzeStmt.getAnalyzeId(); AnalyzeMgr analyzeManager = GlobalStateMgr.getCurrentAnalyzeMgr(); checkPrivilegeForKillAnalyzeStmt(context, analyzeId); analyzeManager.killConnection(analyzeId); } private void checkTblPrivilegeForKillAnalyzeStmt(ConnectContext context, String catalogName, String dbName, String tableName, long analyzeId) { MetaUtils.getDatabase(catalogName, dbName); MetaUtils.getTable(catalogName, dbName, tableName); Authorizer.checkTableAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), catalogName, dbName, tableName, PrivilegeType.SELECT); Authorizer.checkTableAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), catalogName, dbName, tableName, PrivilegeType.INSERT); } public void 
checkPrivilegeForKillAnalyzeStmt(ConnectContext context, long analyzeId) { AnalyzeMgr analyzeManager = GlobalStateMgr.getCurrentAnalyzeMgr(); AnalyzeStatus analyzeStatus = analyzeManager.getAnalyzeStatus(analyzeId); AnalyzeJob analyzeJob = analyzeManager.getAnalyzeJob(analyzeId); if (analyzeStatus != null) { try { String catalogName = analyzeStatus.getCatalogName(); String dbName = analyzeStatus.getDbName(); String tableName = analyzeStatus.getTableName(); checkTblPrivilegeForKillAnalyzeStmt(context, catalogName, dbName, tableName, analyzeId); } catch (MetaNotFoundException ignore) { } } else if (analyzeJob != null) { Set<TableName> tableNames = AnalyzerUtils.getAllTableNamesForAnalyzeJobStmt(analyzeJob.getDbId(), analyzeJob.getTableId()); tableNames.forEach(tableName -> checkTblPrivilegeForKillAnalyzeStmt(context, tableName.getCatalog(), tableName.getDb(), tableName.getTbl(), analyzeId) ); } } private void handleAddSqlBlackListStmt() { AddSqlBlackListStmt addSqlBlackListStmt = (AddSqlBlackListStmt) parsedStmt; SqlBlackList.getInstance().put(addSqlBlackListStmt.getSqlPattern()); } private void handleDelSqlBlackListStmt() { DelSqlBlackListStmt delSqlBlackListStmt = (DelSqlBlackListStmt) parsedStmt; List<Long> indexs = delSqlBlackListStmt.getIndexs(); if (indexs != null) { for (long id : indexs) { SqlBlackList.getInstance().delete(id); } } } private void handleExecAsStmt() throws UserException { ExecuteAsExecutor.execute((ExecuteAsStmt) parsedStmt, context); } private void handleExecScriptStmt() throws IOException, UserException { ShowResultSet resultSet = ExecuteScriptExecutor.execute((ExecuteScriptStmt) parsedStmt, context); if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleSetRole() throws PrivilegeException, UserException { SetRoleExecutor.execute((SetRoleStmt) parsedStmt, context); } private void handleSetDefaultRole() throws PrivilegeException, UserException { SetDefaultRoleExecutor.execute((SetDefaultRoleStmt) parsedStmt, context); } private void handleUnsupportedStmt() { context.getMysqlChannel().reset(); context.getState().setOk(); } private void handleUseDbStmt() throws AnalysisException { UseDbStmt useDbStmt = (UseDbStmt) parsedStmt; try { context.getGlobalStateMgr().changeCatalogDb(context, useDbStmt.getIdentifier()); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void handleUseCatalogStmt() throws AnalysisException { UseCatalogStmt useCatalogStmt = (UseCatalogStmt) parsedStmt; try { String catalogName = useCatalogStmt.getCatalogName(); context.getGlobalStateMgr().changeCatalog(context, catalogName); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void handleSetCatalogStmt() throws AnalysisException { SetCatalogStmt setCatalogStmt = (SetCatalogStmt) parsedStmt; try { String catalogName = setCatalogStmt.getCatalogName(); context.getGlobalStateMgr().changeCatalog(context, catalogName); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void sendMetaData(ShowResultSetMetaData metaData) throws IOException { serializer.reset(); serializer.writeVInt(metaData.getColumnCount()); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); for (Column col : metaData.getColumns()) { serializer.reset(); serializer.writeField(col.getName(), col.getType()); 
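// Each serialized column definition is flushed as its own MySQL protocol packet; after the loop, an EOF packet terminates the column-definition section.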
context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } serializer.reset(); MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState()); eofPacket.writeTo(serializer); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } private void sendFields(List<String> colNames, List<Expr> exprs) throws IOException { serializer.reset(); serializer.writeVInt(colNames.size()); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } for (int i = 0; i < colNames.size(); ++i) { serializer.reset(); serializer.writeField(colNames.get(i), exprs.get(i).getOriginType()); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } } serializer.reset(); MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState()); eofPacket.writeTo(serializer); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } } public void sendShowResult(ShowResultSet resultSet) throws IOException { context.updateReturnRows(resultSet.getResultRows().size()); if (context instanceof HttpConnectContext) { httpResultSender.sendShowResult(resultSet); return; } sendMetaData(resultSet.getMetaData()); for (List<String> row : resultSet.getResultRows()) { serializer.reset(); for (String item : row) { if (item == null || item.equals(FeConstants.NULL_STRING)) { serializer.writeNull(); } else { serializer.writeLenEncodedString(item); } } context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } context.getState().setEof(); } private void handleShow() throws IOException, AnalysisException, DdlException { ShowExecutor executor = new ShowExecutor(context, (ShowStmt) parsedStmt); ShowResultSet resultSet = executor.execute(); if (resultSet == null) { return; } if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleExplainStmt(String explainString) throws IOException { if (context instanceof HttpConnectContext) { httpResultSender.sendExplainResult(explainString); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(explainString); } ShowResultSetMetaData metaData = ShowResultSetMetaData.builder() .addColumn(new Column("Explain String", ScalarType.createVarchar(20))) .build(); sendMetaData(metaData); for (String item : explainString.split("\n")) { serializer.reset(); serializer.writeLenEncodedString(item); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } context.getState().setEof(); } private String buildExplainString(ExecPlan execPlan, ResourceGroupClassifier.QueryType queryType) { String explainString = ""; if (parsedStmt.getExplainLevel() == StatementBase.ExplainLevel.VERBOSE) { TWorkGroup resourceGroup = CoordinatorPreprocessor.prepareResourceGroup(context, queryType); String resourceGroupStr = resourceGroup != null ? 
resourceGroup.getName() : ResourceGroup.DEFAULT_RESOURCE_GROUP_NAME; explainString += "RESOURCE GROUP: " + resourceGroupStr + "\n\n"; } if (execPlan == null) { explainString += "NOT AVAILABLE"; } else { if (parsedStmt.getExplainLevel() == OPTIMIZER) { explainString += PlannerProfile.printPlannerTimer(context.getPlannerProfile()); } else if (parsedStmt.getExplainLevel() == REWRITE) { explainString += PlannerProfile.printPlannerTrace(context.getPlannerProfile()); } else { explainString += execPlan.getExplainString(parsedStmt.getExplainLevel()); } } return explainString; } private void handleDdlStmt() { try { ShowResultSet resultSet = DDLStmtExecutor.execute(parsedStmt, context); if (resultSet == null) { context.getState().setOk(); } else { if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); } else { sendShowResult(resultSet); } } } catch (QueryStateException e) { if (e.getQueryState().getStateType() != MysqlStateType.OK) { String sql = AstToStringBuilder.toString(parsedStmt); if (sql == null) { sql = originStmt.originStmt; } LOG.warn("DDL statement (" + sql + ") process failed.", e); } context.setState(e.getQueryState()); } catch (Throwable e) { String sql = AstToStringBuilder.toString(parsedStmt); if (sql == null || sql.isEmpty()) { sql = originStmt.originStmt; } LOG.warn("DDL statement (" + sql + ") process failed.", e); context.getState().setError("Unexpected exception: " + e.getMessage()); } } private void handleExportStmt(UUID queryId) throws Exception { ExportStmt exportStmt = (ExportStmt) parsedStmt; exportStmt.setExportStartTime(context.getStartTime()); context.getGlobalStateMgr().getExportMgr().addExportJob(queryId, exportStmt); } private void handleUpdateFailPointStatusStmt() throws Exception { FailPointExecutor executor = new FailPointExecutor(context, parsedStmt); executor.execute(); } public PQueryStatistics getQueryStatisticsForAuditLog() { if (statisticsForAuditLog == null && coord != null) { statisticsForAuditLog = coord.getAuditStatistics(); } if (statisticsForAuditLog == null) { statisticsForAuditLog = new PQueryStatistics(); } if (statisticsForAuditLog.scanBytes == null) { statisticsForAuditLog.scanBytes = 0L; } if (statisticsForAuditLog.scanRows == null) { statisticsForAuditLog.scanRows = 0L; } if (statisticsForAuditLog.cpuCostNs == null) { statisticsForAuditLog.cpuCostNs = 0L; } if (statisticsForAuditLog.memCostBytes == null) { statisticsForAuditLog.memCostBytes = 0L; } if (statisticsForAuditLog.spillBytes == null) { statisticsForAuditLog.spillBytes = 0L; } return statisticsForAuditLog; } public void handleInsertOverwrite(InsertStmt insertStmt) throws Exception { Database database = MetaUtils.getDatabase(context, insertStmt.getTableName()); Table table = insertStmt.getTargetTable(); if (!(table instanceof OlapTable)) { LOG.warn("insert overwrite table:{} type:{} is not supported", table.getName(), table.getClass()); throw new RuntimeException("not supported table type for insert overwrite"); } OlapTable olapTable = (OlapTable) insertStmt.getTargetTable(); InsertOverwriteJob job = new InsertOverwriteJob(GlobalStateMgr.getCurrentState().getNextId(), insertStmt, database.getId(), olapTable.getId()); if (!database.writeLockAndCheckExist()) { throw new DmlException("database:%s does not exist.", database.getFullName()); } try { CreateInsertOverwriteJobLog info = new CreateInsertOverwriteJobLog(job.getJobId(), job.getTargetDbId(), job.getTargetTableId(), job.getSourcePartitionIds()); 
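// The edit-log write below happens while the database write lock is held, so the insert-overwrite job is replayed on follower FEs and survives a leader failover before the job is actually executed.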
GlobalStateMgr.getCurrentState().getEditLog().logCreateInsertOverwrite(info); } finally { database.writeUnlock(); } insertStmt.setOverwriteJobId(job.getJobId()); InsertOverwriteJobMgr manager = GlobalStateMgr.getCurrentState().getInsertOverwriteJobMgr(); manager.executeJob(context, this, job); } /** * `handleDMLStmtWithProfile` executes a DML statement and writes the profile at the end. * NOTE: `writeProfile` can only be called once, otherwise the profile details will be lost. */ public void handleDMLStmtWithProfile(ExecPlan execPlan, DmlStmt stmt, long beginTimeInNanoSecond) throws Exception { try { handleDMLStmt(execPlan, stmt); } catch (Throwable t) { LOG.warn("DML statement(" + originStmt.originStmt + ") process failed.", t); throw t; } finally { if (context.getSessionVariable().isEnableProfile()) { writeProfile(execPlan, beginTimeInNanoSecond); if (parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel())) { handleExplainStmt(ExplainAnalyzer.analyze(ProfilingExecPlan.buildFrom(execPlan), profile, null)); } } QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } } /** * `handleDMLStmt` only executes the DML statement and does not write the profile at the end. */ public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception { boolean isExplainAnalyze = parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel()); boolean isSchedulerExplain = parsedStmt.isExplain() && StatementBase.ExplainLevel.SCHEDULER.equals(parsedStmt.getExplainLevel()); if (isExplainAnalyze) { context.getSessionVariable().setEnableProfile(true); context.getSessionVariable().setPipelineProfileLevel(1); context.getSessionVariable().setProfileLimitFold(false); } else if (isSchedulerExplain) { } else if (stmt.isExplain()) { handleExplainStmt(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.INSERT)); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.INSERT)); } if (stmt instanceof DeleteStmt && ((DeleteStmt) stmt).shouldHandledByDeleteHandler()) { try { context.getGlobalStateMgr().getDeleteMgr().process((DeleteStmt) stmt); context.getState().setOk(); } catch (QueryStateException e) { if (e.getQueryState().getStateType() != MysqlStateType.OK) { LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e); } context.setState(e.getQueryState()); } return; } MetaUtils.normalizationTableName(context, stmt.getTableName()); String catalogName = stmt.getTableName().getCatalog(); String dbName = stmt.getTableName().getDb(); String tableName = stmt.getTableName().getTbl(); Database database = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName); final Table targetTable; if (stmt instanceof InsertStmt && ((InsertStmt) stmt).getTargetTable() != null) { targetTable = ((InsertStmt) stmt).getTargetTable(); } else { targetTable = GlobalStateMgr.getCurrentState().getMetadataMgr().getTable(catalogName, dbName, tableName); } if (isExplainAnalyze) { Preconditions.checkState(targetTable instanceof OlapTable, "explain analyze only supports insert into olap native table"); } if (parsedStmt instanceof InsertStmt && ((InsertStmt) parsedStmt).isOverwrite() && !((InsertStmt) parsedStmt).hasOverwriteJob() && !(targetTable.isIcebergTable() || targetTable.isHiveTable())) { handleInsertOverwrite((InsertStmt) parsedStmt); return; } String label = DebugUtil.printId(context.getExecutionId()); if (stmt instanceof
InsertStmt) { String stmtLabel = ((InsertStmt) stmt).getLabel(); label = Strings.isNullOrEmpty(stmtLabel) ? "insert_" + label : stmtLabel; } else if (stmt instanceof UpdateStmt) { label = "update_" + label; } else if (stmt instanceof DeleteStmt) { label = "delete_" + label; } else { throw unsupportedException( "Unsupported dml statement " + parsedStmt.getClass().getSimpleName()); } TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING; MetricRepo.COUNTER_LOAD_ADD.increase(1L); long transactionId = -1; TransactionState txnState = null; if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; TAuthenticateParams authenticateParams = new TAuthenticateParams(); authenticateParams.setUser(externalTable.getSourceTableUser()); authenticateParams.setPasswd(externalTable.getSourceTablePassword()); authenticateParams.setHost(context.getRemoteIP()); authenticateParams.setDb_name(externalTable.getSourceTableDbName()); authenticateParams.setTable_names(Lists.newArrayList(externalTable.getSourceTableName())); transactionId = GlobalStateMgr.getCurrentGlobalTransactionMgr() .beginRemoteTransaction(externalTable.getSourceTableDbId(), Lists.newArrayList(externalTable.getSourceTableId()), label, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), new TransactionState.TxnCoordinator(TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), sourceType, context.getSessionVariable().getQueryTimeoutS(), authenticateParams); } else if (targetTable instanceof SystemTable || (targetTable.isIcebergTable() || targetTable.isHiveTable())) { } else { transactionId = GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction( database.getId(), Lists.newArrayList(targetTable.getId()), label, new TransactionState.TxnCoordinator(TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), sourceType, context.getSessionVariable().getQueryTimeoutS()); txnState = GlobalStateMgr.getCurrentGlobalTransactionMgr() .getTransactionState(database.getId(), transactionId); if (txnState == null) { throw new DdlException("txn does not exist: " + transactionId); } if (targetTable instanceof OlapTable) { txnState.addTableIndexes((OlapTable) targetTable); } } if (context.getMysqlChannel() != null) { context.getMysqlChannel().reset(); } long createTime = System.currentTimeMillis(); long loadedRows = 0; int filteredRows = 0; long loadedBytes = 0; long jobId = -1; long estimateScanRows = -1; TransactionStatus txnStatus = TransactionStatus.ABORTED; boolean insertError = false; String trackingSql = ""; try { if (execPlan.getFragments().get(0).getSink() instanceof OlapTableSink) { context.getSessionVariable().setPreferComputeNode(false); context.getSessionVariable().setUseComputeNodes(0); OlapTableSink dataSink = (OlapTableSink) execPlan.getFragments().get(0).getSink(); dataSink.init(context.getExecutionId(), transactionId, database.getId(), ConnectContext.get().getSessionVariable().getQueryTimeoutS()); dataSink.complete(); } coord = getCoordinatorFactory().createInsertScheduler( context, execPlan.getFragments(), execPlan.getScanNodes(), execPlan.getDescTbl().toThrift()); List<ScanNode> scanNodes = execPlan.getScanNodes(); boolean containOlapScanNode = false; for (ScanNode scanNode : scanNodes) { if (scanNode instanceof OlapScanNode) { estimateScanRows += ((OlapScanNode) scanNode).getActualRows(); containOlapScanNode = true; } } TLoadJobType type; if (containOlapScanNode) { 
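// At least one OLAP scan node means this is an INSERT ... SELECT reading native tables, so the load job is tagged INSERT_QUERY and uses the scan-row estimate accumulated above; the else branch tags pure INSERT VALUES and falls back to the plan root's cardinality.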
coord.setLoadJobType(TLoadJobType.INSERT_QUERY); type = TLoadJobType.INSERT_QUERY; } else { estimateScanRows = execPlan.getFragments().get(0).getPlanRoot().getCardinality(); coord.setLoadJobType(TLoadJobType.INSERT_VALUES); type = TLoadJobType.INSERT_VALUES; } context.setStatisticsJob(AnalyzerUtils.isStatisticsJob(context, parsedStmt)); if (!(targetTable.isIcebergTable() || targetTable.isHiveTable())) { jobId = context.getGlobalStateMgr().getLoadMgr().registerLoadJob( label, database.getFullName(), targetTable.getId(), EtlJobType.INSERT, createTime, estimateScanRows, type, ConnectContext.get().getSessionVariable().getQueryTimeoutS()); } coord.setLoadJobId(jobId); trackingSql = "select tracking_log from information_schema.load_tracking_logs where job_id=" + jobId; QeProcessorImpl.QueryInfo queryInfo = new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), queryInfo); if (isSchedulerExplain) { coord.startSchedulingWithoutDeploy(); handleExplainStmt(coord.getSchedulerExplain()); return; } coord.exec(); coord.setTopProfileSupplier(this::buildTopLevelProfile); coord.setExecPlanSupplier(() -> execPlan); long jobDeadLineMs = System.currentTimeMillis() + context.getSessionVariable().getQueryTimeoutS() * 1000; coord.join(context.getSessionVariable().getQueryTimeoutS()); if (!coord.isDone()) { /* * In this case, there are two factors that can lead to the query being cancelled: * 1: TIMEOUT * 2: BE EXCEPTION * We should distinguish between these two factors. */ if (!coord.checkBackendState()) { if (Config.log_plan_cancelled_by_crash_be && context.getQueryDetail() == null) { LOG.warn("Query cancelled by crash of backends [QueryId={}] [SQL={}] [Plan={}]", DebugUtil.printId(context.getExecutionId()), originStmt == null ?
"" : originStmt.originStmt, execPlan.getExplainString(TExplainLevel.COSTS)); } coord.cancel(); ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_EXCEPTION); } else { coord.cancel(); if (coord.isThriftServerHighLoad()) { ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_TIMEOUT, "Please check the thrift-server-pool metrics, " + "if the pool size reaches thrift_server_max_worker_threads(default is 4096), " + "you can set the config to a higher value in fe.conf, " + "or set parallel_fragment_exec_instance_num to a lower value in session variable"); } else { ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_TIMEOUT, "Increase the query_timeout session variable and retry"); } } } if (!coord.getExecStatus().ok()) { String errMsg = coord.getExecStatus().getErrorMsg(); if (errMsg.length() == 0) { errMsg = coord.getExecStatus().getErrorCodeString(); } LOG.warn("insert failed: {}", errMsg); ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT); } LOG.debug("delta files is {}", coord.getDeltaUrls()); if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) { loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL)); } if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) { filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL)); } if (coord.getLoadCounters().get(LoadJob.LOADED_BYTES) != null) { loadedBytes = Long.parseLong(coord.getLoadCounters().get(LoadJob.LOADED_BYTES)); } if (context.getSessionVariable().getEnableInsertStrict()) { if (filteredRows > 0) { if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; GlobalStateMgr.getCurrentGlobalTransactionMgr().abortRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), TransactionCommitFailedException.FILTER_DATA_IN_STRICT_MODE + ", tracking sql = " + trackingSql ); } else if (targetTable instanceof SystemTable || (targetTable.isHiveTable() || targetTable.isIcebergTable())) { } else { GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction( database.getId(), transactionId, TransactionCommitFailedException.FILTER_DATA_IN_STRICT_MODE + ", tracking sql = " + trackingSql, TabletFailInfo.fromThrift(coord.getFailInfos()) ); } context.getState().setError("Insert has filtered data in strict mode, txn_id = " + transactionId + " tracking sql = " + trackingSql); insertError = true; return; } } if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; if (GlobalStateMgr.getCurrentGlobalTransactionMgr().commitRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), coord.getCommitInfos())) { txnStatus = TransactionStatus.VISIBLE; MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); } else { txnStatus = TransactionStatus.COMMITTED; } } else if (targetTable instanceof SystemTable) { txnStatus = TransactionStatus.VISIBLE; } else if (targetTable instanceof IcebergTable) { List<TSinkCommitInfo> commitInfos = coord.getSinkCommitInfos(); if (stmt instanceof InsertStmt && ((InsertStmt) stmt).isOverwrite()) { for (TSinkCommitInfo commitInfo : commitInfos) { commitInfo.setIs_overwrite(true); } } context.getGlobalStateMgr().getMetadataMgr().finishSink(catalogName, dbName, tableName, commitInfos); txnStatus = TransactionStatus.VISIBLE; label = "FAKE_ICEBERG_SINK_LABEL"; } else 
if (targetTable instanceof HiveTable) { List<TSinkCommitInfo> commitInfos = coord.getSinkCommitInfos(); HiveTableSink hiveTableSink = (HiveTableSink) execPlan.getFragments().get(0).getSink(); String stagingDir = hiveTableSink.getStagingDir(); if (stmt instanceof InsertStmt) { InsertStmt insertStmt = (InsertStmt) stmt; for (TSinkCommitInfo commitInfo : commitInfos) { commitInfo.setStaging_dir(stagingDir); if (insertStmt.isOverwrite()) { commitInfo.setIs_overwrite(true); } } } context.getGlobalStateMgr().getMetadataMgr().finishSink(catalogName, dbName, tableName, commitInfos); txnStatus = TransactionStatus.VISIBLE; label = "FAKE_HIVE_SINK_LABEL"; } else { if (isExplainAnalyze) { GlobalStateMgr.getCurrentGlobalTransactionMgr() .abortTransaction(database.getId(), transactionId, "Explain Analyze"); txnStatus = TransactionStatus.ABORTED; } else if (GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction( database, transactionId, TabletCommitInfo.fromThrift(coord.getCommitInfos()), TabletFailInfo.fromThrift(coord.getFailInfos()), Config.enable_sync_publish ? jobDeadLineMs - System.currentTimeMillis() : context.getSessionVariable().getTransactionVisibleWaitTimeout() * 1000, new InsertTxnCommitAttachment(loadedRows))) { txnStatus = TransactionStatus.VISIBLE; MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); if (null != targetTable) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(targetTable.getId()); entity.counterInsertLoadFinishedTotal.increase(1L); entity.counterInsertLoadRowsTotal.increase(loadedRows); entity.counterInsertLoadBytesTotal.increase(loadedBytes); } } else { txnStatus = TransactionStatus.COMMITTED; } } } catch (Throwable t) { String failedSql = ""; if (originStmt != null && originStmt.originStmt != null) { failedSql = originStmt.originStmt; } LOG.warn("failed to handle stmt [{}] label: {}", failedSql, label, t); String errMsg = t.getMessage(); if (errMsg == null) { errMsg = "A problem occurred while executing the [ " + failedSql + "] statement with label:" + label; } try { if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; GlobalStateMgr.getCurrentGlobalTransactionMgr().abortRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), errMsg); } else { GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction( database.getId(), transactionId, errMsg, coord == null ? Lists.newArrayList() : TabletFailInfo.fromThrift(coord.getFailInfos())); } } catch (Exception abortTxnException) { LOG.warn("errors when abort txn", abortTxnException); } StringBuilder sb = new StringBuilder(errMsg); if (coord != null && !Strings.isNullOrEmpty(coord.getTrackingUrl())) { sb.append(". 
tracking sql: ").append(trackingSql); } context.getState().setError(sb.toString()); try { if (jobId != -1) { Preconditions.checkNotNull(coord); context.getGlobalStateMgr().getLoadMgr() .recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "Cancelled, msg: " + t.getMessage(), coord.getTrackingUrl()); jobId = -1; } } catch (Exception abortTxnException) { LOG.warn("errors when cancel insert load job {}", jobId); } throw new UserException(t.getMessage()); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); if (insertError) { try { if (jobId != -1) { context.getGlobalStateMgr().getLoadMgr() .recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "Cancelled", coord.getTrackingUrl()); jobId = -1; } } catch (Exception abortTxnException) { LOG.warn("errors when cancel insert load job {}", jobId); } } else if (txnState != null) { StatisticUtils.triggerCollectionOnFirstLoad(txnState, database, targetTable, true); } } String errMsg = ""; if (txnStatus.equals(TransactionStatus.COMMITTED)) { String timeoutInfo = GlobalStateMgr.getCurrentGlobalTransactionMgr() .getTxnPublishTimeoutDebugInfo(database.getId(), transactionId); LOG.warn("txn {} publish timeout {}", transactionId, timeoutInfo); if (timeoutInfo.length() > 240) { timeoutInfo = timeoutInfo.substring(0, 240) + "..."; } errMsg = "Publish timeout " + timeoutInfo; } try { if (jobId != -1) { context.getGlobalStateMgr().getLoadMgr().recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "", coord.getTrackingUrl()); } } catch (MetaNotFoundException e) { LOG.warn("Record info of insert load with error {}", e.getMessage(), e); errMsg = "Record info of insert load with error " + e.getMessage(); } StringBuilder sb = new StringBuilder(); sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name()); sb.append("', 'txnId':'").append(transactionId).append("'"); if (!Strings.isNullOrEmpty(errMsg)) { sb.append(", 'err':'").append(errMsg).append("'"); } sb.append("}"); context.getState().setOk(loadedRows, filteredRows, sb.toString()); } public String getOriginStmtInString() { if (originStmt == null) { return ""; } return originStmt.originStmt; } public Pair<List<TResultBatch>, Status> executeStmtWithExecPlan(ConnectContext context, ExecPlan plan) { List<TResultBatch> sqlResult = Lists.newArrayList(); try { UUID uuid = context.getQueryId(); context.setExecutionId(UUIDUtil.toTUniqueId(uuid)); coord = getCoordinatorFactory().createQueryScheduler( context, plan.getFragments(), plan.getScanNodes(), plan.getDescTbl().toThrift()); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), coord); coord.exec(); RowBatch batch; do { batch = coord.getNext(); if (batch.getBatch() != null) { sqlResult.add(batch.getBatch()); } } while (!batch.isEos()); } catch (Exception e) { LOG.warn(e); coord.getExecStatus().setInternalErrorStatus(e.getMessage()); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } return Pair.create(sqlResult, coord.getExecStatus()); } public List<ByteBuffer> getProxyResultBuffer() { return proxyResultBuffer; } }
class StmtExecutor { private static final Logger LOG = LogManager.getLogger(StmtExecutor.class); private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0); private final ConnectContext context; private final MysqlSerializer serializer; private final OriginStatement originStmt; private StatementBase parsedStmt; private RuntimeProfile profile; private Coordinator coord = null; private LeaderOpExecutor leaderOpExecutor = null; private RedirectStatus redirectStatus = null; private final boolean isProxy; private List<ByteBuffer> proxyResultBuffer = null; private ShowResultSet proxyResultSet = null; private PQueryStatistics statisticsForAuditLog; private List<StmtExecutor> subStmtExecutors; private HttpResultSender httpResultSender; public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) { this.context = context; this.originStmt = originStmt; this.serializer = context.getSerializer(); this.isProxy = isProxy; if (isProxy) { proxyResultBuffer = new ArrayList<>(); } } @VisibleForTesting public StmtExecutor(ConnectContext context, String stmt) { this(context, new OriginStatement(stmt, 0), false); } public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) { this.context = ctx; this.parsedStmt = parsedStmt; this.originStmt = parsedStmt.getOrigStmt(); this.serializer = context.getSerializer(); this.isProxy = false; } public Coordinator getCoordinator() { return this.coord; } private RuntimeProfile buildTopLevelProfile() { RuntimeProfile profile = new RuntimeProfile("Query"); RuntimeProfile summaryProfile = new RuntimeProfile("Summary"); summaryProfile.addInfoString(ProfileManager.QUERY_ID, DebugUtil.printId(context.getExecutionId())); summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(context.getStartTime())); long currentTimestamp = System.currentTimeMillis(); long totalTimeMs = currentTimestamp - context.getStartTime(); summaryProfile.addInfoString(ProfileManager.END_TIME, TimeUtils.longToTimeString(currentTimestamp)); summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs)); summaryProfile.addInfoString(ProfileManager.QUERY_TYPE, "Query"); summaryProfile.addInfoString(ProfileManager.QUERY_STATE, context.getState().toProfileString()); summaryProfile.addInfoString("StarRocks Version", String.format("%s-%s", Version.STARROCKS_VERSION, Version.STARROCKS_COMMIT_HASH)); summaryProfile.addInfoString(ProfileManager.USER, context.getQualifiedUser()); summaryProfile.addInfoString(ProfileManager.DEFAULT_DB, context.getDatabase()); summaryProfile.addInfoString(ProfileManager.SQL_STATEMENT, originStmt.originStmt); SessionVariable variables = context.getSessionVariable(); if (variables != null) { StringBuilder sb = new StringBuilder(); sb.append(SessionVariable.PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM).append("=") .append(variables.getParallelExecInstanceNum()).append(","); sb.append(SessionVariable.MAX_PARALLEL_SCAN_INSTANCE_NUM).append("=") .append(variables.getMaxParallelScanInstanceNum()).append(","); sb.append(SessionVariable.PIPELINE_DOP).append("=").append(variables.getPipelineDop()).append(","); sb.append(SessionVariable.ENABLE_ADAPTIVE_SINK_DOP).append("=") .append(variables.getEnableAdaptiveSinkDop()) .append(","); sb.append(SessionVariable.ENABLE_RUNTIME_ADAPTIVE_DOP).append("=") .append(variables.isEnableRuntimeAdaptiveDop()) .append(","); sb.append(SessionVariable.RUNTIME_PROFILE_REPORT_INTERVAL).append("=") .append(variables.getRuntimeProfileReportInterval()) .append(","); if 
(context.getResourceGroup() != null) { sb.append(SessionVariable.RESOURCE_GROUP).append("=").append(context.getResourceGroup().getName()) .append(","); } sb.deleteCharAt(sb.length() - 1); summaryProfile.addInfoString(ProfileManager.VARIABLES, sb.toString()); summaryProfile.addInfoString("NonDefaultSessionVariables", variables.getNonDefaultVariablesJson()); } profile.addChild(summaryProfile); RuntimeProfile plannerProfile = new RuntimeProfile("Planner"); profile.addChild(plannerProfile); context.getPlannerProfile().build(plannerProfile); return profile; } public boolean isForwardToLeader() { if (GlobalStateMgr.getCurrentState().isLeader()) { return false; } if (parsedStmt instanceof QueryStatement && !GlobalStateMgr.getCurrentState().isLeader() && !GlobalStateMgr.getCurrentState().canRead()) { return true; } if (redirectStatus == null) { return false; } else { return redirectStatus.isForwardToLeader(); } } public ByteBuffer getOutputPacket() { if (leaderOpExecutor == null) { return null; } else { return leaderOpExecutor.getOutputPacket(); } } public ShowResultSet getProxyResultSet() { return proxyResultSet; } public ShowResultSet getShowResultSet() { if (leaderOpExecutor == null) { return null; } else { return leaderOpExecutor.getProxyResultSet(); } } public boolean sendResultToChannel(MysqlChannel channel) throws IOException { if (leaderOpExecutor == null) { return false; } else { return leaderOpExecutor.sendResultToChannel(channel); } } public StatementBase getParsedStmt() { return parsedStmt; } public void execute() throws Exception { long beginTimeInNanoSecond = TimeUtils.getStartTime(); context.setStmtId(STMT_ID_GENERATOR.incrementAndGet()); UUID uuid = context.getQueryId(); context.setExecutionId(UUIDUtil.toTUniqueId(uuid)); SessionVariable sessionVariableBackup = context.getSessionVariable(); if (context instanceof HttpConnectContext) { httpResultSender = new HttpResultSender((HttpConnectContext) context); } try { resolveParseStmtForForward(); if (parsedStmt != null) { Map<String, String> optHints = null; if (parsedStmt instanceof QueryStatement && ((QueryStatement) parsedStmt).getQueryRelation() instanceof SelectRelation) { SelectRelation selectRelation = (SelectRelation) ((QueryStatement) parsedStmt).getQueryRelation(); optHints = selectRelation.getSelectList().getOptHints(); } if (optHints != null) { SessionVariable sessionVariable = (SessionVariable) sessionVariableBackup.clone(); for (String key : optHints.keySet()) { VariableMgr.setSystemVariable(sessionVariable, new SystemVariable(key, new StringLiteral(optHints.get(key))), true); } context.setSessionVariable(sessionVariable); } if (parsedStmt.isExplain()) { context.setExplainLevel(parsedStmt.getExplainLevel()); } } ExecPlan execPlan = null; boolean execPlanBuildByNewPlanner = false; try (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer("Total")) { redirectStatus = parsedStmt.getRedirectStatus(); if (!isForwardToLeader()) { if (context.shouldDumpQuery()) { if (context.getDumpInfo() == null) { context.setDumpInfo(new QueryDumpInfo(context)); } else { context.getDumpInfo().reset(); } context.getDumpInfo().setOriginStmt(parsedStmt.getOrigStmt().originStmt); context.getDumpInfo().setStatement(parsedStmt); } if (parsedStmt instanceof ShowStmt) { com.starrocks.sql.analyzer.Analyzer.analyze(parsedStmt, context); Authorizer.check(parsedStmt, context); QueryStatement selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(); if (selectStmt != null) { parsedStmt = selectStmt; execPlan = StatementPlanner.plan(parsedStmt, 
context); } } else { execPlan = StatementPlanner.plan(parsedStmt, context); if (parsedStmt instanceof QueryStatement && context.shouldDumpQuery()) { context.getDumpInfo().setExplainInfo(execPlan.getExplainString(TExplainLevel.COSTS)); } } execPlanBuildByNewPlanner = true; } } catch (SemanticException e) { dumpException(e); throw new AnalysisException(e.getMessage()); } catch (StarRocksPlannerException e) { dumpException(e); if (e.getType().equals(ErrorType.USER_ERROR)) { throw e; } else if (e.getType().equals(ErrorType.UNSUPPORTED) && e.getMessage().contains("UDF function")) { LOG.warn("New planner not implement : " + originStmt.originStmt, e); analyze(context.getSessionVariable().toThrift()); } else { LOG.warn("New planner error: " + originStmt.originStmt, e); throw e; } } if (context.isHTTPQueryDump) { return; } if (isForwardToLeader()) { forwardToLeader(); return; } else { LOG.debug("no need to transfer to Leader. stmt: {}", context.getStmtId()); } if (parsedStmt instanceof QueryStatement) { context.getState().setIsQuery(true); final boolean isStatisticsJob = AnalyzerUtils.isStatisticsJob(context, parsedStmt); context.setStatisticsJob(isStatisticsJob); if (Config.enable_sql_blacklist && !parsedStmt.isExplain()) { OriginStatement origStmt = parsedStmt.getOrigStmt(); if (origStmt != null) { String originSql = origStmt.originStmt.trim() .toLowerCase().replaceAll(" +", " "); SqlBlackList.verifying(originSql); } } Preconditions.checkNotNull(execPlan, "query must has a plan"); int retryTime = Config.max_query_retry_time; for (int i = 0; i < retryTime; i++) { boolean needRetry = false; try { if (i > 0) { uuid = UUID.randomUUID(); LOG.info("transfer QueryId: {} to {}", DebugUtil.printId(context.getQueryId()), DebugUtil.printId(uuid)); context.setExecutionId( new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits())); } Preconditions.checkState(execPlanBuildByNewPlanner, "must use new planner"); handleQueryStmt(execPlan); break; } catch (RemoteFileNotFoundException e) { if (i == retryTime - 1) { throw e; } List<ScanNode> scanNodes = execPlan.getScanNodes(); boolean existExternalCatalog = false; for (ScanNode scanNode : scanNodes) { if (scanNode instanceof HdfsScanNode) { HiveTable hiveTable = ((HdfsScanNode) scanNode).getHiveTable(); String catalogName = hiveTable.getCatalogName(); if (CatalogMgr.isExternalCatalog(catalogName)) { existExternalCatalog = true; ConnectorMetadata metadata = GlobalStateMgr.getCurrentState().getMetadataMgr() .getOptionalMetadata(hiveTable.getCatalogName()).get(); metadata.refreshTable(hiveTable.getDbName(), hiveTable, new ArrayList<>(), true); metadata.clear(); } } } if (!existExternalCatalog) { throw e; } if (!context.getMysqlChannel().isSend()) { String originStmt; if (parsedStmt.getOrigStmt() != null) { originStmt = parsedStmt.getOrigStmt().originStmt; } else { originStmt = this.originStmt.originStmt; } needRetry = true; LOG.warn("retry {} times. stmt: {}", (i + 1), originStmt); } else { throw e; } PlannerProfile.addCustomProperties("HMS.RETRY", String.valueOf(i + 1)); } catch (RpcException e) { if (i == 0 && context.getQueryDetail() == null && Config.log_plan_cancelled_by_crash_be) { LOG.warn( "Query cancelled by crash of backends or RpcException, [QueryId={}] [SQL={}] [Plan={}]", DebugUtil.printId(context.getExecutionId()), originStmt == null ? 
"" : originStmt.originStmt, execPlan.getExplainString(TExplainLevel.COSTS), e); } if (i == retryTime - 1) { throw e; } if (!context.getMysqlChannel().isSend()) { String originStmt; if (parsedStmt.getOrigStmt() != null) { originStmt = parsedStmt.getOrigStmt().originStmt; } else { originStmt = this.originStmt.originStmt; } needRetry = true; LOG.warn("retry {} times. stmt: {}", (i + 1), originStmt); } else { throw e; } } finally { if (!needRetry && context.getSessionVariable().isEnableProfile()) { writeProfile(execPlan, beginTimeInNanoSecond); if (parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel())) { handleExplainStmt(ExplainAnalyzer.analyze( ProfilingExecPlan.buildFrom(execPlan), profile, null)); } } QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } } } else if (parsedStmt instanceof SetStmt) { handleSetStmt(); } else if (parsedStmt instanceof UseDbStmt) { handleUseDbStmt(); } else if (parsedStmt instanceof UseCatalogStmt) { handleUseCatalogStmt(); } else if (parsedStmt instanceof SetCatalogStmt) { handleSetCatalogStmt(); } else if (parsedStmt instanceof CreateTableAsSelectStmt) { if (execPlanBuildByNewPlanner) { handleCreateTableAsSelectStmt(beginTimeInNanoSecond); } else { throw new AnalysisException("old planner does not support CTAS statement"); } } else if (parsedStmt instanceof DmlStmt) { handleDMLStmtWithProfile(execPlan, (DmlStmt) parsedStmt, beginTimeInNanoSecond); } else if (parsedStmt instanceof DdlStmt) { handleDdlStmt(); } else if (parsedStmt instanceof ShowStmt) { handleShow(); } else if (parsedStmt instanceof KillStmt) { handleKill(); } else if (parsedStmt instanceof ExportStmt) { handleExportStmt(context.getQueryId()); } else if (parsedStmt instanceof UnsupportedStmt) { handleUnsupportedStmt(); } else if (parsedStmt instanceof AnalyzeStmt) { handleAnalyzeStmt(); } else if (parsedStmt instanceof AnalyzeProfileStmt) { handleAnalyzeProfileStmt(); } else if (parsedStmt instanceof DropHistogramStmt) { handleDropHistogramStmt(); } else if (parsedStmt instanceof DropStatsStmt) { handleDropStatsStmt(); } else if (parsedStmt instanceof KillAnalyzeStmt) { handleKillAnalyzeStmt(); } else if (parsedStmt instanceof AddSqlBlackListStmt) { handleAddSqlBlackListStmt(); } else if (parsedStmt instanceof DelSqlBlackListStmt) { handleDelSqlBlackListStmt(); } else if (parsedStmt instanceof ExecuteAsStmt) { handleExecAsStmt(); } else if (parsedStmt instanceof ExecuteScriptStmt) { handleExecScriptStmt(); } else if (parsedStmt instanceof SetRoleStmt) { handleSetRole(); } else if (parsedStmt instanceof SetDefaultRoleStmt) { handleSetDefaultRole(); } else if (parsedStmt instanceof UpdateFailPointStatusStatement) { handleUpdateFailPointStatusStmt(); } else { context.getState().setError("Do not support this query."); } } catch (IOException e) { LOG.warn("execute IOException ", e); context.getState().setError(e.getMessage()); throw e; } catch (UserException e) { String sql = originStmt != null ? originStmt.originStmt : ""; LOG.info("execute Exception, sql: {}, error: {}", sql, e.getMessage()); context.getState().setError(e.getMessage()); context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } catch (Throwable e) { String sql = originStmt != null ? 
originStmt.originStmt : ""; LOG.warn("execute Exception, sql " + sql, e); context.getState().setError(e.getMessage()); if (parsedStmt instanceof KillStmt) { context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } } finally { GlobalStateMgr.getCurrentState().getMetadataMgr().removeQueryMetadata(); if (context.getState().isError() && coord != null) { coord.cancel(); } if (parsedStmt instanceof InsertStmt && !parsedStmt.isExplain()) { if (Config.enable_sql_blacklist) { OriginStatement origStmt = parsedStmt.getOrigStmt(); if (origStmt != null) { String originSql = origStmt.originStmt.trim() .toLowerCase().replaceAll(" +", " "); SqlBlackList.verifying(originSql); } } } context.setSessionVariable(sessionVariableBackup); } } private void handleCreateTableAsSelectStmt(long beginTimeInNanoSecond) throws Exception { CreateTableAsSelectStmt createTableAsSelectStmt = (CreateTableAsSelectStmt) parsedStmt; if (!createTableAsSelectStmt.createTable(context)) { return; } try { InsertStmt insertStmt = createTableAsSelectStmt.getInsertStmt(); ExecPlan execPlan = new StatementPlanner().plan(insertStmt, context); handleDMLStmtWithProfile(execPlan, ((CreateTableAsSelectStmt) parsedStmt).getInsertStmt(), beginTimeInNanoSecond); if (context.getState().getStateType() == MysqlStateType.ERR) { ((CreateTableAsSelectStmt) parsedStmt).dropTable(context); } } catch (Throwable t) { LOG.warn("handle create table as select stmt fail", t); ((CreateTableAsSelectStmt) parsedStmt).dropTable(context); throw t; } } private void resolveParseStmtForForward() throws AnalysisException { if (parsedStmt == null) { List<StatementBase> stmts; try { stmts = com.starrocks.sql.parser.SqlParser.parse(originStmt.originStmt, context.getSessionVariable()); parsedStmt = stmts.get(originStmt.idx); parsedStmt.setOrigStmt(originStmt); } catch (ParsingException parsingException) { throw new AnalysisException(parsingException.getMessage()); } } } private void dumpException(Exception e) { if (context.isHTTPQueryDump()) { context.getDumpInfo().addException(ExceptionUtils.getStackTrace(e)); } else if (context.getSessionVariable().getEnableQueryDump()) { QueryDumpLog.getQueryDump().log(GsonUtils.GSON.toJson(context.getDumpInfo())); } } private void forwardToLeader() throws Exception { leaderOpExecutor = new LeaderOpExecutor(parsedStmt, originStmt, context, redirectStatus); LOG.debug("need to transfer to Leader. stmt: {}", context.getStmtId()); leaderOpExecutor.execute(); } private void writeProfile(ExecPlan plan, long beginTimeInNanoSecond) { initProfile(beginTimeInNanoSecond); ProfilingExecPlan profilingPlan = plan == null ? 
null : plan.getProfilingPlan(); String profileContent = ProfileManager.getInstance().pushProfile(profilingPlan, profile); if (context.getQueryDetail() != null) { context.getQueryDetail().setProfile(profileContent); } } public void analyze(TQueryOptions tQueryOptions) throws UserException { LOG.info("begin to analyze stmt: {}, forwarded stmt id: {}", context.getStmtId(), context.getForwardedStmtId()); resolveParseStmtForForward(); redirectStatus = parsedStmt.getRedirectStatus(); if (isForwardToLeader()) { return; } if (parsedStmt instanceof ShowStmt) { QueryStatement selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(); if (selectStmt != null) { Preconditions.checkState(false, "Shouldn't reach here"); } } try { parsedStmt.analyze(new Analyzer(context.getGlobalStateMgr(), context)); } catch (AnalysisException e) { throw e; } catch (Exception e) { LOG.warn("Analyze failed because ", e); throw new AnalysisException("Unexpected exception: " + e.getMessage()); } } public void registerSubStmtExecutor(StmtExecutor subStmtExecutor) { if (subStmtExecutors == null) { subStmtExecutors = Lists.newArrayList(); } subStmtExecutors.add(subStmtExecutor); } public void cancel() { if (parsedStmt instanceof DeleteStmt && ((DeleteStmt) parsedStmt).shouldHandledByDeleteHandler()) { DeleteStmt deleteStmt = (DeleteStmt) parsedStmt; long jobId = deleteStmt.getJobId(); if (jobId != -1) { GlobalStateMgr.getCurrentState().getDeleteMgr().killJob(jobId); } } else { if (subStmtExecutors != null && !subStmtExecutors.isEmpty()) { for (StmtExecutor sub : subStmtExecutors) { sub.cancel(); } } Coordinator coordRef = coord; if (coordRef != null) { coordRef.cancel(); } } } private void handleKill() throws DdlException { KillStmt killStmt = (KillStmt) parsedStmt; long id = killStmt.getConnectionId(); ConnectContext killCtx = context.getConnectScheduler().getContext(id); if (killCtx == null) { ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, id); } Preconditions.checkNotNull(killCtx); if (context == killCtx) { context.setKilled(); } else { if (!Objects.equals(killCtx.getQualifiedUser(), context.getQualifiedUser())) { Authorizer.checkSystemAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), PrivilegeType.OPERATE); } killCtx.kill(killStmt.isConnectionKill()); } context.getState().setOk(); } private void handleSetStmt() { try { SetStmt setStmt = (SetStmt) parsedStmt; SetExecutor executor = new SetExecutor(context, setStmt); executor.execute(); } catch (DdlException e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private Coordinator.Factory getCoordinatorFactory() { return new DefaultCoordinator.Factory(); } private void handleQueryStmt(ExecPlan execPlan) throws Exception { context.getMysqlChannel().reset(); boolean isExplainAnalyze = parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel()); boolean isSchedulerExplain = parsedStmt.isExplain() && StatementBase.ExplainLevel.SCHEDULER.equals(parsedStmt.getExplainLevel()); if (isExplainAnalyze) { context.getSessionVariable().setEnableProfile(true); context.getSessionVariable().setPipelineProfileLevel(1); context.getSessionVariable().setProfileLimitFold(false); } else if (isSchedulerExplain) { } else if (parsedStmt.isExplain()) { handleExplainStmt(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.SELECT)); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.SELECT)); } 
StatementBase queryStmt = parsedStmt; List<PlanFragment> fragments = execPlan.getFragments(); List<ScanNode> scanNodes = execPlan.getScanNodes(); TDescriptorTable descTable = execPlan.getDescTbl().toThrift(); List<String> colNames = execPlan.getColNames(); List<Expr> outputExprs = execPlan.getOutputExprs(); coord = getCoordinatorFactory().createQueryScheduler(context, fragments, scanNodes, descTable); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord)); if (isSchedulerExplain) { coord.startSchedulingWithoutDeploy(); handleExplainStmt(coord.getSchedulerExplain()); return; } coord.exec(); coord.setTopProfileSupplier(this::buildTopLevelProfile); coord.setExecPlanSupplier(() -> execPlan); RowBatch batch; boolean isOutfileQuery = false; if (queryStmt instanceof QueryStatement) { isOutfileQuery = ((QueryStatement) queryStmt).hasOutFileClause(); } if (context instanceof HttpConnectContext) { batch = httpResultSender.sendQueryResult(coord, execPlan); } else { MysqlChannel channel = context.getMysqlChannel(); boolean isSendFields = false; do { batch = coord.getNext(); if (batch.getBatch() != null && !isOutfileQuery && !isExplainAnalyze) { if (!isSendFields) { sendFields(colNames, outputExprs); isSendFields = true; } if (!isProxy && channel.isSendBufferNull()) { int bufferSize = 0; for (ByteBuffer row : batch.getBatch().getRows()) { bufferSize += (row.position() - row.limit()); } channel.initBuffer(bufferSize + 8); } for (ByteBuffer row : batch.getBatch().getRows()) { if (isProxy) { proxyResultBuffer.add(row); } else { channel.sendOnePacket(row); } } context.updateReturnRows(batch.getBatch().getRows().size()); } } while (!batch.isEos()); if (!isSendFields && !isOutfileQuery && !isExplainAnalyze) { sendFields(colNames, outputExprs); } } statisticsForAuditLog = batch.getQueryStatistics(); if (!isOutfileQuery) { context.getState().setEof(); } else { context.getState().setOk(statisticsForAuditLog.returnedRows, 0, ""); } if (null == statisticsForAuditLog || null == statisticsForAuditLog.statsItems || statisticsForAuditLog.statsItems.isEmpty()) { return; } Set<Long> tableIds = Sets.newHashSet(); for (QueryStatisticsItemPB item : statisticsForAuditLog.statsItems) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(item.tableId); entity.counterScanRowsTotal.increase(item.scanRows); entity.counterScanBytesTotal.increase(item.scanBytes); tableIds.add(item.tableId); } for (Long tableId : tableIds) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tableId); entity.counterScanFinishedTotal.increase(1L); } } private void handleAnalyzeStmt() throws IOException { AnalyzeStmt analyzeStmt = (AnalyzeStmt) parsedStmt; Database db = MetaUtils.getDatabase(context, analyzeStmt.getTableName()); Table table = MetaUtils.getTable(context, analyzeStmt.getTableName()); if (StatisticUtils.isEmptyTable(table)) { return; } StatsConstants.AnalyzeType analyzeType; if (analyzeStmt.getAnalyzeTypeDesc() instanceof AnalyzeHistogramDesc) { analyzeType = StatsConstants.AnalyzeType.HISTOGRAM; } else { if (analyzeStmt.isSample()) { analyzeType = StatsConstants.AnalyzeType.SAMPLE; } else { analyzeType = StatsConstants.AnalyzeType.FULL; } } AnalyzeStatus analyzeStatus; if (analyzeStmt.isExternal()) { String catalogName = analyzeStmt.getTableName().getCatalog(); analyzeStatus = new ExternalAnalyzeStatus(GlobalStateMgr.getCurrentState().getNextId(), catalogName, db.getOriginName(), table.getName(), 
table.getUUID(), analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties(), LocalDateTime.now()); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.PENDING); GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { analyzeStatus = new NativeAnalyzeStatus(GlobalStateMgr.getCurrentState().getNextId(), db.getId(), table.getId(), analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties(), LocalDateTime.now()); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); analyzeStatus.setStatus(StatsConstants.ScheduleStatus.PENDING); GlobalStateMgr.getCurrentAnalyzeMgr().replayAddAnalyzeStatus(analyzeStatus); } int timeout = context.getSessionVariable().getQueryTimeoutS(); try { Future<?> future = GlobalStateMgr.getCurrentAnalyzeMgr().getAnalyzeTaskThreadPool() .submit(() -> executeAnalyze(analyzeStmt, analyzeStatus, db, table)); if (!analyzeStmt.isAsync()) { context.getSessionVariable().setQueryTimeoutS((int) Config.statistic_collect_query_timeout); future.get(); } } catch (RejectedExecutionException e) { analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); analyzeStatus.setReason("The statistics tasks running concurrently exceed the upper limit"); if (analyzeStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); } } catch (ExecutionException | InterruptedException e) { analyzeStatus.setStatus(StatsConstants.ScheduleStatus.FAILED); analyzeStatus.setReason("The statistics tasks running failed"); if (analyzeStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().addOrUpdateAnalyzeStatus(analyzeStatus); } else { GlobalStateMgr.getCurrentAnalyzeMgr().addAnalyzeStatus(analyzeStatus); } } finally { context.getSessionVariable().setQueryTimeoutS(timeout); } ShowResultSet resultSet = analyzeStatus.toShowResult(); if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleAnalyzeProfileStmt() throws IOException { AnalyzeProfileStmt analyzeProfileStmt = (AnalyzeProfileStmt) parsedStmt; String queryId = analyzeProfileStmt.getQueryId(); List<Integer> planNodeIds = analyzeProfileStmt.getPlanNodeIds(); ProfileManager.ProfileElement profileElement = ProfileManager.getInstance().getProfileElement(queryId); Preconditions.checkNotNull(profileElement, "query not exists"); handleExplainStmt(ExplainAnalyzer.analyze(profileElement.plan, RuntimeProfileParser.parseFrom(CompressionUtils.gzipDecompressString(profileElement.profileContent)), planNodeIds)); } private void executeAnalyze(AnalyzeStmt analyzeStmt, AnalyzeStatus analyzeStatus, Database db, Table table) { ConnectContext statsConnectCtx = StatisticUtils.buildConnectContext(); statsConnectCtx.getSessionVariable().setStatisticCollectParallelism( context.getSessionVariable().getStatisticCollectParallelism()); statsConnectCtx.setThreadLocalInfo(); statsConnectCtx.setStatisticsConnection(true); executeAnalyze(statsConnectCtx, analyzeStmt, analyzeStatus, db, table); } private void executeAnalyze(ConnectContext statsConnectCtx, AnalyzeStmt analyzeStmt, AnalyzeStatus analyzeStatus, Database db, Table table) { StatisticExecutor statisticExecutor = new StatisticExecutor(); if (analyzeStmt.isExternal()) { StatsConstants.AnalyzeType analyzeType = analyzeStmt.isSample() ? 
StatsConstants.AnalyzeType.SAMPLE : StatsConstants.AnalyzeType.FULL; statisticExecutor.collectStatistics(statsConnectCtx, StatisticsCollectJobFactory.buildExternalStatisticsCollectJob( analyzeStmt.getTableName().getCatalog(), db, table, analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } else { if (analyzeStmt.getAnalyzeTypeDesc() instanceof AnalyzeHistogramDesc) { statisticExecutor.collectStatistics(statsConnectCtx, new HistogramStatisticsCollectJob(db, table, analyzeStmt.getColumnNames(), StatsConstants.AnalyzeType.HISTOGRAM, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } else { StatsConstants.AnalyzeType analyzeType = analyzeStmt.isSample() ? StatsConstants.AnalyzeType.SAMPLE : StatsConstants.AnalyzeType.FULL; statisticExecutor.collectStatistics(statsConnectCtx, StatisticsCollectJobFactory.buildStatisticsCollectJob(db, table, null, analyzeStmt.getColumnNames(), analyzeType, StatsConstants.ScheduleType.ONCE, analyzeStmt.getProperties()), analyzeStatus, false); } } } private void handleDropStatsStmt() { DropStatsStmt dropStatsStmt = (DropStatsStmt) parsedStmt; Table table = MetaUtils.getTable(context, dropStatsStmt.getTableName()); if (dropStatsStmt.isExternal()) { GlobalStateMgr.getCurrentAnalyzeMgr().dropExternalStats(table.getUUID()); List<String> columns = table.getBaseSchema().stream().map(Column::getName).collect(Collectors.toList()); GlobalStateMgr.getCurrentStatisticStorage().expireConnectorTableColumnStatistics(table, columns); } else { List<String> columns = table.getBaseSchema().stream().filter(d -> !d.isAggregated()).map(Column::getName) .collect(Collectors.toList()); GlobalStateMgr.getCurrentAnalyzeMgr().dropAnalyzeStatus(table.getId()); GlobalStateMgr.getCurrentAnalyzeMgr() .dropBasicStatsMetaAndData(StatisticUtils.buildConnectContext(), Sets.newHashSet(table.getId())); GlobalStateMgr.getCurrentStatisticStorage().expireTableAndColumnStatistics(table, columns); } } private void handleDropHistogramStmt() { DropHistogramStmt dropHistogramStmt = (DropHistogramStmt) parsedStmt; OlapTable table = (OlapTable) MetaUtils.getTable(context, dropHistogramStmt.getTableName()); List<String> columns = table.getBaseSchema().stream().filter(d -> !d.isAggregated()).map(Column::getName) .collect(Collectors.toList()); GlobalStateMgr.getCurrentAnalyzeMgr().dropAnalyzeStatus(table.getId()); GlobalStateMgr.getCurrentAnalyzeMgr() .dropHistogramStatsMetaAndData(StatisticUtils.buildConnectContext(), Sets.newHashSet(table.getId())); GlobalStateMgr.getCurrentStatisticStorage().expireHistogramStatistics(table.getId(), columns); } private void handleKillAnalyzeStmt() { KillAnalyzeStmt killAnalyzeStmt = (KillAnalyzeStmt) parsedStmt; long analyzeId = killAnalyzeStmt.getAnalyzeId(); AnalyzeMgr analyzeManager = GlobalStateMgr.getCurrentAnalyzeMgr(); checkPrivilegeForKillAnalyzeStmt(context, analyzeId); analyzeManager.killConnection(analyzeId); } private void checkTblPrivilegeForKillAnalyzeStmt(ConnectContext context, String catalogName, String dbName, String tableName, long analyzeId) { MetaUtils.getDatabase(catalogName, dbName); MetaUtils.getTable(catalogName, dbName, tableName); Authorizer.checkTableAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), catalogName, dbName, tableName, PrivilegeType.SELECT); Authorizer.checkTableAction(context.getCurrentUserIdentity(), context.getCurrentRoleIds(), catalogName, dbName, tableName, PrivilegeType.INSERT); } public void 
checkPrivilegeForKillAnalyzeStmt(ConnectContext context, long analyzeId) { AnalyzeMgr analyzeManager = GlobalStateMgr.getCurrentAnalyzeMgr(); AnalyzeStatus analyzeStatus = analyzeManager.getAnalyzeStatus(analyzeId); AnalyzeJob analyzeJob = analyzeManager.getAnalyzeJob(analyzeId); if (analyzeStatus != null) { try { String catalogName = analyzeStatus.getCatalogName(); String dbName = analyzeStatus.getDbName(); String tableName = analyzeStatus.getTableName(); checkTblPrivilegeForKillAnalyzeStmt(context, catalogName, dbName, tableName, analyzeId); } catch (MetaNotFoundException ignore) { } } else if (analyzeJob != null) { Set<TableName> tableNames = AnalyzerUtils.getAllTableNamesForAnalyzeJobStmt(analyzeJob.getDbId(), analyzeJob.getTableId()); tableNames.forEach(tableName -> checkTblPrivilegeForKillAnalyzeStmt(context, tableName.getCatalog(), tableName.getDb(), tableName.getTbl(), analyzeId) ); } } private void handleAddSqlBlackListStmt() { AddSqlBlackListStmt addSqlBlackListStmt = (AddSqlBlackListStmt) parsedStmt; SqlBlackList.getInstance().put(addSqlBlackListStmt.getSqlPattern()); } private void handleDelSqlBlackListStmt() { DelSqlBlackListStmt delSqlBlackListStmt = (DelSqlBlackListStmt) parsedStmt; List<Long> indexs = delSqlBlackListStmt.getIndexs(); if (indexs != null) { for (long id : indexs) { SqlBlackList.getInstance().delete(id); } } } private void handleExecAsStmt() throws UserException { ExecuteAsExecutor.execute((ExecuteAsStmt) parsedStmt, context); } private void handleExecScriptStmt() throws IOException, UserException { ShowResultSet resultSet = ExecuteScriptExecutor.execute((ExecuteScriptStmt) parsedStmt, context); if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleSetRole() throws PrivilegeException, UserException { SetRoleExecutor.execute((SetRoleStmt) parsedStmt, context); } private void handleSetDefaultRole() throws PrivilegeException, UserException { SetDefaultRoleExecutor.execute((SetDefaultRoleStmt) parsedStmt, context); } private void handleUnsupportedStmt() { context.getMysqlChannel().reset(); context.getState().setOk(); } private void handleUseDbStmt() throws AnalysisException { UseDbStmt useDbStmt = (UseDbStmt) parsedStmt; try { context.getGlobalStateMgr().changeCatalogDb(context, useDbStmt.getIdentifier()); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void handleUseCatalogStmt() throws AnalysisException { UseCatalogStmt useCatalogStmt = (UseCatalogStmt) parsedStmt; try { String catalogName = useCatalogStmt.getCatalogName(); context.getGlobalStateMgr().changeCatalog(context, catalogName); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void handleSetCatalogStmt() throws AnalysisException { SetCatalogStmt setCatalogStmt = (SetCatalogStmt) parsedStmt; try { String catalogName = setCatalogStmt.getCatalogName(); context.getGlobalStateMgr().changeCatalog(context, catalogName); } catch (Exception e) { context.getState().setError(e.getMessage()); return; } context.getState().setOk(); } private void sendMetaData(ShowResultSetMetaData metaData) throws IOException { serializer.reset(); serializer.writeVInt(metaData.getColumnCount()); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); for (Column col : metaData.getColumns()) { serializer.reset(); serializer.writeField(col.getName(), col.getType()); 
context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } serializer.reset(); MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState()); eofPacket.writeTo(serializer); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } private void sendFields(List<String> colNames, List<Expr> exprs) throws IOException { serializer.reset(); serializer.writeVInt(colNames.size()); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } for (int i = 0; i < colNames.size(); ++i) { serializer.reset(); serializer.writeField(colNames.get(i), exprs.get(i).getOriginType()); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } } serializer.reset(); MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState()); eofPacket.writeTo(serializer); if (isProxy) { proxyResultBuffer.add(serializer.toByteBuffer()); } else { context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } } public void sendShowResult(ShowResultSet resultSet) throws IOException { context.updateReturnRows(resultSet.getResultRows().size()); if (context instanceof HttpConnectContext) { httpResultSender.sendShowResult(resultSet); return; } sendMetaData(resultSet.getMetaData()); for (List<String> row : resultSet.getResultRows()) { serializer.reset(); for (String item : row) { if (item == null || item.equals(FeConstants.NULL_STRING)) { serializer.writeNull(); } else { serializer.writeLenEncodedString(item); } } context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } context.getState().setEof(); } private void handleShow() throws IOException, AnalysisException, DdlException { ShowExecutor executor = new ShowExecutor(context, (ShowStmt) parsedStmt); ShowResultSet resultSet = executor.execute(); if (resultSet == null) { return; } if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); return; } sendShowResult(resultSet); } private void handleExplainStmt(String explainString) throws IOException { if (context instanceof HttpConnectContext) { httpResultSender.sendExplainResult(explainString); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(explainString); } ShowResultSetMetaData metaData = ShowResultSetMetaData.builder() .addColumn(new Column("Explain String", ScalarType.createVarchar(20))) .build(); sendMetaData(metaData); for (String item : explainString.split("\n")) { serializer.reset(); serializer.writeLenEncodedString(item); context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); } context.getState().setEof(); } private String buildExplainString(ExecPlan execPlan, ResourceGroupClassifier.QueryType queryType) { String explainString = ""; if (parsedStmt.getExplainLevel() == StatementBase.ExplainLevel.VERBOSE) { TWorkGroup resourceGroup = CoordinatorPreprocessor.prepareResourceGroup(context, queryType); String resourceGroupStr = resourceGroup != null ? 
resourceGroup.getName() : ResourceGroup.DEFAULT_RESOURCE_GROUP_NAME; explainString += "RESOURCE GROUP: " + resourceGroupStr + "\n\n"; } if (execPlan == null) { explainString += "NOT AVAILABLE"; } else { if (parsedStmt.getExplainLevel() == OPTIMIZER) { explainString += PlannerProfile.printPlannerTimer(context.getPlannerProfile()); } else if (parsedStmt.getExplainLevel() == REWRITE) { explainString += PlannerProfile.printPlannerTrace(context.getPlannerProfile()); } else { explainString += execPlan.getExplainString(parsedStmt.getExplainLevel()); } } return explainString; } private void handleDdlStmt() { try { ShowResultSet resultSet = DDLStmtExecutor.execute(parsedStmt, context); if (resultSet == null) { context.getState().setOk(); } else { if (isProxy) { proxyResultSet = resultSet; context.getState().setEof(); } else { sendShowResult(resultSet); } } } catch (QueryStateException e) { if (e.getQueryState().getStateType() != MysqlStateType.OK) { String sql = AstToStringBuilder.toString(parsedStmt); if (sql == null) { sql = originStmt.originStmt; } LOG.warn("DDL statement (" + sql + ") process failed.", e); } context.setState(e.getQueryState()); } catch (Throwable e) { String sql = AstToStringBuilder.toString(parsedStmt); if (sql == null || sql.isEmpty()) { sql = originStmt.originStmt; } LOG.warn("DDL statement (" + sql + ") process failed.", e); context.getState().setError("Unexpected exception: " + e.getMessage()); } } private void handleExportStmt(UUID queryId) throws Exception { ExportStmt exportStmt = (ExportStmt) parsedStmt; exportStmt.setExportStartTime(context.getStartTime()); context.getGlobalStateMgr().getExportMgr().addExportJob(queryId, exportStmt); } private void handleUpdateFailPointStatusStmt() throws Exception { FailPointExecutor executor = new FailPointExecutor(context, parsedStmt); executor.execute(); } public PQueryStatistics getQueryStatisticsForAuditLog() { if (statisticsForAuditLog == null && coord != null) { statisticsForAuditLog = coord.getAuditStatistics(); } if (statisticsForAuditLog == null) { statisticsForAuditLog = new PQueryStatistics(); } if (statisticsForAuditLog.scanBytes == null) { statisticsForAuditLog.scanBytes = 0L; } if (statisticsForAuditLog.scanRows == null) { statisticsForAuditLog.scanRows = 0L; } if (statisticsForAuditLog.cpuCostNs == null) { statisticsForAuditLog.cpuCostNs = 0L; } if (statisticsForAuditLog.memCostBytes == null) { statisticsForAuditLog.memCostBytes = 0L; } if (statisticsForAuditLog.spillBytes == null) { statisticsForAuditLog.spillBytes = 0L; } return statisticsForAuditLog; } public void handleInsertOverwrite(InsertStmt insertStmt) throws Exception { Database database = MetaUtils.getDatabase(context, insertStmt.getTableName()); Table table = insertStmt.getTargetTable(); if (!(table instanceof OlapTable)) { LOG.warn("insert overwrite table:{} type:{} is not supported", table.getName(), table.getClass()); throw new RuntimeException("not supported table type for insert overwrite"); } OlapTable olapTable = (OlapTable) insertStmt.getTargetTable(); InsertOverwriteJob job = new InsertOverwriteJob(GlobalStateMgr.getCurrentState().getNextId(), insertStmt, database.getId(), olapTable.getId()); if (!database.writeLockAndCheckExist()) { throw new DmlException("database:%s does not exist.", database.getFullName()); } try { CreateInsertOverwriteJobLog info = new CreateInsertOverwriteJobLog(job.getJobId(), job.getTargetDbId(), job.getTargetTableId(), job.getSourcePartitionIds()); 
GlobalStateMgr.getCurrentState().getEditLog().logCreateInsertOverwrite(info); } finally { database.writeUnlock(); } insertStmt.setOverwriteJobId(job.getJobId()); InsertOverwriteJobMgr manager = GlobalStateMgr.getCurrentState().getInsertOverwriteJobMgr(); manager.executeJob(context, this, job); } /** * `handleDMLStmtWithProfile` executes DML statement and write profile at the end. * NOTE: `writeProfile` can only be called once, otherwise the profile detail will be lost. */ public void handleDMLStmtWithProfile(ExecPlan execPlan, DmlStmt stmt, long beginTimeInNanoSecond) throws Exception { try { handleDMLStmt(execPlan, stmt); } catch (Throwable t) { LOG.warn("DML statement(" + originStmt.originStmt + ") process failed.", t); throw t; } finally { if (context.getSessionVariable().isEnableProfile()) { writeProfile(execPlan, beginTimeInNanoSecond); if (parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel())) { handleExplainStmt(ExplainAnalyzer.analyze(ProfilingExecPlan.buildFrom(execPlan), profile, null)); } } QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } } /** * `handleDMLStmt` only executes DML statement and no write profile at the end. */ public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception { boolean isExplainAnalyze = parsedStmt.isExplain() && StatementBase.ExplainLevel.ANALYZE.equals(parsedStmt.getExplainLevel()); boolean isSchedulerExplain = parsedStmt.isExplain() && StatementBase.ExplainLevel.SCHEDULER.equals(parsedStmt.getExplainLevel()); if (isExplainAnalyze) { context.getSessionVariable().setEnableProfile(true); context.getSessionVariable().setPipelineProfileLevel(1); context.getSessionVariable().setProfileLimitFold(false); } else if (isSchedulerExplain) { } else if (stmt.isExplain()) { handleExplainStmt(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.INSERT)); return; } if (context.getQueryDetail() != null) { context.getQueryDetail().setExplain(buildExplainString(execPlan, ResourceGroupClassifier.QueryType.INSERT)); } if (stmt instanceof DeleteStmt && ((DeleteStmt) stmt).shouldHandledByDeleteHandler()) { try { context.getGlobalStateMgr().getDeleteMgr().process((DeleteStmt) stmt); context.getState().setOk(); } catch (QueryStateException e) { if (e.getQueryState().getStateType() != MysqlStateType.OK) { LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e); } context.setState(e.getQueryState()); } return; } MetaUtils.normalizationTableName(context, stmt.getTableName()); String catalogName = stmt.getTableName().getCatalog(); String dbName = stmt.getTableName().getDb(); String tableName = stmt.getTableName().getTbl(); Database database = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName); final Table targetTable; if (stmt instanceof InsertStmt && ((InsertStmt) stmt).getTargetTable() != null) { targetTable = ((InsertStmt) stmt).getTargetTable(); } else { targetTable = GlobalStateMgr.getCurrentState().getMetadataMgr().getTable(catalogName, dbName, tableName); } if (isExplainAnalyze) { Preconditions.checkState(targetTable instanceof OlapTable, "explain analyze only supports insert into olap native table"); } if (parsedStmt instanceof InsertStmt && ((InsertStmt) parsedStmt).isOverwrite() && !((InsertStmt) parsedStmt).hasOverwriteJob() && !(targetTable.isIcebergTable() || targetTable.isHiveTable())) { handleInsertOverwrite((InsertStmt) parsedStmt); return; } String label = DebugUtil.printId(context.getExecutionId()); if (stmt instanceof 
InsertStmt) { String stmtLabel = ((InsertStmt) stmt).getLabel(); label = Strings.isNullOrEmpty(stmtLabel) ? "insert_" + label : stmtLabel; } else if (stmt instanceof UpdateStmt) { label = "update_" + label; } else if (stmt instanceof DeleteStmt) { label = "delete_" + label; } else { throw unsupportedException( "Unsupported dml statement " + parsedStmt.getClass().getSimpleName()); } TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING; MetricRepo.COUNTER_LOAD_ADD.increase(1L); long transactionId = -1; TransactionState txnState = null; if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; TAuthenticateParams authenticateParams = new TAuthenticateParams(); authenticateParams.setUser(externalTable.getSourceTableUser()); authenticateParams.setPasswd(externalTable.getSourceTablePassword()); authenticateParams.setHost(context.getRemoteIP()); authenticateParams.setDb_name(externalTable.getSourceTableDbName()); authenticateParams.setTable_names(Lists.newArrayList(externalTable.getSourceTableName())); transactionId = GlobalStateMgr.getCurrentGlobalTransactionMgr() .beginRemoteTransaction(externalTable.getSourceTableDbId(), Lists.newArrayList(externalTable.getSourceTableId()), label, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), new TransactionState.TxnCoordinator(TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), sourceType, context.getSessionVariable().getQueryTimeoutS(), authenticateParams); } else if (targetTable instanceof SystemTable || (targetTable.isIcebergTable() || targetTable.isHiveTable())) { } else { transactionId = GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction( database.getId(), Lists.newArrayList(targetTable.getId()), label, new TransactionState.TxnCoordinator(TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), sourceType, context.getSessionVariable().getQueryTimeoutS()); txnState = GlobalStateMgr.getCurrentGlobalTransactionMgr() .getTransactionState(database.getId(), transactionId); if (txnState == null) { throw new DdlException("txn does not exist: " + transactionId); } if (targetTable instanceof OlapTable) { txnState.addTableIndexes((OlapTable) targetTable); } } if (context.getMysqlChannel() != null) { context.getMysqlChannel().reset(); } long createTime = System.currentTimeMillis(); long loadedRows = 0; int filteredRows = 0; long loadedBytes = 0; long jobId = -1; long estimateScanRows = -1; TransactionStatus txnStatus = TransactionStatus.ABORTED; boolean insertError = false; String trackingSql = ""; try { if (execPlan.getFragments().get(0).getSink() instanceof OlapTableSink) { context.getSessionVariable().setPreferComputeNode(false); context.getSessionVariable().setUseComputeNodes(0); OlapTableSink dataSink = (OlapTableSink) execPlan.getFragments().get(0).getSink(); dataSink.init(context.getExecutionId(), transactionId, database.getId(), ConnectContext.get().getSessionVariable().getQueryTimeoutS()); dataSink.complete(); } coord = getCoordinatorFactory().createInsertScheduler( context, execPlan.getFragments(), execPlan.getScanNodes(), execPlan.getDescTbl().toThrift()); List<ScanNode> scanNodes = execPlan.getScanNodes(); boolean containOlapScanNode = false; for (ScanNode scanNode : scanNodes) { if (scanNode instanceof OlapScanNode) { estimateScanRows += ((OlapScanNode) scanNode).getActualRows(); containOlapScanNode = true; } } TLoadJobType type; if (containOlapScanNode) { 
coord.setLoadJobType(TLoadJobType.INSERT_QUERY); type = TLoadJobType.INSERT_QUERY; } else { estimateScanRows = execPlan.getFragments().get(0).getPlanRoot().getCardinality(); coord.setLoadJobType(TLoadJobType.INSERT_VALUES); type = TLoadJobType.INSERT_VALUES; } context.setStatisticsJob(AnalyzerUtils.isStatisticsJob(context, parsedStmt)); if (!(targetTable.isIcebergTable() || targetTable.isHiveTable())) { jobId = context.getGlobalStateMgr().getLoadMgr().registerLoadJob( label, database.getFullName(), targetTable.getId(), EtlJobType.INSERT, createTime, estimateScanRows, type, ConnectContext.get().getSessionVariable().getQueryTimeoutS()); } coord.setLoadJobId(jobId); trackingSql = "select tracking_log from information_schema.load_tracking_logs where job_id=" + jobId; QeProcessorImpl.QueryInfo queryInfo = new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), queryInfo); if (isSchedulerExplain) { coord.startSchedulingWithoutDeploy(); handleExplainStmt(coord.getSchedulerExplain()); return; } coord.exec(); coord.setTopProfileSupplier(this::buildTopLevelProfile); coord.setExecPlanSupplier(() -> execPlan); long jobDeadLineMs = System.currentTimeMillis() + context.getSessionVariable().getQueryTimeoutS() * 1000; coord.join(context.getSessionVariable().getQueryTimeoutS()); if (!coord.isDone()) { /* * In this case, There are two factors that lead query cancelled: * 1: TIMEOUT * 2: BE EXCEPTION * So we should distinguish these two factors. */ if (!coord.checkBackendState()) { if (Config.log_plan_cancelled_by_crash_be && context.getQueryDetail() == null) { LOG.warn("Query cancelled by crash of backends [QueryId={}] [SQL={}] [Plan={}]", DebugUtil.printId(context.getExecutionId()), originStmt == null ? 
"" : originStmt.originStmt, execPlan.getExplainString(TExplainLevel.COSTS)); } coord.cancel(); ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_EXCEPTION); } else { coord.cancel(); if (coord.isThriftServerHighLoad()) { ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_TIMEOUT, "Please check the thrift-server-pool metrics, " + "if the pool size reaches thrift_server_max_worker_threads(default is 4096), " + "you can set the config to a higher value in fe.conf, " + "or set parallel_fragment_exec_instance_num to a lower value in session variable"); } else { ErrorReport.reportDdlException(ErrorCode.ERR_QUERY_TIMEOUT, "Increase the query_timeout session variable and retry"); } } } if (!coord.getExecStatus().ok()) { String errMsg = coord.getExecStatus().getErrorMsg(); if (errMsg.length() == 0) { errMsg = coord.getExecStatus().getErrorCodeString(); } LOG.warn("insert failed: {}", errMsg); ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT); } LOG.debug("delta files is {}", coord.getDeltaUrls()); if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) { loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL)); } if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) { filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL)); } if (coord.getLoadCounters().get(LoadJob.LOADED_BYTES) != null) { loadedBytes = Long.parseLong(coord.getLoadCounters().get(LoadJob.LOADED_BYTES)); } if (context.getSessionVariable().getEnableInsertStrict()) { if (filteredRows > 0) { if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; GlobalStateMgr.getCurrentGlobalTransactionMgr().abortRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), TransactionCommitFailedException.FILTER_DATA_IN_STRICT_MODE + ", tracking sql = " + trackingSql ); } else if (targetTable instanceof SystemTable || (targetTable.isHiveTable() || targetTable.isIcebergTable())) { } else { GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction( database.getId(), transactionId, TransactionCommitFailedException.FILTER_DATA_IN_STRICT_MODE + ", tracking sql = " + trackingSql, TabletFailInfo.fromThrift(coord.getFailInfos()) ); } context.getState().setError("Insert has filtered data in strict mode, txn_id = " + transactionId + " tracking sql = " + trackingSql); insertError = true; return; } } if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; if (GlobalStateMgr.getCurrentGlobalTransactionMgr().commitRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), coord.getCommitInfos())) { txnStatus = TransactionStatus.VISIBLE; MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); } else { txnStatus = TransactionStatus.COMMITTED; } } else if (targetTable instanceof SystemTable) { txnStatus = TransactionStatus.VISIBLE; } else if (targetTable instanceof IcebergTable) { List<TSinkCommitInfo> commitInfos = coord.getSinkCommitInfos(); if (stmt instanceof InsertStmt && ((InsertStmt) stmt).isOverwrite()) { for (TSinkCommitInfo commitInfo : commitInfos) { commitInfo.setIs_overwrite(true); } } context.getGlobalStateMgr().getMetadataMgr().finishSink(catalogName, dbName, tableName, commitInfos); txnStatus = TransactionStatus.VISIBLE; label = "FAKE_ICEBERG_SINK_LABEL"; } else 
if (targetTable instanceof HiveTable) { List<TSinkCommitInfo> commitInfos = coord.getSinkCommitInfos(); HiveTableSink hiveTableSink = (HiveTableSink) execPlan.getFragments().get(0).getSink(); String stagingDir = hiveTableSink.getStagingDir(); if (stmt instanceof InsertStmt) { InsertStmt insertStmt = (InsertStmt) stmt; for (TSinkCommitInfo commitInfo : commitInfos) { commitInfo.setStaging_dir(stagingDir); if (insertStmt.isOverwrite()) { commitInfo.setIs_overwrite(true); } } } context.getGlobalStateMgr().getMetadataMgr().finishSink(catalogName, dbName, tableName, commitInfos); txnStatus = TransactionStatus.VISIBLE; label = "FAKE_HIVE_SINK_LABEL"; } else { if (isExplainAnalyze) { GlobalStateMgr.getCurrentGlobalTransactionMgr() .abortTransaction(database.getId(), transactionId, "Explain Analyze"); txnStatus = TransactionStatus.ABORTED; } else if (GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction( database, transactionId, TabletCommitInfo.fromThrift(coord.getCommitInfos()), TabletFailInfo.fromThrift(coord.getFailInfos()), Config.enable_sync_publish ? jobDeadLineMs - System.currentTimeMillis() : context.getSessionVariable().getTransactionVisibleWaitTimeout() * 1000, new InsertTxnCommitAttachment(loadedRows))) { txnStatus = TransactionStatus.VISIBLE; MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); if (null != targetTable) { TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(targetTable.getId()); entity.counterInsertLoadFinishedTotal.increase(1L); entity.counterInsertLoadRowsTotal.increase(loadedRows); entity.counterInsertLoadBytesTotal.increase(loadedBytes); } } else { txnStatus = TransactionStatus.COMMITTED; } } } catch (Throwable t) { String failedSql = ""; if (originStmt != null && originStmt.originStmt != null) { failedSql = originStmt.originStmt; } LOG.warn("failed to handle stmt [{}] label: {}", failedSql, label, t); String errMsg = t.getMessage(); if (errMsg == null) { errMsg = "A problem occurred while executing the [ " + failedSql + "] statement with label:" + label; } try { if (targetTable instanceof ExternalOlapTable) { ExternalOlapTable externalTable = (ExternalOlapTable) targetTable; GlobalStateMgr.getCurrentGlobalTransactionMgr().abortRemoteTransaction( externalTable.getSourceTableDbId(), transactionId, externalTable.getSourceTableHost(), externalTable.getSourceTablePort(), errMsg); } else { GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction( database.getId(), transactionId, errMsg, coord == null ? Lists.newArrayList() : TabletFailInfo.fromThrift(coord.getFailInfos())); } } catch (Exception abortTxnException) { LOG.warn("errors when abort txn", abortTxnException); } StringBuilder sb = new StringBuilder(errMsg); if (coord != null && !Strings.isNullOrEmpty(coord.getTrackingUrl())) { sb.append(". 
tracking sql: ").append(trackingSql); } context.getState().setError(sb.toString()); try { if (jobId != -1) { Preconditions.checkNotNull(coord); context.getGlobalStateMgr().getLoadMgr() .recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "Cancelled, msg: " + t.getMessage(), coord.getTrackingUrl()); jobId = -1; } } catch (Exception abortTxnException) { LOG.warn("errors when cancel insert load job {}", jobId); } throw new UserException(t.getMessage()); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); if (insertError) { try { if (jobId != -1) { context.getGlobalStateMgr().getLoadMgr() .recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "Cancelled", coord.getTrackingUrl()); jobId = -1; } } catch (Exception abortTxnException) { LOG.warn("errors when cancel insert load job {}", jobId); } } else if (txnState != null) { StatisticUtils.triggerCollectionOnFirstLoad(txnState, database, targetTable, true); } } String errMsg = ""; if (txnStatus.equals(TransactionStatus.COMMITTED)) { String timeoutInfo = GlobalStateMgr.getCurrentGlobalTransactionMgr() .getTxnPublishTimeoutDebugInfo(database.getId(), transactionId); LOG.warn("txn {} publish timeout {}", transactionId, timeoutInfo); if (timeoutInfo.length() > 240) { timeoutInfo = timeoutInfo.substring(0, 240) + "..."; } errMsg = "Publish timeout " + timeoutInfo; } try { if (jobId != -1) { context.getGlobalStateMgr().getLoadMgr().recordFinishedOrCacnelledLoadJob(jobId, EtlJobType.INSERT, "", coord.getTrackingUrl()); } } catch (MetaNotFoundException e) { LOG.warn("Record info of insert load with error {}", e.getMessage(), e); errMsg = "Record info of insert load with error " + e.getMessage(); } StringBuilder sb = new StringBuilder(); sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name()); sb.append("', 'txnId':'").append(transactionId).append("'"); if (!Strings.isNullOrEmpty(errMsg)) { sb.append(", 'err':'").append(errMsg).append("'"); } sb.append("}"); context.getState().setOk(loadedRows, filteredRows, sb.toString()); } public String getOriginStmtInString() { if (originStmt == null) { return ""; } return originStmt.originStmt; } public Pair<List<TResultBatch>, Status> executeStmtWithExecPlan(ConnectContext context, ExecPlan plan) { List<TResultBatch> sqlResult = Lists.newArrayList(); try { UUID uuid = context.getQueryId(); context.setExecutionId(UUIDUtil.toTUniqueId(uuid)); coord = getCoordinatorFactory().createQueryScheduler( context, plan.getFragments(), plan.getScanNodes(), plan.getDescTbl().toThrift()); QeProcessorImpl.INSTANCE.registerQuery(context.getExecutionId(), coord); coord.exec(); RowBatch batch; do { batch = coord.getNext(); if (batch.getBatch() != null) { sqlResult.add(batch.getBatch()); } } while (!batch.isEos()); } catch (Exception e) { LOG.warn(e); coord.getExecStatus().setInternalErrorStatus(e.getMessage()); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(context.getExecutionId()); } return Pair.create(sqlResult, coord.getExecStatus()); } public List<ByteBuffer> getProxyResultBuffer() { return proxyResultBuffer; } }
If the user wants delegation, then it's the current `new MavenLocalRepositoryManager( repoSystem.newLocalRepositoryManager(newSession, new LocalRepository(builder.repoHome.toString())), Paths.get(MavenRepoInitializer.getLocalRepo(MavenRepoInitializer.getSettings())));` (see the usage sketch after this example).
private MavenArtifactResolver(Builder builder) throws AppModelResolverException { this.repoSystem = builder.repoSystem == null ? MavenRepoInitializer.getRepositorySystem( (builder.offline == null ? (builder.repoSession == null ? MavenRepoInitializer.getSettings().isOffline() : builder.repoSession.isOffline()) : builder.offline), builder.workspace) : builder.repoSystem; final DefaultRepositorySystemSession newSession = builder.repoSession == null ? MavenRepoInitializer.newSession(repoSystem) : new DefaultRepositorySystemSession(builder.repoSession); if(builder.offline != null) { newSession.setOffline(builder.offline); } if(builder.repoHome != null) { final MavenLocalRepositoryManager appCreatorLocalRepoManager = new MavenLocalRepositoryManager( repoSystem.newLocalRepositoryManager(newSession, new LocalRepository(builder.repoHome.toString())), builder.localRepoHome==null ? Paths.get(MavenRepoInitializer.getLocalRepo(MavenRepoInitializer.getSettings())) : builder.localRepoHome); newSession.setLocalRepositoryManager(appCreatorLocalRepoManager); localRepoManager = appCreatorLocalRepoManager; } else { localRepoManager = null; } if(newSession.getCache() == null) { newSession.setCache(new DefaultRepositoryCache()); } if (builder.workspace != null) { newSession.setWorkspaceReader(builder.workspace); } this.repoSession = newSession; this.remoteRepos = builder.remoteRepos == null ? MavenRepoInitializer.getRemoteRepos(this.repoSystem, this.repoSession) : builder.remoteRepos; final DefaultRemoteRepositoryManager remoteRepoManager = new DefaultRemoteRepositoryManager(); remoteRepoManager.initService(MavenRepositorySystemUtils.newServiceLocator()); this.remoteRepoManager = remoteRepoManager; }
builder.localRepoHome==null ? Paths.get(MavenRepoInitializer.getLocalRepo(MavenRepoInitializer.getSettings())) : builder.localRepoHome);
private MavenArtifactResolver(Builder builder) throws AppModelResolverException { this.repoSystem = builder.repoSystem == null ? MavenRepoInitializer.getRepositorySystem( (builder.offline == null ? (builder.repoSession == null ? MavenRepoInitializer.getSettings().isOffline() : builder.repoSession.isOffline()) : builder.offline), builder.workspace) : builder.repoSystem; final DefaultRepositorySystemSession newSession = builder.repoSession == null ? MavenRepoInitializer.newSession(repoSystem) : new DefaultRepositorySystemSession(builder.repoSession); if(builder.offline != null) { newSession.setOffline(builder.offline); } MavenLocalRepositoryManager lrm = null; if (builder.repoHome != null) { if (builder.reTryFailedResolutionsAgainstDefaultLocalRepo) { lrm = new MavenLocalRepositoryManager( repoSystem.newLocalRepositoryManager(newSession, new LocalRepository(builder.repoHome.toString())), Paths.get(MavenRepoInitializer.getLocalRepo(MavenRepoInitializer.getSettings()))); newSession.setLocalRepositoryManager(lrm); } else { newSession.setLocalRepositoryManager( repoSystem.newLocalRepositoryManager(newSession, new LocalRepository(builder.repoHome.toString()))); } } localRepoManager = lrm; if(newSession.getCache() == null) { newSession.setCache(new DefaultRepositoryCache()); } if (builder.workspace != null) { newSession.setWorkspaceReader(builder.workspace); } this.repoSession = newSession; this.remoteRepos = builder.remoteRepos == null ? MavenRepoInitializer.getRemoteRepos(this.repoSystem, this.repoSession) : builder.remoteRepos; final DefaultRemoteRepositoryManager remoteRepoManager = new DefaultRemoteRepositoryManager(); remoteRepoManager.initService(MavenRepositorySystemUtils.newServiceLocator()); this.remoteRepoManager = remoteRepoManager; }
class Builder { private Path localRepoHome; private Path repoHome; private RepositorySystem repoSystem; private RepositorySystemSession repoSession; private List<RemoteRepository> remoteRepos = null; private Boolean offline; private LocalWorkspace workspace; private Builder() { } public Builder setLocalRepoHome(Path localHome) { this.localRepoHome = localHome; return this; } public Builder setRepoHome(Path home) { this.repoHome = home; return this; } public Builder setRepositorySystem(RepositorySystem system) { this.repoSystem = system; return this; } public Builder setRepositorySystemSession(RepositorySystemSession session) { this.repoSession = session; return this; } public Builder setRemoteRepositories(List<RemoteRepository> repos) { this.remoteRepos = repos; return this; } public Builder setOffline(boolean offline) { this.offline = offline; return this; } public Builder setWorkspace(LocalWorkspace workspace) { this.workspace = workspace; return this; } public MavenArtifactResolver build() throws AppModelResolverException { return new MavenArtifactResolver(this); } }
class Builder { private Path repoHome; private boolean reTryFailedResolutionsAgainstDefaultLocalRepo; private RepositorySystem repoSystem; private RepositorySystemSession repoSession; private List<RemoteRepository> remoteRepos = null; private Boolean offline; private LocalWorkspace workspace; private Builder() { } /** * In case custom local repository location is configured using {@link #setRepoHome(Path)}, * this method can be used to enable artifact resolutions that failed for the configured * custom local repository to be re-tried against the default user local repository before * failing. * <p>NOTE: the default behavior is <b>not</b> to use the default user local repository as the fallback one. * * @param value true if the failed resolution requests should be re-tried against the default * user local repo before failing * * @return this builder instance */ public Builder setReTryFailedResolutionsAgainstDefaultLocalRepo(boolean value) { this.reTryFailedResolutionsAgainstDefaultLocalRepo = value; return this; } public Builder setRepoHome(Path home) { this.repoHome = home; return this; } public Builder setRepositorySystem(RepositorySystem system) { this.repoSystem = system; return this; } public Builder setRepositorySystemSession(RepositorySystemSession session) { this.repoSession = session; return this; } public Builder setRemoteRepositories(List<RemoteRepository> repos) { this.remoteRepos = repos; return this; } public Builder setOffline(boolean offline) { this.offline = offline; return this; } public Builder setWorkspace(LocalWorkspace workspace) { this.workspace = workspace; return this; } public MavenArtifactResolver build() throws AppModelResolverException { return new MavenArtifactResolver(this); } }
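A minimal usage sketch tying the comment above to the refactored builder: the caller opts into the delegation (fallback to the default user local repository) via the new flag. The static factory `MavenArtifactResolver.builder()` and the repository path are illustrative assumptions that do not appear in this snippet; only the two setters and `build()` come from the `Builder` above.

```java
import java.nio.file.Paths;

public class ResolverUsageSketch {
    public static void main(String[] args) throws Exception {
        // Resolutions first hit the custom repo; with the flag enabled, misses are
        // re-tried against the default user local repository before failing.
        MavenArtifactResolver resolver = MavenArtifactResolver.builder() // assumed factory
                .setRepoHome(Paths.get("/tmp/custom-repo"))              // illustrative path
                .setReTryFailedResolutionsAgainstDefaultLocalRepo(true)  // opt into delegation
                .build();
    }
}
```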
Here are my suggestions based on the given code:

1. Import optimization: avoid using `*` in imports; specific import statements enhance readability and maintainability, helping others understand which classes are in use.

2. Simplicity and efficiency: `partitionNames.contains(partitionName)` is called twice within the same block, which can hurt performance when there is a large number of partitions. Consider storing the result in a variable if the collection is large or the lookup is expensive.

3. Clarity: add comments to explain what the code is doing, especially for non-obvious parts. The current comment is in Chinese, which may not be universally understood; consider translating it to English.

4. Error handling: there does not seem to be any handling for schema-resolution or database-connectivity failures. Exceptions should be logged properly or handled accordingly; otherwise it will be hard to debug when something goes wrong.

5. Thread safety: if this class can be accessed by multiple threads concurrently, consider thread safety. Immutable data structures are usually a good approach; `ImmutableList.builder()` is a good start, but make sure immutability is applied consistently across the application.

6. Use of Guava: you are already using the Guava library, so make full use of it. Instead of calling `.contains()` on a list (O(n) per call), convert `partitionNames` into a `HashSet` (for example via Guava's `Sets` class) for O(1) lookups, which is more efficient when doing multiple contains checks (a sketch follows the revised method below).

7. Closing resources: the code relies on an external resource, the connection. Make sure connections are always closed regardless of whether the operation succeeded; the try-with-resources statement closes them automatically.

8. Unit testing: write unit tests for this method to avoid regressions in future code changes and to confirm correct behavior.

Remember, these suggestions may vary based on your project requirements and standards.
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { try (Connection connection = getConnection()) { List<Partition> partitions = schemaResolver.getPartitions(connection, table); String minInt = IntLiteral.createMaxValue(Type.INT).getStringValue(); String minDate = DateLiteral.createMaxValue(Type.DATE).getStringValue(); ImmutableList.Builder<PartitionInfo> list = ImmutableList.builder(); if (!partitions.isEmpty()) { for (Partition partition : partitions) { String partitionName = partition.getPartitionName(); if (partitionNames.contains(partitionName)) { list.add(partition); } if (partitionName.equalsIgnoreCase(PartitionUtil.MYSQL_PARTITION_MAXVALUE)) { if (partitionNames.contains(minInt) || partitionNames.contains(minDate)) { list.add(partition); } } } return list.build(); } else { return Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } }
} else {
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { try (Connection connection = getConnection()) { List<Partition> partitions = schemaResolver.getPartitions(connection, table); String maxInt = IntLiteral.createMaxValue(Type.INT).getStringValue(); String maxDate = DateLiteral.createMaxValue(Type.DATE).getStringValue(); ImmutableList.Builder<PartitionInfo> list = ImmutableList.builder(); if (partitions.isEmpty()) { return Lists.newArrayList(); } for (Partition partition : partitions) { String partitionName = partition.getPartitionName(); if (partitionNames.contains(partitionName)) { list.add(partition); } if (partitionName.equalsIgnoreCase(PartitionUtil.MYSQL_PARTITION_MAXVALUE)) { if (partitionNames.contains(maxInt) || partitionNames.contains(maxDate)) { list.add(partition); } } } return list.build(); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } }
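Following up on suggestion 6 above, a minimal sketch of the set-based lookup applied to the revised loop. `filterPartitions` is a hypothetical helper name introduced only for illustration; `Sets.newHashSet` is Guava, and every other identifier comes from the method above.

```java
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;

// Hypothetical helper: same filtering logic as getPartitions, but membership
// checks go through a HashSet, so each contains() is O(1) instead of O(n).
List<PartitionInfo> filterPartitions(List<Partition> partitions, List<String> partitionNames,
                                     String maxInt, String maxDate) {
    Set<String> partitionNameSet = Sets.newHashSet(partitionNames); // built once, reused below
    ImmutableList.Builder<PartitionInfo> list = ImmutableList.builder();
    for (Partition partition : partitions) {
        String partitionName = partition.getPartitionName();
        if (partitionNameSet.contains(partitionName)) {
            list.add(partition);
        }
        if (partitionName.equalsIgnoreCase(PartitionUtil.MYSQL_PARTITION_MAXVALUE)
                && (partitionNameSet.contains(maxInt) || partitionNameSet.contains(maxDate))) {
            list.add(partition);
        }
    }
    return list.build();
}
```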
class JDBCMetadata implements ConnectorMetadata { private static Logger LOG = LogManager.getLogger(JDBCMetadata.class); private Map<String, String> properties; private String catalogName; private JDBCSchemaResolver schemaResolver; public JDBCMetadata(Map<String, String> properties, String catalogName) { this.properties = properties; this.catalogName = catalogName; try { Class.forName(properties.get(JDBCResource.DRIVER_CLASS)); } catch (ClassNotFoundException e) { LOG.warn(e.getMessage()); throw new StarRocksConnectorException("doesn't find class: " + e.getMessage()); } if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("mysql")) { schemaResolver = new MysqlSchemaResolver(); } else if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("postgresql")) { schemaResolver = new PostgresSchemaResolver(); } else { LOG.warn("{} not support yet", properties.get(JDBCResource.DRIVER_CLASS)); throw new StarRocksConnectorException(properties.get(JDBCResource.DRIVER_CLASS) + " not support yet"); } } public Connection getConnection() throws SQLException { return DriverManager.getConnection(properties.get(JDBCResource.URI), properties.get(JDBCResource.USER), properties.get(JDBCResource.PASSWORD)); } @Override public List<String> listDbNames() { try (Connection connection = getConnection()) { return Lists.newArrayList(schemaResolver.listSchemas(connection)); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Database getDb(String name) { try { if (listDbNames().contains(name)) { return new Database(0, name); } else { return null; } } catch (StarRocksConnectorException e) { return null; } } @Override public List<String> listTableNames(String dbName) { try (Connection connection = getConnection()) { try (ResultSet resultSet = schemaResolver.getTables(connection, dbName)) { ImmutableList.Builder<String> list = ImmutableList.builder(); while (resultSet.next()) { String tableName = resultSet.getString("TABLE_NAME"); list.add(tableName); } return list.build(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Table getTable(String dbName, String tblName) { try (Connection connection = getConnection()) { ResultSet columnSet = schemaResolver.getColumns(connection, dbName, tblName); List<Column> fullSchema = schemaResolver.convertToSRTable(columnSet); List<Column> partitionColumns = listPartitionColumns(dbName, tblName, fullSchema); if (fullSchema.isEmpty()) { return null; } JDBCTableName tableKey = JDBCTableName.of(catalogName, dbName, tblName); if (JDBCTableIdCache.containsTableId(tableKey)) { return schemaResolver.getTable(JDBCTableIdCache.getTableId(tableKey), tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } else { Integer tableId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); JDBCTableIdCache.putTableId(tableKey, tableId); return schemaResolver.getTable(tableId, tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } } catch (SQLException | DdlException e) { LOG.warn(e.getMessage()); return null; } } @Override public List<String> listPartitionNames(String databaseName, String tableName) { try (Connection connection = getConnection()) { return schemaResolver.listPartitionNames(connection, databaseName, tableName); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } public List<Column> listPartitionColumns(String databaseName, String tableName, List<Column> fullSchema) { try 
(Connection connection = getConnection()) { Set<String> partitionColumnNames = schemaResolver.listPartitionColumns(connection, databaseName, tableName) .stream().map(columnName -> columnName.toLowerCase()).collect(Collectors.toSet()); if (partitionColumnNames.size() > 0) { return fullSchema.stream().filter(column -> partitionColumnNames.contains(column.getName().toLowerCase())) .collect(Collectors.toList()); } else { return Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override }
class JDBCMetadata implements ConnectorMetadata { private static Logger LOG = LogManager.getLogger(JDBCMetadata.class); private Map<String, String> properties; private String catalogName; private JDBCSchemaResolver schemaResolver; public JDBCMetadata(Map<String, String> properties, String catalogName) { this.properties = properties; this.catalogName = catalogName; try { Class.forName(properties.get(JDBCResource.DRIVER_CLASS)); } catch (ClassNotFoundException e) { LOG.warn(e.getMessage()); throw new StarRocksConnectorException("doesn't find class: " + e.getMessage()); } if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("mysql")) { schemaResolver = new MysqlSchemaResolver(); } else if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("postgresql")) { schemaResolver = new PostgresSchemaResolver(); } else { LOG.warn("{} not support yet", properties.get(JDBCResource.DRIVER_CLASS)); throw new StarRocksConnectorException(properties.get(JDBCResource.DRIVER_CLASS) + " not support yet"); } } public Connection getConnection() throws SQLException { return DriverManager.getConnection(properties.get(JDBCResource.URI), properties.get(JDBCResource.USER), properties.get(JDBCResource.PASSWORD)); } @Override public List<String> listDbNames() { try (Connection connection = getConnection()) { return Lists.newArrayList(schemaResolver.listSchemas(connection)); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Database getDb(String name) { try { if (listDbNames().contains(name)) { return new Database(0, name); } else { return null; } } catch (StarRocksConnectorException e) { return null; } } @Override public List<String> listTableNames(String dbName) { try (Connection connection = getConnection()) { try (ResultSet resultSet = schemaResolver.getTables(connection, dbName)) { ImmutableList.Builder<String> list = ImmutableList.builder(); while (resultSet.next()) { String tableName = resultSet.getString("TABLE_NAME"); list.add(tableName); } return list.build(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Table getTable(String dbName, String tblName) { try (Connection connection = getConnection()) { ResultSet columnSet = schemaResolver.getColumns(connection, dbName, tblName); List<Column> fullSchema = schemaResolver.convertToSRTable(columnSet); List<Column> partitionColumns = listPartitionColumns(dbName, tblName, fullSchema); if (fullSchema.isEmpty()) { return null; } JDBCTableName tableKey = JDBCTableName.of(catalogName, dbName, tblName); if (JDBCTableIdCache.containsTableId(tableKey)) { return schemaResolver.getTable(JDBCTableIdCache.getTableId(tableKey), tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } else { Integer tableId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); JDBCTableIdCache.putTableId(tableKey, tableId); return schemaResolver.getTable(tableId, tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } } catch (SQLException | DdlException e) { LOG.warn(e.getMessage()); return null; } } @Override public List<String> listPartitionNames(String databaseName, String tableName) { try (Connection connection = getConnection()) { return schemaResolver.listPartitionNames(connection, databaseName, tableName); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } public List<Column> listPartitionColumns(String databaseName, String tableName, List<Column> fullSchema) { try 
(Connection connection = getConnection()) { Set<String> partitionColumnNames = schemaResolver.listPartitionColumns(connection, databaseName, tableName) .stream().map(columnName -> columnName.toLowerCase()).collect(Collectors.toSet()); if (partitionColumnNames.size() > 0) { return fullSchema.stream().filter(column -> partitionColumnNames.contains(column.getName().toLowerCase())) .collect(Collectors.toList()); } else { return Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override }
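The `getPartitions` body above matches a MySQL `MAXVALUE` catch-all partition whenever the requested names contain the max INT or max DATE literal. A minimal sketch of that matching rule in isolation, assuming an illustrative `"MAXVALUE"` constant and plain strings in place of the `Partition`/`PartitionInfo` types (the original uses two independent `if`s; an `else if` is used here since the branches are mutually exclusive for distinct names):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the partition-name matching in getPartitions: a MySQL "MAXVALUE"
// partition is selected when the requested names contain either the max INT
// or max DATE literal; otherwise only exact name matches are kept.
class PartitionMatchSketch {
    static final String MYSQL_PARTITION_MAXVALUE = "MAXVALUE"; // assumed literal

    static List<String> select(List<String> allPartitions, List<String> requested,
                               String maxInt, String maxDate) {
        List<String> selected = new ArrayList<>();
        for (String name : allPartitions) {
            if (requested.contains(name)) {
                selected.add(name);
            } else if (name.equalsIgnoreCase(MYSQL_PARTITION_MAXVALUE)
                    && (requested.contains(maxInt) || requested.contains(maxDate))) {
                selected.add(name);
            }
        }
        return selected;
    }
}
```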
Same as the statement above: this could just call into the `setBody(BinaryData)` method.
public HttpRequest setBody(byte[] content) { setContentLength(content.length); this.body = BinaryData.fromBytes(content); return this; }
return this;
public HttpRequest setBody(byte[] content) { return setBody(BinaryData.fromBytes(content)); }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. 
* * @return the content to be send */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be send */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(String content) { this.body = BinaryData.fromString(content); setContentLength(this.body.getLength()); return this; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content))); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. 
A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. * * @return the content to be sent */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be sent */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(String content) { return setBody(BinaryData.fromString(content)); } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { this.body = BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content)); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
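The change the comment asks for is a standard overload-delegation pattern: every `setBody` overload funnels into the canonical `setBody(BinaryData)` variant so the Content-Length bookkeeping lives in exactly one place. A minimal sketch of the pattern, with a plain header map standing in for `HttpHeaders` and `byte[]` standing in for `BinaryData`:

```java
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

// Sketch of overload delegation: all setBody overloads route through one
// canonical setter, so Content-Length is computed in a single place.
class RequestSketch {
    private final Map<String, String> headers = new HashMap<>();
    private byte[] body;

    RequestSketch setBody(String content) {
        // Delegate rather than duplicating the header logic.
        return setBody(content.getBytes(StandardCharsets.UTF_8));
    }

    RequestSketch setBody(byte[] content) {
        this.body = content;
        headers.put("Content-Length", String.valueOf(content.length));
        return this;
    }
}
```

The payoff is that a later change to the length handling (for example, skipping the header for unknown-length bodies) only has to be made in the canonical overload.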
Better wording: the response and value are returned from the service, not from the caller, so we can't blame the caller here. :-) E.g. "Service failed to return a response or expected value."
public Response<CommunicationUserIdentifier> createUserWithResponse(Context context) { context = context == null ? Context.NONE : context; Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync(new CommunicationIdentityCreateRequest(), context).block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Create user response and value cannot be null")); } String id = response.getValue().getIdentity().getId(); return new SimpleResponse<CommunicationUserIdentifier>( response, new CommunicationUserIdentifier(id)); }
throw logger.logExceptionAsError(new IllegalStateException("Create user response and value cannot be null"));
public Response<CommunicationUserIdentifier> createUserWithResponse(Context context) { context = context == null ? Context.NONE : context; Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync(new CommunicationIdentityCreateRequest(), context).block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value.")); } String id = response.getValue().getIdentity().getId(); return new SimpleResponse<CommunicationUserIdentifier>( response, new CommunicationUserIdentifier(id)); }
class CommunicationIdentityClient { private final CommunicationIdentityImpl client; private final ClientLogger logger = new ClientLogger(CommunicationIdentityClient.class); CommunicationIdentityClient(CommunicationIdentityClientImpl communicationIdentityClient) { client = communicationIdentityClient.getCommunicationIdentity(); } /** * Creates a new CommunicationUserIdentifier. * * @return the created Communication User. */ @ServiceMethod(returns = ReturnType.SINGLE) public CommunicationUserIdentifier createUser() { CommunicationIdentityAccessTokenResult result = client.create(new CommunicationIdentityCreateRequest()); return new CommunicationUserIdentifier(result.getIdentity().getId()); } /** * Creates a new CommunicationUserIdentifier with response. * * @param context A {@link Context} representing the request context. * @return the created Communication User. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Creates a new CommunicationUserIdentifier with token. * * @param scopes the list of scopes for the token * @return the result with created communication user and token */ @ServiceMethod(returns = ReturnType.SINGLE) public CommunicationUserIdentifierWithTokenResult createUserWithToken( Iterable<CommunicationTokenScope> scopes) { Objects.requireNonNull(scopes); final List<CommunicationTokenScope> scopesInput = new ArrayList<>(); scopes.forEach(scope -> scopesInput.add(scope)); CommunicationIdentityAccessTokenResult result = client.create( new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput)); return userWithAccessTokenResultConverter(result); } /** * Creates a new CommunicationUserIdentifier with token with response. * * @param scopes the list of scopes for the token * @param context A {@link Context} representing the request context. * @return the result with created communication user and token */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<CommunicationUserIdentifierWithTokenResult> createUserWithTokenWithResponse( Iterable<CommunicationTokenScope> scopes, Context context) { Objects.requireNonNull(scopes); context = context == null ? Context.NONE : context; final List<CommunicationTokenScope> scopesInput = new ArrayList<>(); scopes.forEach(scope -> scopesInput.add(scope)); Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync( new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput), context).block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Create user with token response and value cannot be null")); } return new SimpleResponse<CommunicationUserIdentifierWithTokenResult>( response, userWithAccessTokenResultConverter(response.getValue())); } /** * Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its * data. * * @param communicationUser The user to be deleted. * @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Void deleteUser(CommunicationUserIdentifier communicationUser) { Objects.requireNonNull(communicationUser); return client.deleteAsync(communicationUser.getId()).block(); } /** * Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its * data with response. * * @param communicationUser The user to be deleted. * @param context A {@link Context} representing the request context. 
* @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteUserWithResponse(CommunicationUserIdentifier communicationUser, Context context) { Objects.requireNonNull(communicationUser); context = context == null ? Context.NONE : context; return client.deleteWithResponseAsync(communicationUser.getId(), context).block(); } /** * Revokes all the tokens created for an identifier. * * @param communicationUser The user to be revoked token. * @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Void revokeTokens(CommunicationUserIdentifier communicationUser) { Objects.requireNonNull(communicationUser); return client.revokeAccessTokensAsync(communicationUser.getId()).block(); } /** * Revokes all the tokens created for a user before a specific date. * * @param communicationUser The user to be revoked token. * @param context the context of the request. Can also be null or * Context.NONE. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> revokeTokensWithResponse(CommunicationUserIdentifier communicationUser, Context context) { Objects.requireNonNull(communicationUser); context = context == null ? Context.NONE : context; return client.revokeAccessTokensWithResponseAsync(communicationUser.getId(), context).block(); } /** * Generates a new token for an identity. * * @param communicationUser The user to be issued tokens. * @param scopes The scopes that the token should have. * @return the issued token. */ @ServiceMethod(returns = ReturnType.SINGLE) public AccessToken issueToken(CommunicationUserIdentifier communicationUser, Iterable<CommunicationTokenScope> scopes) { Objects.requireNonNull(communicationUser); Objects.requireNonNull(scopes); final List<CommunicationTokenScope> scopesInput = new ArrayList<>(); scopes.forEach(scope -> scopesInput.add(scope)); CommunicationIdentityAccessToken rawToken = client.issueAccessToken( communicationUser.getId(), new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput)); return new AccessToken(rawToken.getToken(), rawToken.getExpiresOn()); } /** * Generates a new token for an identity. * * @param communicationUser The CommunicationUser from whom to issue a token. * @param scopes The scopes that the token should have. * @param context the context of the request. Can also be null or * Context.NONE. * @return the created CommunicationUserToken. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<AccessToken> issueTokenWithResponse(CommunicationUserIdentifier communicationUser, Iterable<CommunicationTokenScope> scopes, Context context) { Objects.requireNonNull(communicationUser); Objects.requireNonNull(scopes); context = context == null ? 
Context.NONE : context; final List<CommunicationTokenScope> scopesInput = new ArrayList<>(); scopes.forEach(scope -> scopesInput.add(scope)); Response<CommunicationIdentityAccessToken> response = client.issueAccessTokenWithResponseAsync( communicationUser.getId(), new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput), context) .block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Issue token response and value cannot be null")); } return new SimpleResponse<AccessToken>( response, new AccessToken(response.getValue().getToken(), response.getValue().getExpiresOn())); } private CommunicationUserIdentifierWithTokenResult userWithAccessTokenResultConverter( CommunicationIdentityAccessTokenResult identityAccessTokenResult) { CommunicationUserIdentifier user = new CommunicationUserIdentifier(identityAccessTokenResult.getIdentity().getId()); AccessToken token = new AccessToken( identityAccessTokenResult.getAccessToken().getToken(), identityAccessTokenResult.getAccessToken().getExpiresOn()); return new CommunicationUserIdentifierWithTokenResult(user, token); } }
class CommunicationIdentityClient { private final CommunicationIdentityImpl client; private final ClientLogger logger = new ClientLogger(CommunicationIdentityClient.class); CommunicationIdentityClient(CommunicationIdentityClientImpl communicationIdentityClient) { client = communicationIdentityClient.getCommunicationIdentity(); } /** * Creates a new CommunicationUserIdentifier. * * @return the created Communication User. */ @ServiceMethod(returns = ReturnType.SINGLE) public CommunicationUserIdentifier createUser() { CommunicationIdentityAccessTokenResult result = client.create(new CommunicationIdentityCreateRequest()); return new CommunicationUserIdentifier(result.getIdentity().getId()); } /** * Creates a new CommunicationUserIdentifier with response. * * @param context A {@link Context} representing the request context. * @return the created Communication User. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Creates a new CommunicationUserIdentifier with token. * * @param scopes the list of scopes for the token * @return the result with created communication user and token */ @ServiceMethod(returns = ReturnType.SINGLE) public CommunicationUserIdentifierWithTokenResult createUserWithToken( Iterable<CommunicationTokenScope> scopes) { Objects.requireNonNull(scopes); final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList()); CommunicationIdentityAccessTokenResult result = client.create( new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput)); return userWithAccessTokenResultConverter(result); } /** * Creates a new CommunicationUserIdentifier with token with response. * * @param scopes the list of scopes for the token * @param context A {@link Context} representing the request context. * @return the result with created communication user and token */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<CommunicationUserIdentifierWithTokenResult> createUserWithTokenWithResponse( Iterable<CommunicationTokenScope> scopes, Context context) { Objects.requireNonNull(scopes); context = context == null ? Context.NONE : context; final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList()); Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync( new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput), context).block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value.")); } return new SimpleResponse<CommunicationUserIdentifierWithTokenResult>( response, userWithAccessTokenResultConverter(response.getValue())); } /** * Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its * data. * * @param communicationUser The user to be deleted. * @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Void deleteUser(CommunicationUserIdentifier communicationUser) { Objects.requireNonNull(communicationUser); return client.deleteAsync(communicationUser.getId()).block(); } /** * Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its * data with response. * * @param communicationUser The user to be deleted. * @param context A {@link Context} representing the request context. 
* @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteUserWithResponse(CommunicationUserIdentifier communicationUser, Context context) { Objects.requireNonNull(communicationUser); context = context == null ? Context.NONE : context; return client.deleteWithResponseAsync(communicationUser.getId(), context).block(); } /** * Revokes all the tokens created for an identifier. * * @param communicationUser The user to be revoked token. * @return the response */ @ServiceMethod(returns = ReturnType.SINGLE) public Void revokeTokens(CommunicationUserIdentifier communicationUser) { Objects.requireNonNull(communicationUser); return client.revokeAccessTokensAsync(communicationUser.getId()).block(); } /** * Revokes all the tokens created for a user before a specific date. * * @param communicationUser The user to be revoked token. * @param context the context of the request. Can also be null or * Context.NONE. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> revokeTokensWithResponse(CommunicationUserIdentifier communicationUser, Context context) { Objects.requireNonNull(communicationUser); context = context == null ? Context.NONE : context; return client.revokeAccessTokensWithResponseAsync(communicationUser.getId(), context).block(); } /** * Generates a new token for an identity. * * @param communicationUser The user to be issued tokens. * @param scopes The scopes that the token should have. * @return the issued token. */ @ServiceMethod(returns = ReturnType.SINGLE) public AccessToken issueToken(CommunicationUserIdentifier communicationUser, Iterable<CommunicationTokenScope> scopes) { Objects.requireNonNull(communicationUser); Objects.requireNonNull(scopes); final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList()); CommunicationIdentityAccessToken rawToken = client.issueAccessToken( communicationUser.getId(), new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput)); return new AccessToken(rawToken.getToken(), rawToken.getExpiresOn()); } /** * Generates a new token for an identity. * * @param communicationUser The CommunicationUser from whom to issue a token. * @param scopes The scopes that the token should have. * @param context the context of the request. Can also be null or * Context.NONE. * @return the created CommunicationUserToken. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<AccessToken> issueTokenWithResponse(CommunicationUserIdentifier communicationUser, Iterable<CommunicationTokenScope> scopes, Context context) { Objects.requireNonNull(communicationUser); Objects.requireNonNull(scopes); context = context == null ? 
Context.NONE : context; final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList()); Response<CommunicationIdentityAccessToken> response = client.issueAccessTokenWithResponseAsync( communicationUser.getId(), new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput), context) .block(); if (response == null || response.getValue() == null) { throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value.")); } return new SimpleResponse<AccessToken>( response, new AccessToken(response.getValue().getToken(), response.getValue().getExpiresOn())); } private CommunicationUserIdentifierWithTokenResult userWithAccessTokenResultConverter( CommunicationIdentityAccessTokenResult identityAccessTokenResult) { CommunicationUserIdentifier user = new CommunicationUserIdentifier(identityAccessTokenResult.getIdentity().getId()); AccessToken token = new AccessToken( identityAccessTokenResult.getAccessToken().getToken(), identityAccessTokenResult.getAccessToken().getExpiresOn()); return new CommunicationUserIdentifierWithTokenResult(user, token); } }
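The pattern in these sync methods is block-on-async, then treat a missing response or value as a service-side failure. A minimal sketch of that null check as a reusable helper, with `Supplier<Optional<T>>` standing in for the blocked `Mono`/`Response` machinery (an assumption, not the SDK API):

```java
import java.util.Optional;
import java.util.function.Supplier;

// Sketch of the sync-over-async null check: after blocking, an absent
// response or value is a service failure, so the message blames the service.
class ServiceCallSketch {
    static <T> T requireValue(Supplier<Optional<T>> blockedCall) {
        return blockedCall.get().orElseThrow(() -> new IllegalStateException(
                "Service failed to return a response or expected value."));
    }
}
```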
Would it be possible to add a test case for the backward-compatible code path?
public Params decode(InputStream inStream) throws IOException { String prefix = STRING_CODER.decode(inStream); String shardTemplate = STRING_CODER.decode(inStream); String suffix = STRING_CODER.decode(inStream); ResourceId baseFilename; if (inStream.available() > 0) { baseFilename = FileSystems.matchNewResource(prefix, BOOLEAN_CODER.decode(inStream)); } else { baseFilename = FileBasedSink.convertToFileResourceIfPossible(prefix); } return new Params() .withBaseFilename(baseFilename) .withShardTemplate(shardTemplate) .withSuffix(suffix); }
baseFilename = FileBasedSink.convertToFileResourceIfPossible(prefix);
public Params decode(InputStream inStream) throws IOException { ResourceId prefix = FileBasedSink.convertToFileResourceIfPossible(STRING_CODER.decode(inStream)); String shardTemplate = STRING_CODER.decode(inStream); String suffix = STRING_CODER.decode(inStream); return new Params() .withBaseFilename(prefix) .withShardTemplate(shardTemplate) .withSuffix(suffix); }
class ParamsCoder extends AtomicCoder<Params> { private static final ParamsCoder INSTANCE = new ParamsCoder(); private static final Coder<String> STRING_CODER = StringUtf8Coder.of(); private static final Coder<Boolean> BOOLEAN_CODER = BooleanCoder.of(); public static ParamsCoder of() { return INSTANCE; } @Override public void encode(Params value, OutputStream outStream) throws IOException { if (value == null) { throw new CoderException("cannot encode a null value"); } STRING_CODER.encode(value.baseFilename.get().toString(), outStream); STRING_CODER.encode(value.shardTemplate, outStream); STRING_CODER.encode(value.suffix, outStream); BOOLEAN_CODER.encode(value.baseFilename.get().isDirectory(), outStream); } @Override }
class ParamsCoder extends AtomicCoder<Params> { private static final ParamsCoder INSTANCE = new ParamsCoder(); private static final Coder<String> STRING_CODER = StringUtf8Coder.of(); public static ParamsCoder of() { return INSTANCE; } @Override public void encode(Params value, OutputStream outStream) throws IOException { if (value == null) { throw new CoderException("cannot encode a null value"); } STRING_CODER.encode(value.baseFilename.get().toString(), outStream); STRING_CODER.encode(value.shardTemplate, outStream); STRING_CODER.encode(value.suffix, outStream); } @Override }
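The backward-compatible branch keys off `inStream.available() > 0`, so a test for it can encode the old layout (three strings, no trailing boolean) and verify decoding still works. A JUnit-style sketch under assumptions: `ParamsCoder` is reachable from the test (in the real codebase it is nested with restricted visibility), and the concrete filename strings are illustrative:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;

public class ParamsCoderCompatTest {
    @Test
    public void decodesOldFormatWithoutTrailingBoolean() throws Exception {
        // Old-format payload: three UTF-8 strings and no isDirectory flag.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        StringUtf8Coder.of().encode("/tmp/output", out);     // prefix
        StringUtf8Coder.of().encode("-SSSSS-of-NNNNN", out); // shard template
        StringUtf8Coder.of().encode(".txt", out);            // suffix
        // With no bytes left, available() == 0 takes the legacy branch.
        assertNotNull(ParamsCoder.of().decode(new ByteArrayInputStream(out.toByteArray())));
    }
}
```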
After checking the implementation, I found the port is 0 in the default configuration used by sqlGatewayRestEndpoint.start(), which means the server would bind a random port.
public void beforeAll(ExtensionContext context) { String address = InetAddress.getLoopbackAddress().getHostAddress(); Configuration config = getBaseConfig(getFlinkConfig(address, address, "0")); try { sqlGatewayRestEndpoint = new SqlGatewayRestEndpoint(config, serviceSupplier.get()); sqlGatewayRestEndpoint.start(); } catch (Exception e) { throw new SqlGatewayException( "Unexpected error occurred when trying to start the rest endpoint of sql gateway.", e); } InetSocketAddress serverAddress = checkNotNull(sqlGatewayRestEndpoint.getServerAddress()); targetAddress = serverAddress.getHostName(); targetPort = serverAddress.getPort(); }
targetAddress = serverAddress.getHostName();
public void beforeAll(ExtensionContext context) { String address = InetAddress.getLoopbackAddress().getHostAddress(); Configuration config = getBaseConfig(getFlinkConfig(address, address, "0")); try { sqlGatewayRestEndpoint = new SqlGatewayRestEndpoint(config, serviceSupplier.get()); sqlGatewayRestEndpoint.start(); } catch (Exception e) { throw new SqlGatewayException( "Unexpected error occurred when trying to start the rest endpoint of sql gateway.", e); } InetSocketAddress serverAddress = checkNotNull(sqlGatewayRestEndpoint.getServerAddress()); targetAddress = serverAddress.getHostName(); targetPort = serverAddress.getPort(); }
class SqlGatewayRestEndpointExtension implements BeforeAllCallback, AfterAllCallback { private final Supplier<SqlGatewayService> serviceSupplier; private SqlGatewayRestEndpoint sqlGatewayRestEndpoint; private String targetAddress; private int targetPort; public String getTargetAddress() { return targetAddress; } public int getTargetPort() { return targetPort; } public SqlGatewayRestEndpointExtension(Supplier<SqlGatewayService> serviceSupplier) { this.serviceSupplier = serviceSupplier; } @Override @Override public void afterAll(ExtensionContext context) { try { sqlGatewayRestEndpoint.stop(); } catch (Exception e) { throw new SqlGatewayException( "Unexpected error occurred when trying to stop the rest endpoint of sql gateway.", e); } } }
class SqlGatewayRestEndpointExtension implements BeforeAllCallback, AfterAllCallback { private final Supplier<SqlGatewayService> serviceSupplier; private SqlGatewayRestEndpoint sqlGatewayRestEndpoint; private String targetAddress; private int targetPort; public String getTargetAddress() { return targetAddress; } public int getTargetPort() { return targetPort; } public SqlGatewayRestEndpointExtension(Supplier<SqlGatewayService> serviceSupplier) { this.serviceSupplier = serviceSupplier; } @Override @Override public void afterAll(ExtensionContext context) { try { sqlGatewayRestEndpoint.stop(); } catch (Exception e) { throw new SqlGatewayException( "Unexpected error occurred when trying to stop the rest endpoint of sql gateway.", e); } } }
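Port `0` asks the OS for an ephemeral port, which is why the extension reads the bound address back from `getServerAddress()` instead of from the configuration. The same behavior is visible with a plain socket; a minimal, runnable sketch:

```java
import java.io.IOException;
import java.net.ServerSocket;

// Binding to port 0 delegates port selection to the OS; the actual port must
// be read back after binding, mirroring getServerAddress() in the extension.
public class EphemeralPortSketch {
    public static void main(String[] args) throws IOException {
        try (ServerSocket socket = new ServerSocket(0)) {
            System.out.println("bound to random port: " + socket.getLocalPort());
        }
    }
}
```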
```suggestion : String.format("/file/%s/%s", account, shareName); ```
private static String getCanonicalName(String account, String shareName, String filePath) { return ImplUtils.isNullOrEmpty(filePath) ? String.format("/file/%s/%s/%s",account, shareName, filePath.replace("\\", "/")) : String.format("/file/%s/%s",account, shareName); }
: String.format("/file/%s/%s",account, shareName);
private static String getCanonicalName(String account, String shareName, String filePath) { return !ImplUtils.isNullOrEmpty(filePath) ? String.format("/file/%s/%s/%s", account, shareName, filePath.replace("\\", "/")) : String.format("/file/%s/%s", account, shareName); }
class level JavaDocs for code snippets. * * @param storageSharedKeyCredentials A {@link StorageSharedKeyCredential}
class level JavaDocs for code snippets.</p> * * @param storageSharedKeyCredentials A {@link StorageSharedKeyCredential}
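The fix inverts the ternary so that a non-empty `filePath` selects the three-segment form. A self-contained sketch of the corrected logic, with the `ImplUtils.isNullOrEmpty` helper inlined as a plain null/empty test for illustration:

```java
// Sketch of the corrected canonical-name logic; the null/empty helper from
// ImplUtils is inlined here for illustration.
class CanonicalNameSketch {
    static String canonicalName(String account, String shareName, String filePath) {
        boolean hasPath = filePath != null && !filePath.isEmpty();
        return hasPath
                ? String.format("/file/%s/%s/%s", account, shareName, filePath.replace("\\", "/"))
                : String.format("/file/%s/%s", account, shareName);
    }

    public static void main(String[] args) {
        System.out.println(canonicalName("acct", "share", "dir\\blob")); // /file/acct/share/dir/blob
        System.out.println(canonicalName("acct", "share", ""));          // /file/acct/share
    }
}
```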
We don't need to define this list here, since its two usages are in two different execution paths.
private List<String> getPossibleTypes(TypeSymbol typeSymbol, CodeActionContext context) { typeSymbol = getRawType(typeSymbol, context); Set<String> possibleTypes = new HashSet<>(); List<String> finalPossibleTypeSet = new ArrayList<>(); List<TypeSymbol> errorTypes = new ArrayList<>(); if (typeSymbol.typeKind() == TypeDescKind.UNION) { ((UnionTypeSymbol) typeSymbol) .memberTypeDescriptors() .stream() .map(memberTypeSymbol -> getRawType(memberTypeSymbol, context)) .forEach(memberTypeSymbol -> { if (memberTypeSymbol.typeKind() == TypeDescKind.UNION) { possibleTypes.addAll( ((UnionTypeSymbol) memberTypeSymbol) .memberTypeDescriptors() .stream() .map(memberTSymbol -> getRawType(memberTSymbol, context)) .map(symbol -> getTypeName(symbol, context)) .collect(Collectors.toList())); } else { if (memberTypeSymbol.typeKind() == TypeDescKind.ERROR || CommonUtil.getRawType(memberTypeSymbol).typeKind() == TypeDescKind.ERROR) { errorTypes.add(memberTypeSymbol); return; } possibleTypes.add(getTypeName(memberTypeSymbol, context)); } }); } else { String type = getTypeName(typeSymbol, context); if (!type.equals("any")) { return Collections.singletonList(type); } } if (!errorTypes.isEmpty()) { String errorTypeStr = errorTypes.stream() .map(type -> getTypeName(type, context)) .collect(Collectors.joining("|")); finalPossibleTypeSet.addAll(possibleTypes.stream() .filter(type -> !type.equals("any")) .map(type -> type + "|" + errorTypeStr).collect(Collectors.toSet())); } else { finalPossibleTypeSet.addAll(possibleTypes.stream() .filter(type -> !type.equals("any")) .collect(Collectors.toSet())); } return finalPossibleTypeSet; }
List<String> finalPossibleTypeSet = new ArrayList<>();
private List<String> getPossibleTypes(TypeSymbol typeSymbol, CodeActionContext context) { typeSymbol = getRawType(typeSymbol, context); Set<String> possibleTypes = new HashSet<>(); List<TypeSymbol> errorTypes = new ArrayList<>(); if (typeSymbol.typeKind() == TypeDescKind.UNION) { ((UnionTypeSymbol) typeSymbol) .memberTypeDescriptors() .stream() .map(memberTypeSymbol -> getRawType(memberTypeSymbol, context)) .forEach(memberTypeSymbol -> { if (memberTypeSymbol.typeKind() == TypeDescKind.UNION) { possibleTypes.addAll( ((UnionTypeSymbol) memberTypeSymbol) .memberTypeDescriptors() .stream() .map(memberTSymbol -> getRawType(memberTSymbol, context)) .map(symbol -> getTypeName(symbol, context)) .collect(Collectors.toList())); } else if (memberTypeSymbol.typeKind() == TypeDescKind.ERROR || CommonUtil.getRawType(memberTypeSymbol).typeKind() == TypeDescKind.ERROR) { errorTypes.add(memberTypeSymbol); } else { possibleTypes.add(getTypeName(memberTypeSymbol, context)); } }); } else { String type = getTypeName(typeSymbol, context); if (!"any".equals(type)) { return Collections.singletonList(type); } } if (!errorTypes.isEmpty()) { String errorTypeStr = errorTypes.stream() .map(type -> getTypeName(type, context)) .collect(Collectors.joining("|")); return possibleTypes.stream() .filter(type -> !"any".equals(type)) .map(type -> type + "|" + errorTypeStr) .collect(Collectors.toList()); } return possibleTypes.stream() .filter(type -> !"any".equals(type)) .collect(Collectors.toList()); }
class CreateVariableWithTypeCodeAction extends CreateVariableCodeAction { private static final String NAME = "Create variable with type"; @Override public boolean validate(Diagnostic diagnostic, DiagBasedPositionDetails positionDetails, CodeActionContext context) { return diagnostic.diagnosticInfo().code().equals("BCE3934") && context.currentSemanticModel().isPresent() && context.nodeAtRange().kind() == SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION || context.nodeAtRange().kind() == SyntaxKind.REMOTE_METHOD_CALL_ACTION || context.nodeAtRange().parent().kind() == SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION || context.nodeAtRange().parent().kind() == SyntaxKind.REMOTE_METHOD_CALL_ACTION || context.nodeAtRange().parent().parent().kind() == SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION || context.nodeAtRange().parent().parent().kind() == SyntaxKind.REMOTE_METHOD_CALL_ACTION && CodeActionNodeValidator.validate(context.nodeAtRange()); } @Override public List<CodeAction> getCodeActions(Diagnostic diagnostic, DiagBasedPositionDetails positionDetails, CodeActionContext context) { Optional<TypeSymbol> typeSymbol = getReturnTypeDescriptorOfMethod(context); if (typeSymbol.isEmpty() || typeSymbol.get().typeKind() == TypeDescKind.ANY) { return Collections.emptyList(); } String uri = context.fileUri(); Range range = PositionUtil.toRange(diagnostic.location().lineRange()); CreateVariableCodeAction.CreateVariableOut createVarTextEdits = getCreateVariableTextEdits(range, positionDetails, typeSymbol.get(), context, new ImportsAcceptor(context)); List<String> types = createVarTextEdits.types; List<CodeAction> actions = new ArrayList<>(); for (int i = 0; i < types.size(); i++) { String commandTitle = CommandConstants.CREATE_VARIABLE_TITLE; List<TextEdit> edits = new ArrayList<>(); TextEdit variableEdit = createVarTextEdits.edits.get(i); edits.add(variableEdit); edits.addAll(createVarTextEdits.imports); String type = types.get(i); if (createVarTextEdits.types.size() > 1) { boolean isTuple = type.startsWith("[") && type.endsWith("]") && !type.endsWith("[]"); String typeLabel = isTuple && type.length() > 10 ? 
"Tuple" : type; commandTitle = String.format(CommandConstants.CREATE_VARIABLE_TITLE + " with '%s'", typeLabel); } actions.add(CodeActionUtil.createCodeAction(commandTitle, edits, uri, CodeActionKind.QuickFix)); } return actions; } @Override protected CreateVariableCodeAction.CreateVariableOut getCreateVariableTextEdits(Range range, DiagBasedPositionDetails posDetails, TypeSymbol typeDescriptor, CodeActionContext context, ImportsAcceptor importsAcceptor) { Symbol matchedSymbol = posDetails.matchedSymbol(); Position position = PositionUtil.toPosition(posDetails.matchedNode().lineRange().startLine()); Set<String> allNameEntries = context.visibleSymbols(position).stream() .filter(s -> s.getName().isPresent()) .map(s -> s.getName().get()) .collect(Collectors.toSet()); String name = NameUtil.generateVariableName(matchedSymbol, typeDescriptor, allNameEntries); List<TextEdit> edits = new ArrayList<>(); List<Integer> renamePositions = new ArrayList<>(); List<String> types = getPossibleTypes(typeDescriptor, context); Position pos = range.getStart(); for (String type : types) { Position insertPos = new Position(pos.getLine(), pos.getCharacter()); String edit = type + " " + name + " = "; edits.add(new TextEdit(new Range(insertPos, insertPos), edit)); } return new CreateVariableCodeAction.CreateVariableOut(name, types, edits, importsAcceptor.getNewImportTextEdits(), renamePositions); } /** * Get all possible return type combinations for the type infer required method. * Here if the type symbol is a union we will check it contains an error member. * Other members of the union will then combine with the error type and produce the output. * In here we will `any` type. * * @param typeSymbol Return type descriptor of the method. * @param context CodeActionContext * @return {@link List<String>} */ private String getTypeName(TypeSymbol symbol, CodeActionContext context) { Optional<ModuleSymbol> module = symbol.getModule(); if (module.isPresent()) { String fqPrefix = ""; if (!(ProjectConstants.ANON_ORG.equals(module.get().id().orgName()))) { ModuleID id = module.get().id(); fqPrefix = id.orgName() + "/" + id.moduleName() + ":" + id.version() + ":"; } String moduleQualifiedName = fqPrefix + (symbol.getName().isPresent() ? symbol.getName().get() : ""); return FunctionGenerator.processModuleIDsInText(new ImportsAcceptor(context), moduleQualifiedName, context); } return symbol.signature(); } private TypeSymbol getRawType(TypeSymbol typeSymbol, CodeActionContext context) { TypeSymbol rawType = CommonUtil.getRawType(typeSymbol); Types types = context.currentSemanticModel().get().types(); TypeBuilder builder = types.builder(); RecordTypeSymbol recordTypeSymbol = builder.RECORD_TYPE.withRestField(types.ANY).build(); if (rawType.subtypeOf(types.ERROR) || rawType.subtypeOf(recordTypeSymbol)) { return typeSymbol; } return rawType; } private Optional<TypeSymbol> getReturnTypeDescriptorOfMethod(CodeActionContext context) { ActionNodeFinder actionNodeFinder = new ActionNodeFinder(); context.nodeAtRange().accept(actionNodeFinder); NonTerminalNode actionNode = actionNodeFinder.actionNode; return context.currentSemanticModel() .flatMap(model -> model.symbol(actionNode)) .filter(symbol -> symbol.kind() == SymbolKind.METHOD || symbol.kind() == SymbolKind.RESOURCE_METHOD) .flatMap(symbol -> symbol.kind() == SymbolKind.METHOD ? 
((MethodSymbol) symbol).typeDescriptor().returnTypeDescriptor() : ((ResourceMethodSymbol) symbol).typeDescriptor().returnTypeDescriptor()); } @Override public String getName() { return NAME; } /** * A visitor to find RemoteMethodCallNodes and ClientResourceAccessActionNodes. */ static class ActionNodeFinder extends NodeVisitor { private NonTerminalNode actionNode = null; @Override public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { simpleNameReferenceNode.parent().accept(this); } @Override public void visit(NamedArgumentNode namedArgumentNode) { namedArgumentNode.parent().accept(this); } @Override public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) { this.actionNode = remoteMethodCallActionNode; } @Override public void visit(ClientResourceAccessActionNode clientResourceAccessActionNode) { this.actionNode = clientResourceAccessActionNode; } } }
class CreateVariableWithTypeCodeAction extends CreateVariableCodeAction { private static final String NAME = "Create variable with type"; @Override public boolean validate(Diagnostic diagnostic, DiagBasedPositionDetails positionDetails, CodeActionContext context) { return diagnostic.diagnosticInfo().code().equals("BCE4046") && context.currentSemanticModel().isPresent() && isInRemoteMethodCallOrResourceAccess(context) && CodeActionNodeValidator.validate(context.nodeAtRange()); } @Override public List<CodeAction> getCodeActions(Diagnostic diagnostic, DiagBasedPositionDetails positionDetails, CodeActionContext context) { Optional<TypeSymbol> typeSymbol = getReturnTypeDescriptorOfMethod(context); if (typeSymbol.isEmpty() || typeSymbol.get().typeKind() == TypeDescKind.ANY) { return Collections.emptyList(); } String uri = context.fileUri(); Range range = PositionUtil.toRange(diagnostic.location().lineRange()); CreateVariableCodeAction.CreateVariableOut createVarTextEdits = getCreateVariableTextEdits(range, positionDetails, typeSymbol.get(), context, new ImportsAcceptor(context)); List<String> types = createVarTextEdits.types; List<CodeAction> actions = new ArrayList<>(); for (int i = 0; i < types.size(); i++) { String commandTitle = CommandConstants.CREATE_VARIABLE_TITLE; List<TextEdit> edits = new ArrayList<>(); TextEdit variableEdit = createVarTextEdits.edits.get(i); edits.add(variableEdit); edits.addAll(createVarTextEdits.imports); String type = types.get(i); if (createVarTextEdits.types.size() > 1) { boolean isTuple = type.startsWith("[") && type.endsWith("]") && !type.endsWith("[]"); String typeLabel = isTuple && type.length() > 10 ? "Tuple" : type; commandTitle = String.format(CommandConstants.CREATE_VARIABLE_TITLE + " with '%s'", typeLabel); } actions.add(CodeActionUtil.createCodeAction(commandTitle, edits, uri, CodeActionKind.QuickFix)); } return actions; } @Override protected CreateVariableCodeAction.CreateVariableOut getCreateVariableTextEdits(Range range, DiagBasedPositionDetails posDetails, TypeSymbol typeDescriptor, CodeActionContext context, ImportsAcceptor importsAcceptor) { Symbol matchedSymbol = posDetails.matchedSymbol(); Position position = PositionUtil.toPosition(posDetails.matchedNode().lineRange().startLine()); Set<String> allNameEntries = context.visibleSymbols(position).stream() .filter(s -> s.getName().isPresent()) .map(s -> s.getName().get()) .collect(Collectors.toSet()); String name = NameUtil.generateVariableName(matchedSymbol, typeDescriptor, allNameEntries); List<TextEdit> edits = new ArrayList<>(); List<Integer> renamePositions = new ArrayList<>(); List<String> types = getPossibleTypes(typeDescriptor, context); Position pos = range.getStart(); for (String type : types) { Position insertPos = new Position(pos.getLine(), pos.getCharacter()); String edit = type + " " + name + " = "; edits.add(new TextEdit(new Range(insertPos, insertPos), edit)); } return new CreateVariableCodeAction.CreateVariableOut(name, types, edits, importsAcceptor.getNewImportTextEdits(), renamePositions); } /** * Get all possible return type combinations for a method with an inferred return type. * Here if the type symbol is a union we will check whether it contains an error member. * Other members of the union will be combined with the error type and returned. * If union has `any` type member, `any` type member will be discarded when combining with the error type. * * @param typeSymbol Return type descriptor of the method. 
* @param context CodeActionContext * @return {@link List<String>} */ private Boolean isInRemoteMethodCallOrResourceAccess(CodeActionContext context) { Node evalNode = context.nodeAtRange(); int count = 0; while (evalNode.kind() != SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION && evalNode.kind() != SyntaxKind.REMOTE_METHOD_CALL_ACTION) { count++; if (count == 3) { return false; } evalNode = evalNode.parent(); } return true; } private String getTypeName(TypeSymbol symbol, CodeActionContext context) { Optional<ModuleSymbol> module = symbol.getModule(); if (module.isPresent()) { String fqPrefix = ""; if (!(ProjectConstants.ANON_ORG.equals(module.get().id().orgName()))) { ModuleID id = module.get().id(); fqPrefix = id.orgName() + "/" + id.moduleName() + ":" + id.version() + ":"; } String moduleQualifiedName = fqPrefix + (symbol.getName().isPresent() ? symbol.getName().get() : ""); return FunctionGenerator.processModuleIDsInText(new ImportsAcceptor(context), moduleQualifiedName, context); } return symbol.signature(); } private TypeSymbol getRawType(TypeSymbol typeSymbol, CodeActionContext context) { TypeSymbol rawType = CommonUtil.getRawType(typeSymbol); Types types = context.currentSemanticModel().get().types(); TypeBuilder builder = types.builder(); RecordTypeSymbol recordTypeSymbol = builder.RECORD_TYPE.withRestField(types.ANY).build(); if (rawType.subtypeOf(types.ERROR) || rawType.subtypeOf(recordTypeSymbol)) { return typeSymbol; } return rawType; } private Optional<TypeSymbol> getReturnTypeDescriptorOfMethod(CodeActionContext context) { ActionNodeFinder actionNodeFinder = new ActionNodeFinder(); context.nodeAtRange().accept(actionNodeFinder); NonTerminalNode actionNode = actionNodeFinder.actionNode; return context.currentSemanticModel() .flatMap(model -> model.symbol(actionNode)) .filter(symbol -> symbol.kind() == SymbolKind.METHOD || symbol.kind() == SymbolKind.RESOURCE_METHOD) .flatMap(symbol -> symbol.kind() == SymbolKind.METHOD ? ((MethodSymbol) symbol).typeDescriptor().returnTypeDescriptor() : ((ResourceMethodSymbol) symbol).typeDescriptor().returnTypeDescriptor()); } @Override public String getName() { return NAME; } /** * A visitor to find RemoteMethodCallNodes and ClientResourceAccessActionNodes. */ static class ActionNodeFinder extends NodeVisitor { private NonTerminalNode actionNode = null; @Override public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { simpleNameReferenceNode.parent().accept(this); } @Override public void visit(NamedArgumentNode namedArgumentNode) { namedArgumentNode.parent().accept(this); } @Override public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) { this.actionNode = remoteMethodCallActionNode; } @Override public void visit(ClientResourceAccessActionNode clientResourceAccessActionNode) { this.actionNode = clientResourceAccessActionNode; } } }
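The revision drops the pre-declared `finalPossibleTypeSet` accumulator and has each execution path return its own list. The shape of that refactor in isolation, with plain strings standing in for the `TypeSymbol` machinery:

```java
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Sketch of the refactor: each branch builds and returns its result directly
// instead of both branches appending to one shared mutable list.
class BranchReturnSketch {
    static List<String> combine(Set<String> possibleTypes, List<String> errorTypes) {
        if (!errorTypes.isEmpty()) {
            String errorTypeStr = String.join("|", errorTypes);
            return possibleTypes.stream()
                    .filter(type -> !"any".equals(type))
                    .map(type -> type + "|" + errorTypeStr)
                    .collect(Collectors.toList());
        }
        return possibleTypes.stream()
                .filter(type -> !"any".equals(type))
                .collect(Collectors.toList());
    }
}
```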
I think for clarity I'd make this just return the UUID without the `-` and have the caller handle the substring. Seeing `32 - length` is fairly confusing and likely error-prone, as you pass `15` for the length but really get `17`.
private static String getRandomId(int length) { return CoreUtils.randomUuid().toString() .replace("-", "") .substring(32 - length); }
.substring(32 - length);
private static String getRandomId(int length) { return CoreUtils.randomUuid().toString() .replace("-", "") .substring(32 - length); }
class LoggingSpan { public static final LoggingSpan NOOP = new LoggingSpan(); private static final ClientLogger LOGGER = new ClientLogger(LoggingSpan.class); private final String traceId; private final String spanId; private final LoggingEventBuilder log; private final boolean enabled; private LoggingSpan() { this.traceId = null; this.spanId = null; this.log = null; this.enabled = false; } public String getTraceId() { return enabled ? traceId : "00000000000000000000000000000000"; } public String getSpanId() { return enabled ? spanId : "0000000000000000"; } LoggingSpan(String name, SpanKind kind, String traceId, String parentSpanId) { this.traceId = traceId != null ? traceId : getRandomId(32); this.spanId = getRandomId(16); this.log = LOGGER.atInfo() .addKeyValue("traceId", this.traceId) .addKeyValue("spanId", spanId) .addKeyValue("parentSpanId", parentSpanId) .addKeyValue("name", name) .addKeyValue("kind", kind.name()); log.log("span created"); this.enabled = true; } LoggingSpan(String name, SpanKind kind, LoggingSpan parent) { this(name, kind, parent.enabled ? parent.traceId : null, parent.getSpanId()); } public LoggingSpan addKeyValue(String key, Object value) { if (enabled) { log.addKeyValue(key, value); } return this; } public void end(Throwable throwable) { if (enabled) { if (throwable != null) { log.log("span ended", throwable); } else { log.log("span ended"); } } } /** * Generates random id with given length up to 32 chars. */ }
class LoggingSpan { public static final LoggingSpan NOOP = new LoggingSpan(); private static final ClientLogger LOGGER = new ClientLogger(LoggingSpan.class); private final String traceId; private final String spanId; private final LoggingEventBuilder log; private final boolean enabled; private LoggingSpan() { this.traceId = null; this.spanId = null; this.log = null; this.enabled = false; } public String getTraceId() { return enabled ? traceId : "00000000000000000000000000000000"; } public String getSpanId() { return enabled ? spanId : "0000000000000000"; } LoggingSpan(String name, SpanKind kind, String traceId, String parentSpanId) { this.traceId = traceId != null ? traceId : getRandomId(32); this.spanId = getRandomId(16); this.log = LOGGER.atInfo() .addKeyValue("traceId", this.traceId) .addKeyValue("spanId", spanId) .addKeyValue("parentSpanId", parentSpanId) .addKeyValue("name", name) .addKeyValue("kind", kind.name()); log.log("span created"); this.enabled = true; } LoggingSpan(String name, SpanKind kind, LoggingSpan parent) { this(name, kind, parent.enabled ? parent.traceId : null, parent.getSpanId()); } public LoggingSpan addKeyValue(String key, Object value) { if (enabled) { log.addKeyValue(key, value); } return this; } public void end(Throwable throwable) { if (enabled) { if (throwable != null) { log.log("span ended", throwable); } else { log.log("span ended"); } } } /** * Generates random id with given length up to 32 chars. */ }
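A minimal sketch of what the reviewer is asking for, reusing the CoreUtils.randomUuid() helper already shown in this row; returning the full 32-char hex string and letting callers slice it makes the lengths explicit at each call site (for random hex, taking the first rather than the last N characters is equivalent):

```java
import com.azure.core.util.CoreUtils;

final class SpanIds {
    // Full dash-free UUID: 32 hex chars. Callers take the prefix they need,
    // so no `32 - length` arithmetic is hidden inside the helper.
    static String randomHexString() {
        return CoreUtils.randomUuid().toString().replace("-", "");
    }
}

// The call sites from the snippet would then read (sketch only):
//   this.traceId = traceId != null ? traceId : SpanIds.randomHexString();  // 32 chars
//   this.spanId  = SpanIds.randomHexString().substring(0, 16);             // 16 chars
```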
```suggestion
this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "deliveryGuarantee");
```
or leave that param out as discussed.
KafkaSinkBuilder<IN> setDeliverGuarantee(DeliveryGuarantee deliveryGuarantee) { this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "semantic"); return this; }
this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "semantic");
KafkaSinkBuilder<IN> setDeliverGuarantee(DeliveryGuarantee deliveryGuarantee) { this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "deliveryGuarantee"); return this; }
class KafkaSinkBuilder<IN> { private static final Logger LOG = LoggerFactory.getLogger(KafkaSinkBuilder.class); private static final Duration DEFAULT_KAFKA_TRANSACTION_TIMEOUT = Duration.ofHours(1); private DeliveryGuarantee deliveryGuarantee = DeliveryGuarantee.NONE; private String transactionalIdPrefix = "kafka-sink"; private Properties kafkaProducerConfig; private KafkaRecordSerializationSchema<IN> recordSerializer; private String bootstrapServers; /** * Sets the wanted the {@link DeliveryGuarantee}. The default delivery guarantee is {@link * * * @param deliveryGuarantee * @return {@link KafkaSinkBuilder} */ /** * Sets the configuration which used to instantiate all used {@link * org.apache.kafka.clients.producer.KafkaProducer}. * * @param kafkaProducerConfig * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setKafkaProducerConfig(Properties kafkaProducerConfig) { this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, "kafkaProducerConfig"); if (!kafkaProducerConfig.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) { kafkaProducerConfig.put( ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); } else { LOG.warn( "Overwriting the '{}' is not recommended", ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG); } if (!kafkaProducerConfig.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) { kafkaProducerConfig.put( ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); } else { LOG.warn( "Overwriting the '{}' is not recommended", ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG); } if (!kafkaProducerConfig.containsKey(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG)) { final long timeout = DEFAULT_KAFKA_TRANSACTION_TIMEOUT.toMillis(); checkState( timeout < Integer.MAX_VALUE && timeout > 0, "timeout does not fit into 32 bit integer"); kafkaProducerConfig.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, (int) timeout); LOG.warn( "Property [{}] not specified. Setting it to {}", ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, DEFAULT_KAFKA_TRANSACTION_TIMEOUT); } return this; } /** * Sets the {@link KafkaRecordSerializationSchema} that transforms incoming records to {@link * org.apache.kafka.clients.producer.ProducerRecord}s. * * @param recordSerializer * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setRecordSerializer( KafkaRecordSerializationSchema<IN> recordSerializer) { this.recordSerializer = checkNotNull(recordSerializer, "recordSerializer"); ClosureCleaner.clean( this.recordSerializer, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true); return this; } /** * Sets the prefix for all created transactionalIds if {@link DeliveryGuarantee * configured. * * <p>It is mandatory to always set this value with {@link DeliveryGuarantee * prevent corrupted transactions if multiple jobs using the KafkaSink run against the same * Kafka Cluster. The default prefix is {@link * * @param transactionalIdPrefix * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setTransactionalIdPrefix(String transactionalIdPrefix) { this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, "transactionalIdPrefix"); return this; } /** * Sets the Kafka bootstrap servers. * * @param bootstrapServers a comma separated list of valid URIs to reach the Kafka broker * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setBootstrapServers(String bootstrapServers) { this.bootstrapServers = checkNotNull(bootstrapServers); return this; } /** * Constructs the {@link KafkaSink} with the configured properties. 
* * @return {@link KafkaSink} */ public KafkaSink<IN> build() { checkNotNull(kafkaProducerConfig, "kafkaProducerConfig"); checkNotNull(bootstrapServers); if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) { checkNotNull( transactionalIdPrefix, "EXACTLY_ONCE delivery guarantee requires a transactionIdPrefix to be set to provide unique transaction names across multiple KafkaSinks writing to the same Kafka cluster."); } kafkaProducerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); return new KafkaSink<>( deliveryGuarantee, kafkaProducerConfig, transactionalIdPrefix, checkNotNull(recordSerializer, "recordSerializer")); } }
class KafkaSinkBuilder<IN> { private static final Logger LOG = LoggerFactory.getLogger(KafkaSinkBuilder.class); private static final Duration DEFAULT_KAFKA_TRANSACTION_TIMEOUT = Duration.ofHours(1); private static final int MAXIMUM_PREFIX_BYTES = 64000; private DeliveryGuarantee deliveryGuarantee = DeliveryGuarantee.NONE; private String transactionalIdPrefix = "kafka-sink"; private Properties kafkaProducerConfig; private KafkaRecordSerializationSchema<IN> recordSerializer; private String bootstrapServers; /** * Sets the wanted the {@link DeliveryGuarantee}. The default delivery guarantee is {@link * * * @param deliveryGuarantee * @return {@link KafkaSinkBuilder} */ /** * Sets the configuration which used to instantiate all used {@link * org.apache.kafka.clients.producer.KafkaProducer}. * * @param kafkaProducerConfig * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setKafkaProducerConfig(Properties kafkaProducerConfig) { this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, "kafkaProducerConfig"); if (!kafkaProducerConfig.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) { kafkaProducerConfig.put( ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); } else { LOG.warn( "Overwriting the '{}' is not recommended", ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG); } if (!kafkaProducerConfig.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) { kafkaProducerConfig.put( ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); } else { LOG.warn( "Overwriting the '{}' is not recommended", ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG); } if (!kafkaProducerConfig.containsKey(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG)) { final long timeout = DEFAULT_KAFKA_TRANSACTION_TIMEOUT.toMillis(); checkState( timeout < Integer.MAX_VALUE && timeout > 0, "timeout does not fit into 32 bit integer"); kafkaProducerConfig.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, (int) timeout); LOG.warn( "Property [{}] not specified. Setting it to {}", ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, DEFAULT_KAFKA_TRANSACTION_TIMEOUT); } return this; } /** * Sets the {@link KafkaRecordSerializationSchema} that transforms incoming records to {@link * org.apache.kafka.clients.producer.ProducerRecord}s. * * @param recordSerializer * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setRecordSerializer( KafkaRecordSerializationSchema<IN> recordSerializer) { this.recordSerializer = checkNotNull(recordSerializer, "recordSerializer"); ClosureCleaner.clean( this.recordSerializer, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true); return this; } /** * Sets the prefix for all created transactionalIds if {@link DeliveryGuarantee * configured. * * <p>It is mandatory to always set this value with {@link DeliveryGuarantee * prevent corrupted transactions if multiple jobs using the KafkaSink run against the same * Kafka Cluster. The default prefix is {@link * * <p>The size of the prefix is capped by {@link * * <p>It is important to keep the prefix stable across application restarts. If the prefix * changes it might happen that lingering transactions are not correctly aborted and newly * written messages are not immediately consumable until the transactions timeout. 
* * @param transactionalIdPrefix * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setTransactionalIdPrefix(String transactionalIdPrefix) { this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, "transactionalIdPrefix"); checkState( transactionalIdPrefix.getBytes(StandardCharsets.UTF_8).length <= MAXIMUM_PREFIX_BYTES, "The configured prefix is too long and the resulting transactionalId might exceed Kafka's transactionalIds size."); return this; } /** * Sets the Kafka bootstrap servers. * * @param bootstrapServers a comma separated list of valid URIs to reach the Kafka broker * @return {@link KafkaSinkBuilder} */ public KafkaSinkBuilder<IN> setBootstrapServers(String bootstrapServers) { this.bootstrapServers = checkNotNull(bootstrapServers); return this; } /** * Constructs the {@link KafkaSink} with the configured properties. * * @return {@link KafkaSink} */ public KafkaSink<IN> build() { checkNotNull(kafkaProducerConfig, "kafkaProducerConfig"); checkNotNull(bootstrapServers); if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) { checkState( transactionalIdPrefix != null, "EXACTLY_ONCE delivery guarantee requires a transactionIdPrefix to be set to provide unique transaction names across multiple KafkaSinks writing to the same Kafka cluster."); } kafkaProducerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); return new KafkaSink<>( deliveryGuarantee, kafkaProducerConfig, transactionalIdPrefix, checkNotNull(recordSerializer, "recordSerializer")); } }
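The target line only changes the message argument, but that string is user-facing: Flink's Preconditions.checkNotNull(reference, errorMessage) puts it verbatim into the thrown NullPointerException, so the leftover "semantic" label would have leaked into error output. A minimal illustration (the null value is contrived):

```java
import static org.apache.flink.util.Preconditions.checkNotNull;

class MessageArgDemo {
    public static void main(String[] args) {
        Object deliveryGuarantee = null; // stand-in for the DeliveryGuarantee param
        // Throws NullPointerException whose message is exactly the second
        // argument, i.e. "deliveryGuarantee" after the review fix.
        checkNotNull(deliveryGuarantee, "deliveryGuarantee");
    }
}
```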
I think it would be helpful to print the expected signature in case the user passes wrong arguments?
public List<Signature> getExpectedSignatures(FunctionDefinition definition) { return Collections.singletonList(Signature.of(Signature.Argument.of("*"))); }
return Collections.singletonList(Signature.of(Signature.Argument.of("*")));
public List<Signature> getExpectedSignatures(FunctionDefinition definition) { return Collections.singletonList(Signature.of(Signature.Argument.of("*"))); }
class HiveUDFInputStrategy implements InputTypeStrategy { @Override public ArgumentCount getArgumentCount() { return ConstantArgumentCount.any(); } @Override public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) { setArguments(callContext); try { inferReturnType(); } catch (UDFArgumentException e) { if (throwOnFailure) { throw new ValidationException( String.format("Cannot find a suitable Hive function from %s for the input arguments", hiveFunctionWrapper.getClassName()), e); } else { return Optional.empty(); } } return Optional.of(callContext.getArgumentDataTypes()); } @Override }
class HiveUDFInputStrategy implements InputTypeStrategy { @Override public ArgumentCount getArgumentCount() { return ConstantArgumentCount.any(); } @Override public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) { setArguments(callContext); try { inferReturnType(); } catch (UDFArgumentException e) { if (throwOnFailure) { throw new ValidationException( String.format("Cannot find a suitable Hive function from %s for the input arguments", hiveFunctionWrapper.getClassName()), e); } else { return Optional.empty(); } } return Optional.of(callContext.getArgumentDataTypes()); } @Override }
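One hedged way to act on this comment using the snippet's own names: the CallContext already exposes the actual argument types, so the ValidationException could echo them (and getExpectedSignatures could return something more descriptive than the catch-all "*"). A drop-in sketch for inferInputTypes; the message format is my choice, not the merged code:

```java
// Inside inferInputTypes(...), replacing the bare error with one that also
// shows what the user actually passed:
if (throwOnFailure) {
    throw new ValidationException(
            String.format(
                    "Cannot find a suitable Hive function from %s for the input arguments %s",
                    hiveFunctionWrapper.getClassName(),
                    callContext.getArgumentDataTypes()),
            e);
}
return Optional.empty();
```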
nit: this comment basically repeats what the code does; I think it's unnecessary
public void testExpiredCheckpointExceedsTolerableFailureNumber() { ExecutionVertex vertex1 = mockExecutionVertex(new ExecutionAttemptID()); ExecutionVertex vertex2 = mockExecutionVertex(new ExecutionAttemptID()); final String errorMsg = "Exceeded checkpoint failure tolerance number!"; CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg); CheckpointCoordinator coord = getCheckpointCoordinator(new JobID(), vertex1, vertex2, checkpointFailureManager); try { final CompletableFuture<CompletedCheckpoint> checkPointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkPointFuture.isCompletedExceptionally()); coord.abortPendingCheckpoints(new CheckpointException(CHECKPOINT_EXPIRED)); fail("Test failed."); } catch (Exception e) { assertTrue(e instanceof RuntimeException); assertEquals(errorMsg, e.getMessage()); } finally { try { coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } }
public void testExpiredCheckpointExceedsTolerableFailureNumber() throws Exception { ExecutionVertex vertex1 = mockExecutionVertex(new ExecutionAttemptID()); ExecutionVertex vertex2 = mockExecutionVertex(new ExecutionAttemptID()); final String errorMsg = "Exceeded checkpoint failure tolerance number!"; CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg); CheckpointCoordinator coord = getCheckpointCoordinator(new JobID(), vertex1, vertex2, checkpointFailureManager); try { coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); coord.abortPendingCheckpoints(new CheckpointException(CHECKPOINT_EXPIRED)); fail("Test failed."); } catch (Exception e) { assertTrue(e instanceof RuntimeException); assertEquals(errorMsg, e.getMessage()); } finally { coord.shutdown(JobStatus.FINISHED); } }
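Beyond dropping the redundant comments, the after-version shows a cleanup that recurs across this test class: declare throws Exception on the test method and let JUnit report failures, instead of catching in finally and calling fail(e.getMessage()), which discards the stack trace. The same pattern in miniature, with illustrative names:

```java
import org.junit.Test;

public class CleanupPatternExample {
    @Test
    public void testSomethingWithCleanup() throws Exception {
        ResourceUnderTest resource = new ResourceUnderTest(); // hypothetical
        try {
            // ... trigger behaviour and assert on it; any exception simply
            // propagates and JUnit shows it with a full stack trace ...
        } finally {
            resource.shutdown(); // no nested try/catch + fail(...) needed
        }
    }

    // Minimal stand-in so the sketch compiles.
    static final class ResourceUnderTest {
        void shutdown() {}
    }
}
```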
class CheckpointCoordinatorTest extends TestLogger { private static final String TASK_MANAGER_LOCATION_INFO = "Unknown location"; private ManuallyTriggeredScheduledExecutor manuallyTriggeredScheduledExecutor; @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { manuallyTriggeredScheduledExecutor = new ManuallyTriggeredScheduledExecutor(); } @Test public void testCheckpointAbortsIfTriggerTasksAreNotExecuted() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointAbortsIfTriggerTasksAreFinished() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointAbortsIfAckTasksAreNotExecuted() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testTriggerAndDeclineCheckpointThenFailureManagerThrowsException() { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); final String errorMsg = "Exceeded checkpoint failure tolerance number!"; CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2, checkpointFailureManager); try { final CompletableFuture<CompletedCheckpoint> checkPointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkPointFuture.isCompletedExceptionally()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, 
checkpointId), TASK_MANAGER_LOCATION_INFO); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); fail("Test failed."); } catch (Exception e) { assertTrue(e instanceof RuntimeException); assertEquals(errorMsg, e.getMessage()); } finally { try { coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } } @Test /** * This test triggers a checkpoint and then sends a decline checkpoint message from * one of the tasks. The expected behaviour is that said checkpoint is discarded and a new * checkpoint is triggered. */ @Test public void testTriggerAndDeclineCheckpointSimple() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(checkpoint); assertEquals(checkpointId, checkpoint.getCheckpointId()); assertEquals(jid, checkpoint.getJobId()); assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint.getOperatorStates().size()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(vertex1.getCurrentExecutionAttempt()).triggerCheckpoint(checkpointId, checkpoint.getCheckpointTimestamp(), CheckpointOptions.forCheckpointWithDefaultLocation()); verify(vertex2.getCurrentExecutionAttempt()).triggerCheckpoint(checkpointId, checkpoint.getCheckpointTimestamp(), CheckpointOptions.forCheckpointWithDefaultLocation()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId), "Unknown location"); assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId), "Unknown location"); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId), 
TASK_MANAGER_LOCATION_INFO); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID2, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * This test triggers two checkpoints and then sends a decline message from one of the tasks * for the first checkpoint. This should discard the first checkpoint while not triggering * a new checkpoint because a later checkpoint is already in progress. */ @Test public void testTriggerAndDeclineCheckpointComplex() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(2, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); Iterator<Map.Entry<Long, PendingCheckpoint>> it = coord.getPendingCheckpoints().entrySet().iterator(); long checkpoint1Id = it.next().getKey(); long checkpoint2Id = it.next().getKey(); PendingCheckpoint checkpoint1 = coord.getPendingCheckpoints().get(checkpoint1Id); PendingCheckpoint checkpoint2 = coord.getPendingCheckpoints().get(checkpoint2Id); assertNotNull(checkpoint1); assertEquals(checkpoint1Id, checkpoint1.getCheckpointId()); assertEquals(jid, checkpoint1.getJobId()); assertEquals(2, checkpoint1.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint1.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint1.getOperatorStates().size()); assertFalse(checkpoint1.isDiscarded()); assertFalse(checkpoint1.areTasksFullyAcknowledged()); assertNotNull(checkpoint2); assertEquals(checkpoint2Id, checkpoint2.getCheckpointId()); assertEquals(jid, checkpoint2.getJobId()); assertEquals(2, checkpoint2.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint2.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint2.getOperatorStates().size()); assertFalse(checkpoint2.isDiscarded()); assertFalse(checkpoint2.areTasksFullyAcknowledged()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint1Id), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint1Id), any(Long.class), any(CheckpointOptions.class)); } { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint2Id), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint2Id), any(Long.class), any(CheckpointOptions.class)); } coord.receiveDeclineMessage(new 
DeclineCheckpoint(jid, attemptID1, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint1.isDiscarded()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpointNew = coord.getPendingCheckpoints().get(checkpointIdNew); assertEquals(checkpoint2Id, checkpointIdNew); assertNotNull(checkpointNew); assertEquals(checkpointIdNew, checkpointNew.getCheckpointId()); assertEquals(jid, checkpointNew.getJobId()); assertEquals(2, checkpointNew.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpointNew.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpointNew.getOperatorStates().size()); assertFalse(checkpointNew.isDiscarded()); assertFalse(checkpointNew.areTasksFullyAcknowledged()); assertNotEquals(checkpoint1.getCheckpointId(), checkpointNew.getCheckpointId()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID2, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint1.isDiscarded()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testTriggerAndConfirmSimpleCheckpoint() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(checkpoint); assertEquals(checkpointId, checkpoint.getCheckpointId()); assertEquals(jid, checkpoint.getJobId()); assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint.getOperatorStates().size()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); } OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class); TaskStateSnapshot 
taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1)).thenReturn(subtaskState1); when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2)).thenReturn(subtaskState2); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint(jid, attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(taskOperatorSubtaskStates2, never()).registerSharedStates(any(SharedStateRegistry.class)); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(subtaskState2, never()).registerSharedStates(any(SharedStateRegistry.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); { verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class)); verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class)); } { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); } CompletedCheckpoint success = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, success.getJobId()); assertEquals(checkpoint.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); CompletedCheckpoint successNew = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, successNew.getJobId()); assertEquals(checkpointIdNew, successNew.getCheckpointID()); assertTrue(successNew.getOperatorStates().isEmpty()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), 
any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); } coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testMultipleConcurrentCheckpoints() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID3 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); ExecutionVertex triggerVertex2 = mockExecutionVertex(triggerAttemptID2); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex ackVertex3 = mockExecutionVertex(ackAttemptID3); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2, ackVertex3 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId1), TASK_MANAGER_LOCATION_INFO); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = coord.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? 
cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertTrue(pending1.isDiscarded()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId1), any(Long.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(2, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertTrue(pending2.isDiscarded()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId2), any(Long.class)); List<CompletedCheckpoint> scs = coord.getSuccessfulCheckpoints(); CompletedCheckpoint sc1 = scs.get(0); assertEquals(checkpointId1, sc1.getCheckpointID()); assertEquals(jid, sc1.getJobId()); assertTrue(sc1.getOperatorStates().isEmpty()); CompletedCheckpoint sc2 = scs.get(1); assertEquals(checkpointId2, sc2.getCheckpointID()); assertEquals(jid, sc2.getJobId()); assertTrue(sc2.getOperatorStates().isEmpty()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testSuccessfulCheckpointSubsumesUnsuccessful() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID3 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); ExecutionVertex triggerVertex2 = mockExecutionVertex(triggerAttemptID2); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex ackVertex3 = mockExecutionVertex(ackAttemptID3); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2, ackVertex3 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(10)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); 
assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); OperatorID opID1 = OperatorID.fromJobVertexID(ackVertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(ackVertex2.getJobvertexId()); OperatorID opID3 = OperatorID.fromJobVertexID(ackVertex3.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates11 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates12 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates13 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState11 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState12 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState13 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates11.putSubtaskStateByOperatorID(opID1, subtaskState11); taskOperatorSubtaskStates12.putSubtaskStateByOperatorID(opID2, subtaskState12); taskOperatorSubtaskStates13.putSubtaskStateByOperatorID(opID3, subtaskState13); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates12), TASK_MANAGER_LOCATION_INFO); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = coord.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? 
cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); TaskStateSnapshot taskOperatorSubtaskStates21 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates22 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates23 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState21 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState22 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState23 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates21.putSubtaskStateByOperatorID(opID1, subtaskState21); taskOperatorSubtaskStates22.putSubtaskStateByOperatorID(opID2, subtaskState22); taskOperatorSubtaskStates23.putSubtaskStateByOperatorID(opID3, subtaskState23); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates23), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates21), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates11), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates22), TASK_MANAGER_LOCATION_INFO); assertTrue(pending1.isDiscarded()); assertTrue(pending2.isDiscarded()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState11, times(1)).discardState(); verify(subtaskState12, times(1)).discardState(); verify(subtaskState21, never()).discardState(); verify(subtaskState22, never()).discardState(); verify(subtaskState23, never()).discardState(); List<CompletedCheckpoint> scs = coord.getSuccessfulCheckpoints(); CompletedCheckpoint success = scs.get(0); assertEquals(checkpointId2, success.getCheckpointID()); assertEquals(jid, success.getJobId()); assertEquals(3, success.getOperatorStates().size()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId2), any(Long.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates13), TASK_MANAGER_LOCATION_INFO); verify(subtaskState13, times(1)).discardState(); coord.shutdown(JobStatus.FINISHED); verify(subtaskState21, times(1)).discardState(); verify(subtaskState22, times(1)).discardState(); verify(subtaskState23, times(1)).discardState(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointTimeoutIsolated() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); 
ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().values().iterator().next(); assertFalse(checkpoint.isDiscarded()); OperatorID opID1 = OperatorID.fromJobVertexID(ackVertex1.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpoint.getCheckpointId(), new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); manuallyTriggeredScheduledExecutor.triggerScheduledTasks(); assertTrue("Checkpoint was not canceled by the timeout", checkpoint.isDiscarded()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState1, times(1)).discardState(); verify(commitVertex.getCurrentExecutionAttempt(), times(0)).notifyCheckpointComplete(anyLong(), anyLong()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testHandleMessagesForNonExistingCheckpoints() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); long checkpointId = coord.getPendingCheckpoints().keySet().iterator().next(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), ackAttemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, 1L), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new 
AcknowledgeCheckpoint(jid, new ExecutionAttemptID(), checkpointId), TASK_MANAGER_LOCATION_INFO); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Tests that late acknowledge checkpoint messages are properly cleaned up. Furthermore it tests * that unknown checkpoint messages for the same job a are cleaned up as well. In contrast * checkpointing messages from other jobs should not be touched. A late acknowledge * message is an acknowledge message which arrives after the checkpoint has been declined. * * @throws Exception */ @Test public void testStateCleanupForLateOrUnknownMessages() throws Exception { final JobID jobId = new JobID(); final ExecutionAttemptID triggerAttemptId = new ExecutionAttemptID(); final ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptId); final ExecutionAttemptID ackAttemptId1 = new ExecutionAttemptID(); final ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptId1); final ExecutionAttemptID ackAttemptId2 = new ExecutionAttemptID(); final ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptId2); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jobId) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] {triggerVertex, ackVertex1, ackVertex2}) .setTasksToCommitTo(new ExecutionVertex[0]) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId = pendingCheckpoint.getCheckpointId(); OperatorID opIDtrigger = OperatorID.fromJobVertexID(triggerVertex.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStatesTrigger = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskStateTrigger = mock(OperatorSubtaskState.class); taskOperatorSubtaskStatesTrigger.putSubtaskStateByOperatorID(opIDtrigger, subtaskStateTrigger); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, triggerAttemptId, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStatesTrigger), TASK_MANAGER_LOCATION_INFO); verify(subtaskStateTrigger, never()).discardState(); TaskStateSnapshot unknownSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState, times(1)).discardState(); TaskStateSnapshot differentJobSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot triggerSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, triggerAttemptId, checkpointId, new CheckpointMetrics(), triggerSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(triggerSubtaskState, 
never()).discardState(); reset(subtaskStateTrigger); coord.receiveDeclineMessage(new DeclineCheckpoint(jobId, ackAttemptId1, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(pendingCheckpoint.isDiscarded()); verify(subtaskStateTrigger, times(1)).discardState(); TaskStateSnapshot ackSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, ackAttemptId2, checkpointId, new CheckpointMetrics(), ackSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(ackSubtaskState, times(1)).discardState(); reset(differentJobSubtaskState); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot unknownSubtaskState2 = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState2), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState2, times(1)).discardState(); } @Test public void testMaxConcurrentAttempts1() { testMaxConcurrentAttempts(1); } @Test public void testMaxConcurrentAttempts2() { testMaxConcurrentAttempts(2); } @Test public void testMaxConcurrentAttempts5() { testMaxConcurrentAttempts(5); } @Test public void testTriggerAndConfirmSimpleSavepoint() throws Exception { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint pending = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(pending); assertEquals(checkpointId, pending.getCheckpointId()); assertEquals(jid, pending.getJobId()); assertEquals(2, pending.getNumberOfNonAcknowledgedTasks()); assertEquals(0, pending.getNumberOfAcknowledgedTasks()); assertEquals(0, pending.getOperatorStates().size()); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(pending.canBeSubsumed()); OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class); TaskStateSnapshot taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1)).thenReturn(subtaskState1); when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2)).thenReturn(subtaskState2); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint(jid, attemptID2, 
checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertEquals(1, pending.getNumberOfAcknowledgedTasks()); assertEquals(1, pending.getNumberOfNonAcknowledgedTasks()); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertTrue(pending.isDiscarded()); assertNotNull(savepointFuture.get()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId), any(Long.class)); } { verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class)); verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class)); } CompletedCheckpoint success = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, success.getJobId()); assertEquals(pending.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); savepointFuture = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); CompletedCheckpoint successNew = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, successNew.getJobId()); assertEquals(checkpointIdNew, successNew.getCheckpointID()); assertTrue(successNew.getOperatorStates().isEmpty()); assertNotNull(savepointFuture.get()); verify(subtaskState1, never()).discardState(); verify(subtaskState2, never()).discardState(); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); } coord.shutdown(JobStatus.FINISHED); } /** * Triggers a savepoint and two checkpoints. The second checkpoint completes * and subsumes the first checkpoint, but not the first savepoint. Then we * trigger another checkpoint and savepoint. The 2nd savepoint completes and * subsumes the last checkpoint, but not the first savepoint. 
*/ @Test public void testSavepointsAreNotSubsumed() throws Exception { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); StandaloneCheckpointIDCounter counter = new StandaloneCheckpointIDCounter(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setCheckpointIDCounter(counter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(10)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture1 = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId1 = counter.getLast(); assertEquals(1, coord.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertFalse(checkpointFuture1.isCompletedExceptionally()); CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); long checkpointId2 = counter.getLast(); assertEquals(3, coord.getNumberOfPendingCheckpoints()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(coord.getPendingCheckpoints().get(savepointId1).isDiscarded()); assertFalse(savepointFuture1.isDone()); CompletableFuture<CompletedCheckpoint> checkpointFuture3 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture3.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> savepointFuture2 = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId2 = counter.getLast(); assertFalse(savepointFuture2.isCompletedExceptionally()); assertEquals(3, coord.getNumberOfPendingCheckpoints()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, savepointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, savepointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(2, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(coord.getPendingCheckpoints().get(savepointId1).isDiscarded()); assertFalse(savepointFuture1.isDone()); assertNotNull(savepointFuture2.get()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, savepointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, savepointId1), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); 
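/* completing the first savepoint leaves nothing pending; the store retains the second checkpoint, the second savepoint and the first savepoint, because savepoints are never subsumed */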
assertEquals(3, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertNotNull(savepointFuture1.get()); } private void testMaxConcurrentAttempts(int maxConcurrentAttempts) { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); final AtomicInteger numCalls = new AtomicInteger(); final Execution execution = triggerVertex.getCurrentExecutionAttempt(); doAnswer(invocation -> { numCalls.incrementAndGet(); return null; }).when(execution).triggerCheckpoint(anyLong(), anyLong(), any(CheckpointOptions.class)); doAnswer(invocation -> { numCalls.incrementAndGet(); return null; }).when(execution).notifyCheckpointComplete(anyLong(), anyLong()); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); for (int i = 0; i < maxConcurrentAttempts; i++) { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } assertEquals(maxConcurrentAttempts, numCalls.get()); verify(triggerVertex.getCurrentExecutionAttempt(), times(maxConcurrentAttempts)) .triggerCheckpoint(anyLong(), anyLong(), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID, 1L), TASK_MANAGER_LOCATION_INFO); final Collection<ScheduledFuture<?>> periodicScheduledTasks = manuallyTriggeredScheduledExecutor.getPeriodicScheduledTask(); assertEquals(1, periodicScheduledTasks.size()); final ScheduledFuture<?> scheduledFuture = periodicScheduledTasks.iterator().next(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(maxConcurrentAttempts + 1, numCalls.get()); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(maxConcurrentAttempts + 1, numCalls.get()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testMaxConcurrentAttemptsWithSubsumption() { try { final int maxConcurrentAttempts = 2; final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinatorConfiguration chkConfig = new
CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (coord.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, coord.getNumberOfPendingCheckpoints()); assertNotNull(coord.getPendingCheckpoints().get(1L)); assertNotNull(coord.getPendingCheckpoints().get(2L)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID, 2L), TASK_MANAGER_LOCATION_INFO); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (coord.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, coord.getNumberOfPendingCheckpoints()); assertNotNull(coord.getPendingCheckpoints().get(3L)); assertNotNull(coord.getPendingCheckpoints().get(4L)); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testPeriodicSchedulingWithInactiveTasks() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); final AtomicReference<ExecutionState> currentState = new AtomicReference<>(ExecutionState.CREATED); when(triggerVertex.getCurrentExecutionAttempt().getState()).thenAnswer(invocation -> currentState.get()); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0) .setMaxConcurrentCheckpoints(2) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); currentState.set(ExecutionState.RUNNING); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(coord.getNumberOfPendingCheckpoints() > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Tests that the savepoints can be 
triggered concurrently. */ @Test public void testConcurrentSavepoints() throws Exception { JobID jobId = new JobID(); int numSavepoints = 5; final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); StandaloneCheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jobId) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasks(new ExecutionVertex[] { vertex1 }) .setCheckpointIDCounter(checkpointIDCounter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); List<CompletableFuture<CompletedCheckpoint>> savepointFutures = new ArrayList<>(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); for (int i = 0; i < numSavepoints; i++) { savepointFutures.add(coord.triggerSavepoint(savepointDir)); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertFalse(savepointFuture.isDone()); } manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId = checkpointIDCounter.getLast(); for (int i = 0; i < numSavepoints; i++, checkpointId--) { coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertNotNull(savepointFuture.get()); } } /** * Tests that no minimum delay between savepoints is enforced. */ @Test public void testMinDelayBetweenSavepoints() throws Exception { CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMinPauseBetweenCheckpoints(100000000L) .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepoint0 = coord.triggerSavepoint(savepointDir); assertFalse("Did not trigger savepoint", savepoint0.isDone()); CompletableFuture<CompletedCheckpoint> savepoint1 = coord.triggerSavepoint(savepointDir); assertFalse("Did not trigger savepoint", savepoint1.isDone()); } /** * Tests that the externalized checkpoint configuration is respected. 
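* Every pending checkpoint must carry the retention policy that was configured on the coordinator.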
*/ @Test public void testExternalizedCheckpoints() throws Exception { try { CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointRetentionPolicy(CheckpointRetentionPolicy.RETAIN_ON_FAILURE) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setTimer(manuallyTriggeredScheduledExecutor) .build(); CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); for (PendingCheckpoint checkpoint : coord.getPendingCheckpoints().values()) { CheckpointProperties props = checkpoint.getProps(); CheckpointProperties expected = CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.RETAIN_ON_FAILURE); assertEquals(expected, props); } coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCreateKeyGroupPartitions() { testCreateKeyGroupPartitions(1, 1); testCreateKeyGroupPartitions(13, 1); testCreateKeyGroupPartitions(13, 2); testCreateKeyGroupPartitions(Short.MAX_VALUE, 1); testCreateKeyGroupPartitions(Short.MAX_VALUE, 13); testCreateKeyGroupPartitions(Short.MAX_VALUE, Short.MAX_VALUE); Random r = new Random(1234); for (int k = 0; k < 1000; ++k) { int maxParallelism = 1 + r.nextInt(Short.MAX_VALUE - 1); int parallelism = 1 + r.nextInt(maxParallelism); testCreateKeyGroupPartitions(maxParallelism, parallelism); } } private void testCreateKeyGroupPartitions(int maxParallelism, int parallelism) { List<KeyGroupRange> ranges = StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, parallelism); for (int i = 0; i < maxParallelism; ++i) { KeyGroupRange range = ranges.get(KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, parallelism, i)); if (!range.contains(i)) { Assert.fail("Could not find expected key-group " + i + " in range " + range); } } } @Test public void testPartitionableStateRepartitioning() { Random r = new Random(42); for (int run = 0; run < 10000; ++run) { int oldParallelism = 1 + r.nextInt(9); int newParallelism = 1 + r.nextInt(9); int numNamedStates = 1 + r.nextInt(9); int maxPartitionsPerState = 1 + r.nextInt(9); doTestPartitionableStateRepartitioning( r, oldParallelism, newParallelism, numNamedStates, maxPartitionsPerState); } } private void doTestPartitionableStateRepartitioning( Random r, int oldParallelism, int newParallelism, int numNamedStates, int maxPartitionsPerState) { List<List<OperatorStateHandle>> previousParallelOpInstanceStates = new ArrayList<>(oldParallelism); for (int i = 0; i < oldParallelism; ++i) { Path fakePath = new Path("/fake-" + i); Map<String, OperatorStateHandle.StateMetaInfo> namedStatesToOffsets = new HashMap<>(); int off = 0; for (int s = 0; s < numNamedStates - 1; ++s) { long[] offs = new long[1 + r.nextInt(maxPartitionsPerState)]; for (int o = 0; o < offs.length; ++o) { offs[o] = off; ++off; } OperatorStateHandle.Mode mode = r.nextInt(10) == 0 ? 
OperatorStateHandle.Mode.UNION : OperatorStateHandle.Mode.SPLIT_DISTRIBUTE; namedStatesToOffsets.put( "State-" + s, new OperatorStateHandle.StateMetaInfo(offs, mode)); } if (numNamedStates % 2 == 0) { long[] offs = {off + 1, off + 2, off + 3, off + 4}; namedStatesToOffsets.put( "State-" + (numNamedStates - 1), new OperatorStateHandle.StateMetaInfo(offs, OperatorStateHandle.Mode.BROADCAST)); } previousParallelOpInstanceStates.add( Collections.singletonList(new OperatorStreamStateHandle(namedStatesToOffsets, new FileStateHandle(fakePath, -1)))); } Map<StreamStateHandle, Map<String, List<Long>>> expected = new HashMap<>(); int taskIndex = 0; int expectedTotalPartitions = 0; for (List<OperatorStateHandle> previousParallelOpInstanceState : previousParallelOpInstanceStates) { Assert.assertEquals(1, previousParallelOpInstanceState.size()); for (OperatorStateHandle psh : previousParallelOpInstanceState) { Map<String, OperatorStateHandle.StateMetaInfo> offsMap = psh.getStateNameToPartitionOffsets(); Map<String, List<Long>> offsMapWithList = new HashMap<>(offsMap.size()); for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> e : offsMap.entrySet()) { long[] offs = e.getValue().getOffsets(); int replication; switch (e.getValue().getDistributionMode()) { case UNION: replication = newParallelism; break; case BROADCAST: int extra = taskIndex < (newParallelism % oldParallelism) ? 1 : 0; replication = newParallelism / oldParallelism + extra; break; case SPLIT_DISTRIBUTE: replication = 1; break; default: throw new RuntimeException("Unknown distribution mode " + e.getValue().getDistributionMode()); } if (replication > 0) { expectedTotalPartitions += replication * offs.length; List<Long> offsList = new ArrayList<>(offs.length); for (long off : offs) { for (int p = 0; p < replication; ++p) { offsList.add(off); } } offsMapWithList.put(e.getKey(), offsList); } } if (!offsMapWithList.isEmpty()) { expected.put(psh.getDelegateStateHandle(), offsMapWithList); } taskIndex++; } } OperatorStateRepartitioner repartitioner = RoundRobinOperatorStateRepartitioner.INSTANCE; List<List<OperatorStateHandle>> pshs = repartitioner.repartitionState(previousParallelOpInstanceStates, oldParallelism, newParallelism); Map<StreamStateHandle, Map<String, List<Long>>> actual = new HashMap<>(); int minCount = Integer.MAX_VALUE; int maxCount = 0; int actualTotalPartitions = 0; for (int p = 0; p < newParallelism; ++p) { int partitionCount = 0; Collection<OperatorStateHandle> pshc = pshs.get(p); for (OperatorStateHandle sh : pshc) { for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> namedState : sh.getStateNameToPartitionOffsets().entrySet()) { Map<String, List<Long>> stateToOffsets = actual.get(sh.getDelegateStateHandle()); if (stateToOffsets == null) { stateToOffsets = new HashMap<>(); actual.put(sh.getDelegateStateHandle(), stateToOffsets); } List<Long> actualOffs = stateToOffsets.get(namedState.getKey()); if (actualOffs == null) { actualOffs = new ArrayList<>(); stateToOffsets.put(namedState.getKey(), actualOffs); } long[] add = namedState.getValue().getOffsets(); for (long l : add) { actualOffs.add(l); } partitionCount += namedState.getValue().getOffsets().length; } } minCount = Math.min(minCount, partitionCount); maxCount = Math.max(maxCount, partitionCount); actualTotalPartitions += partitionCount; } for (Map<String, List<Long>> v : actual.values()) { for (List<Long> l : v.values()) { Collections.sort(l); } } if (oldParallelism != newParallelism) { int maxLoadDiff = maxCount - minCount; Assert.assertTrue("Difference in 
partition load is > 1 : " + maxLoadDiff, maxLoadDiff <= 1); } Assert.assertEquals(expectedTotalPartitions, actualTotalPartitions); Assert.assertEquals(expected, actual); } /** * Tests that the pending checkpoint stats callbacks are created. */ @Test public void testCheckpointStatsTrackerPendingCheckpointCallback() throws Exception { CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .build(); CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); coord.setCheckpointStatsTracker(tracker); when(tracker.reportPendingCheckpoint(anyLong(), anyLong(), any(CheckpointProperties.class))) .thenReturn(mock(PendingCheckpointStats.class)); CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); verify(tracker, times(1)) .reportPendingCheckpoint(eq(1L), any(Long.class), eq(CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION))); } /** * Tests that the restore callbacks are called if registered. */ @Test public void testCheckpointStatsTrackerRestoreCallback() throws Exception { StandaloneCompletedCheckpointStore store = new StandaloneCompletedCheckpointStore(1); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(store) .setTimer(manuallyTriggeredScheduledExecutor) .build(); store.addCheckpoint(new CompletedCheckpoint( new JobID(), 0, 0, 0, Collections.<OperatorID, OperatorState>emptyMap(), Collections.<MasterState>emptyList(), CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new TestCompletedCheckpointStorageLocation())); CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); coord.setCheckpointStatsTracker(tracker); assertTrue(coord.restoreLatestCheckpointedStateToAll(Collections.emptySet(), true)); verify(tracker, times(1)) .reportRestoredCheckpoint(any(RestoredCheckpointStats.class)); } @Test public void testSharedStateRegistrationOnRestore() throws Exception { final JobID jid = new JobID(); final JobVertexID jobVertexID1 = new JobVertexID(); int parallelism1 = 2; int maxParallelism1 = 4; final ExecutionJobVertex jobVertex1 = mockExecutionJobVertex( jobVertexID1, parallelism1, maxParallelism1); List<ExecutionVertex> allExecutionVertices = new ArrayList<>(parallelism1); allExecutionVertices.addAll(Arrays.asList(jobVertex1.getTaskVertices())); ExecutionVertex[] arrayExecutionVertices = allExecutionVertices.toArray(new ExecutionVertex[allExecutionVertices.size()]); RecoverableCompletedCheckpointStore store = new RecoverableCompletedCheckpointStore(10); final List<SharedStateRegistry> createdSharedStateRegistries = new ArrayList<>(2); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasks(arrayExecutionVertices) .setCompletedCheckpointStore(store) .setTimer(manuallyTriggeredScheduledExecutor) .setSharedStateRegistryFactory( deleteExecutor -> { SharedStateRegistry instance = new SharedStateRegistry(deleteExecutor); createdSharedStateRegistries.add(instance); return instance; }) .build(); final int numCheckpoints = 3; List<KeyGroupRange> keyGroupPartitions1 = StateAssignmentOperation.createKeyGroupPartitions(maxParallelism1, parallelism1); for (int i = 0; i < numCheckpoints; ++i) { performIncrementalCheckpoint(jid, coord, jobVertex1, keyGroupPartitions1, i); } List<CompletedCheckpoint> completedCheckpoints = 
coord.getSuccessfulCheckpoints(); assertEquals(numCheckpoints, completedCheckpoints.size()); int sharedHandleCount = 0; List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint = new ArrayList<>(numCheckpoints); for (int i = 0; i < numCheckpoints; ++i) { sharedHandlesByCheckpoint.add(new HashMap<>(2)); } int cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { verify(keyedStateHandle, times(1)).registerSharedStates(createdSharedStateRegistries.get(0)); IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle = (IncrementalRemoteKeyedStateHandle) keyedStateHandle; sharedHandlesByCheckpoint.get(cp).putAll(incrementalKeyedStateHandle.getSharedState()); for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getSharedState().values()) { assertTrue(!(streamStateHandle instanceof PlaceholderStreamStateHandle)); verify(streamStateHandle, never()).discardState(); ++sharedHandleCount; } for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getPrivateState().values()) { verify(streamStateHandle, never()).discardState(); } verify(incrementalKeyedStateHandle.getMetaStateHandle(), never()).discardState(); } verify(subtaskState, never()).discardState(); } } ++cp; } assertEquals(10, sharedHandleCount); store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, never()).discardState(); } } store.shutdown(JobStatus.SUSPENDED); Set<ExecutionJobVertex> tasks = new HashSet<>(); tasks.add(jobVertex1); assertTrue(coord.restoreLatestCheckpointedStateToAll(tasks, false)); cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { VerificationMode verificationMode; if (cp > 0) { verificationMode = times(1); } else { verificationMode = never(); } verify(keyedStateHandle, verificationMode).registerSharedStates(createdSharedStateRegistries.get(1)); } } } ++cp; } store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (Map.Entry<StateHandleID, StreamStateHandle> entry : cpList.entrySet()) { String key = entry.getKey().getKeyString(); int belongToCP = Integer.parseInt(String.valueOf(key.charAt(key.length() - 1))); if (belongToCP == 0) { verify(entry.getValue(), times(1)).discardState(); } else { verify(entry.getValue(), never()).discardState(); } } } store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, times(1)).discardState(); } } } @Test public void jobFailsIfInFlightSynchronousSavepointIsDiscarded() throws Exception { final Tuple2<Integer, Throwable> invocationCounterAndException = Tuple2.of(0, null); final Throwable expectedRootCause = new IOException("Custom-Exception"); final JobID jobId = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new 
ExecutionAttemptID(); final ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); final ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); final CheckpointCoordinator coordinator = getCheckpointCoordinator(jobId, vertex1, vertex2, new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { invocationCounterAndException.f0 += 1; invocationCounterAndException.f1 = cause; } @Override public void failJobDueToTaskFailure(Throwable cause, ExecutionAttemptID failingTask) { throw new AssertionError("This method should not be called for the test."); } })); final CompletableFuture<CompletedCheckpoint> savepointFuture = coordinator .triggerSynchronousSavepoint(false, "test-dir"); manuallyTriggeredScheduledExecutor.triggerAll(); final PendingCheckpoint syncSavepoint = declineSynchronousSavepoint(jobId, coordinator, attemptID1, expectedRootCause); assertTrue(syncSavepoint.isDiscarded()); try { savepointFuture.get(); fail("Expected Exception not found."); } catch (ExecutionException e) { final Throwable cause = ExceptionUtils.stripExecutionException(e); assertTrue(cause instanceof CheckpointException); assertEquals(expectedRootCause.getMessage(), cause.getCause().getMessage()); } assertEquals(1L, invocationCounterAndException.f0.intValue()); assertTrue( invocationCounterAndException.f1 instanceof CheckpointException && invocationCounterAndException.f1.getCause().getMessage().equals(expectedRootCause.getMessage())); coordinator.shutdown(JobStatus.FAILING); } /** * Tests that no checkpoint is triggered when the coordinator is stopped after the eager pre-check. */ @Test public void testTriggerCheckpointAfterCancel() throws Exception { TestingCheckpointIDCounter idCounter = new TestingCheckpointIDCounter(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointIDCounter(idCounter) .setTimer(manuallyTriggeredScheduledExecutor) .build(); idCounter.setOwner(coord); try { coord.startCheckpointScheduler(); final CompletableFuture<CompletedCheckpoint> onCompletionPromise = coord.triggerCheckpoint( CheckpointProperties .forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), null, true, false); manuallyTriggeredScheduledExecutor.triggerAll(); try { onCompletionPromise.get(); fail("should not trigger a periodic checkpoint after the coordinator has been stopped."); } catch (ExecutionException e) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(e, CheckpointException.class); assertTrue(checkpointExceptionOptional.isPresent()); assertEquals(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN, checkpointExceptionOptional.get().getCheckpointFailureReason()); } } finally { coord.shutdown(JobStatus.FINISHED); } } @Test public void testSavepointScheduledInUnalignedMode() throws Exception { int maxConcurrentCheckpoints = 1; int checkpointRequestsToSend = 10; int activeRequests = 0; JobID jobId = new JobID(); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration .builder() .setUnalignedCheckpointsEnabled(true) .setMaxConcurrentCheckpoints(maxConcurrentCheckpoints) .build()) .setJobId(jobId) .setTimer(manuallyTriggeredScheduledExecutor) .build(); try { List<Future<?>> checkpointFutures = new ArrayList<>(checkpointRequestsToSend); coordinator.startCheckpointScheduler(); while (activeRequests < checkpointRequestsToSend) { checkpointFutures.add(coordinator.triggerCheckpoint(true));
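/* every request beyond maxConcurrentCheckpoints cannot start immediately and is queued by the coordinator */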
activeRequests++; } assertEquals(activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); Future<?> savepointFuture = coordinator.triggerSavepoint("/tmp"); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(++activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); coordinator.receiveDeclineMessage(new DeclineCheckpoint(jobId, new ExecutionAttemptID(), 1L), "none"); manuallyTriggeredScheduledExecutor.triggerAll(); activeRequests--; assertEquals(activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); assertEquals(1, checkpointFutures.stream().filter(Future::isDone).count()); assertFalse(savepointFuture.isDone()); assertEquals(maxConcurrentCheckpoints, coordinator.getNumberOfPendingCheckpoints()); CheckpointProperties props = coordinator.getPendingCheckpoints().values().iterator().next().getProps(); assertTrue(props.isSavepoint()); assertFalse(props.forceCheckpoint()); } finally { coordinator.shutdown(JobStatus.FINISHED); } } private CheckpointCoordinator getCheckpointCoordinator( JobID jobId, ExecutionVertex vertex1, ExecutionVertex vertex2) { return new CheckpointCoordinatorBuilder() .setJobId(jobId) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(); } private CheckpointCoordinator getCheckpointCoordinator( JobID jobId, ExecutionVertex vertex1, ExecutionVertex vertex2, CheckpointFailureManager failureManager) { return new CheckpointCoordinatorBuilder() .setJobId(jobId) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setTimer(manuallyTriggeredScheduledExecutor) .setFailureManager(failureManager) .build(); } private CheckpointCoordinator getCheckpointCoordinator() { final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionVertex triggerVertex2 = mockExecutionVertex( triggerAttemptID2, jobVertexID2, Collections.singletonList(OperatorID.fromJobVertexID(jobVertexID2)), 1, 1, ExecutionState.FINISHED); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); return new CheckpointCoordinatorBuilder() .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] {}) .setTimer(manuallyTriggeredScheduledExecutor) .build(); } private CheckpointFailureManager getCheckpointFailureManager(String errorMsg) { return new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { throw new RuntimeException(errorMsg); } @Override public void failJobDueToTaskFailure(Throwable cause, ExecutionAttemptID failingTask) { throw new RuntimeException(errorMsg); } }); } private PendingCheckpoint declineSynchronousSavepoint( final JobID jobId, final CheckpointCoordinator coordinator, final ExecutionAttemptID attemptID, final Throwable reason) { final long checkpointId =
coordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); final PendingCheckpoint checkpoint = coordinator.getPendingCheckpoints().get(checkpointId); coordinator.receiveDeclineMessage(new DeclineCheckpoint(jobId, attemptID, checkpointId, reason), TASK_MANAGER_LOCATION_INFO); return checkpoint; } private void performIncrementalCheckpoint( JobID jid, CheckpointCoordinator coord, ExecutionJobVertex jobVertex1, List<KeyGroupRange> keyGroupPartitions1, int cpSequenceNumber) throws Exception { coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(1, coord.getPendingCheckpoints().size()); long checkpointId = Iterables.getOnlyElement(coord.getPendingCheckpoints().keySet()); for (int index = 0; index < jobVertex1.getParallelism(); index++) { KeyGroupRange keyGroupRange = keyGroupPartitions1.get(index); Map<StateHandleID, StreamStateHandle> privateState = new HashMap<>(); privateState.put( new StateHandleID("private-1"), spy(new ByteStreamStateHandle("private-1", new byte[]{'p'}))); Map<StateHandleID, StreamStateHandle> sharedState = new HashMap<>(); if (cpSequenceNumber > 0) { sharedState.put( new StateHandleID("shared-" + (cpSequenceNumber - 1)), spy(new PlaceholderStreamStateHandle())); } sharedState.put( new StateHandleID("shared-" + cpSequenceNumber), spy(new ByteStreamStateHandle("shared-" + cpSequenceNumber + "-" + keyGroupRange, new byte[]{'s'}))); IncrementalRemoteKeyedStateHandle managedState = spy(new IncrementalRemoteKeyedStateHandle( new UUID(42L, 42L), keyGroupRange, checkpointId, sharedState, privateState, spy(new ByteStreamStateHandle("meta", new byte[]{'m'})))); OperatorSubtaskState operatorSubtaskState = spy(new OperatorSubtaskState( StateObjectCollection.empty(), StateObjectCollection.empty(), StateObjectCollection.singleton(managedState), StateObjectCollection.empty())); Map<OperatorID, OperatorSubtaskState> opStates = new HashMap<>(); opStates.put(jobVertex1.getOperatorIDs().get(0).getGeneratedOperatorID(), operatorSubtaskState); TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(opStates); AcknowledgeCheckpoint acknowledgeCheckpoint = new AcknowledgeCheckpoint( jid, jobVertex1.getTaskVertices()[index].getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointMetrics(), taskStateSnapshot); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint, TASK_MANAGER_LOCATION_INFO); } } private static class TestingCheckpointIDCounter extends StandaloneCheckpointIDCounter { private CheckpointCoordinator owner; @Override public long getAndIncrement() throws Exception { checkNotNull(owner); owner.stopCheckpointScheduler(); return super.getAndIncrement(); } void setOwner(CheckpointCoordinator coordinator) { this.owner = checkNotNull(coordinator); } } }
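/** Tests for the checkpoint coordinator: triggering, acknowledging and declining checkpoints, checkpoint subsumption, savepoint handling and operator state repartitioning. */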
class CheckpointCoordinatorTest extends TestLogger { private static final String TASK_MANAGER_LOCATION_INFO = "Unknown location"; private ManuallyTriggeredScheduledExecutor manuallyTriggeredScheduledExecutor; @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { manuallyTriggeredScheduledExecutor = new ManuallyTriggeredScheduledExecutor(); } @Test public void testCheckpointAbortsIfTriggerTasksAreNotExecuted() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointAbortsIfTriggerTasksAreFinished() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointAbortsIfAckTasksAreNotExecuted() { try { CheckpointCoordinator coord = getCheckpointCoordinator(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testTriggerAndDeclineCheckpointThenFailureManagerThrowsException() { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); final String errorMsg = "Exceeded checkpoint failure tolerance number!"; CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2, checkpointFailureManager); try { final CompletableFuture<CompletedCheckpoint> checkPointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkPointFuture.isCompletedExceptionally()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, 
checkpointId), TASK_MANAGER_LOCATION_INFO); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); fail("Test failed."); } catch (Exception e) { assertTrue(e instanceof RuntimeException); assertEquals(errorMsg, e.getMessage()); } finally { try { coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } } /** * This test triggers a checkpoint and then sends a decline checkpoint message from * one of the tasks. The expected behaviour is that said checkpoint is discarded and a new * checkpoint is triggered. */ @Test public void testTriggerAndDeclineCheckpointSimple() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(checkpoint); assertEquals(checkpointId, checkpoint.getCheckpointId()); assertEquals(jid, checkpoint.getJobId()); assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint.getOperatorStates().size()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(vertex1.getCurrentExecutionAttempt()).triggerCheckpoint(checkpointId, checkpoint.getCheckpointTimestamp(), CheckpointOptions.forCheckpointWithDefaultLocation()); verify(vertex2.getCurrentExecutionAttempt()).triggerCheckpoint(checkpointId, checkpoint.getCheckpointTimestamp(), CheckpointOptions.forCheckpointWithDefaultLocation()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId), TASK_MANAGER_LOCATION_INFO); assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId), TASK_MANAGER_LOCATION_INFO); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpointId),
TASK_MANAGER_LOCATION_INFO); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID2, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * This test triggers two checkpoints and then sends a decline message from one of the tasks * for the first checkpoint. This should discard the first checkpoint while not triggering * a new checkpoint because a later checkpoint is already in progress. */ @Test public void testTriggerAndDeclineCheckpointComplex() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(2, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); Iterator<Map.Entry<Long, PendingCheckpoint>> it = coord.getPendingCheckpoints().entrySet().iterator(); long checkpoint1Id = it.next().getKey(); long checkpoint2Id = it.next().getKey(); PendingCheckpoint checkpoint1 = coord.getPendingCheckpoints().get(checkpoint1Id); PendingCheckpoint checkpoint2 = coord.getPendingCheckpoints().get(checkpoint2Id); assertNotNull(checkpoint1); assertEquals(checkpoint1Id, checkpoint1.getCheckpointId()); assertEquals(jid, checkpoint1.getJobId()); assertEquals(2, checkpoint1.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint1.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint1.getOperatorStates().size()); assertFalse(checkpoint1.isDiscarded()); assertFalse(checkpoint1.areTasksFullyAcknowledged()); assertNotNull(checkpoint2); assertEquals(checkpoint2Id, checkpoint2.getCheckpointId()); assertEquals(jid, checkpoint2.getJobId()); assertEquals(2, checkpoint2.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint2.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint2.getOperatorStates().size()); assertFalse(checkpoint2.isDiscarded()); assertFalse(checkpoint2.areTasksFullyAcknowledged()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint1Id), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint1Id), any(Long.class), any(CheckpointOptions.class)); } { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint2Id), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpoint2Id), any(Long.class), any(CheckpointOptions.class)); } coord.receiveDeclineMessage(new 
DeclineCheckpoint(jid, attemptID1, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointAborted(eq(checkpoint1Id), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointAborted(eq(checkpoint1Id), any(Long.class)); assertTrue(checkpoint1.isDiscarded()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpointNew = coord.getPendingCheckpoints().get(checkpointIdNew); assertEquals(checkpoint2Id, checkpointIdNew); assertNotNull(checkpointNew); assertEquals(checkpointIdNew, checkpointNew.getCheckpointId()); assertEquals(jid, checkpointNew.getJobId()); assertEquals(2, checkpointNew.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpointNew.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpointNew.getOperatorStates().size()); assertFalse(checkpointNew.isDiscarded()); assertFalse(checkpointNew.areTasksFullyAcknowledged()); assertNotEquals(checkpoint1.getCheckpointId(), checkpointNew.getCheckpointId()); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID1, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); coord.receiveDeclineMessage(new DeclineCheckpoint(jid, attemptID2, checkpoint1Id), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint1.isDiscarded()); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointAborted(eq(checkpoint1Id), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointAborted(eq(checkpoint1Id), any(Long.class)); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testTriggerAndConfirmSimpleCheckpoint() { try { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(1, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(checkpoint); assertEquals(checkpointId, checkpoint.getCheckpointId()); assertEquals(jid, checkpoint.getJobId()); assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks()); assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(0, checkpoint.getOperatorStates().size()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); { verify(vertex1.getCurrentExecutionAttempt(), 
times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); } OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class); TaskStateSnapshot taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1)).thenReturn(subtaskState1); when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2)).thenReturn(subtaskState2); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint(jid, attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks()); assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks()); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(taskOperatorSubtaskStates2, never()).registerSharedStates(any(SharedStateRegistry.class)); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); assertFalse(checkpoint.isDiscarded()); assertFalse(checkpoint.areTasksFullyAcknowledged()); verify(subtaskState2, never()).registerSharedStates(any(SharedStateRegistry.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertTrue(checkpoint.isDiscarded()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); { verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class)); verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class)); } { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId), any(Long.class), any(CheckpointOptions.class)); } CompletedCheckpoint success = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, success.getJobId()); assertEquals(checkpoint.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getScheduledTasks().size()); CompletedCheckpoint successNew = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, successNew.getJobId()); 
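/* the acknowledgements for the second checkpoint carried no task state, so the completed checkpoint's operator states are empty */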
assertEquals(checkpointIdNew, successNew.getCheckpointID()); assertTrue(successNew.getOperatorStates().isEmpty()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); } coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testMultipleConcurrentCheckpoints() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID3 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); ExecutionVertex triggerVertex2 = mockExecutionVertex(triggerAttemptID2); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex ackVertex3 = mockExecutionVertex(ackAttemptID3); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2, ackVertex3 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId1), TASK_MANAGER_LOCATION_INFO); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, 
coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = coord.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertTrue(pending1.isDiscarded()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId1), any(Long.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(2, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertTrue(pending2.isDiscarded()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId2), any(Long.class)); List<CompletedCheckpoint> scs = coord.getSuccessfulCheckpoints(); CompletedCheckpoint sc1 = scs.get(0); assertEquals(checkpointId1, sc1.getCheckpointID()); assertEquals(jid, sc1.getJobId()); assertTrue(sc1.getOperatorStates().isEmpty()); CompletedCheckpoint sc2 = scs.get(1); assertEquals(checkpointId2, sc2.getCheckpointID()); assertEquals(jid, sc2.getJobId()); assertTrue(sc2.getOperatorStates().isEmpty()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testSuccessfulCheckpointSubsumesUnsuccessful() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID3 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); ExecutionVertex triggerVertex2 = mockExecutionVertex(triggerAttemptID2); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex ackVertex3 = mockExecutionVertex(ackAttemptID3); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) 
.setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2, ackVertex3 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(10)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture1.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId1), any(Long.class), any(CheckpointOptions.class)); OperatorID opID1 = OperatorID.fromJobVertexID(ackVertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(ackVertex2.getJobvertexId()); OperatorID opID3 = OperatorID.fromJobVertexID(ackVertex3.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates11 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates12 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates13 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState11 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState12 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState13 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates11.putSubtaskStateByOperatorID(opID1, subtaskState11); taskOperatorSubtaskStates12.putSubtaskStateByOperatorID(opID2, subtaskState12); taskOperatorSubtaskStates13.putSubtaskStateByOperatorID(opID3, subtaskState13); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates12), TASK_MANAGER_LOCATION_INFO); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = coord.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? 
cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); TaskStateSnapshot taskOperatorSubtaskStates21 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates22 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates23 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState21 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState22 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState23 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates21.putSubtaskStateByOperatorID(opID1, subtaskState21); taskOperatorSubtaskStates22.putSubtaskStateByOperatorID(opID2, subtaskState22); taskOperatorSubtaskStates23.putSubtaskStateByOperatorID(opID3, subtaskState23); verify(triggerVertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); verify(triggerVertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointId2), any(Long.class), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates23), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates21), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates11), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID2, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates22), TASK_MANAGER_LOCATION_INFO); assertTrue(pending1.isDiscarded()); assertTrue(pending2.isDiscarded()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState11, times(1)).discardState(); verify(subtaskState12, times(1)).discardState(); verify(subtaskState21, never()).discardState(); verify(subtaskState22, never()).discardState(); verify(subtaskState23, never()).discardState(); List<CompletedCheckpoint> scs = coord.getSuccessfulCheckpoints(); CompletedCheckpoint success = scs.get(0); assertEquals(checkpointId2, success.getCheckpointID()); assertEquals(jid, success.getJobId()); assertEquals(3, success.getOperatorStates().size()); verify(commitVertex.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId2), any(Long.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID3, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates13), TASK_MANAGER_LOCATION_INFO); verify(subtaskState13, times(1)).discardState(); coord.shutdown(JobStatus.FINISHED); verify(subtaskState21, times(1)).discardState(); verify(subtaskState22, times(1)).discardState(); verify(subtaskState23, times(1)).discardState(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCheckpointTimeoutIsolated() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); 
ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); PendingCheckpoint checkpoint = coord.getPendingCheckpoints().values().iterator().next(); assertFalse(checkpoint.isDiscarded()); OperatorID opID1 = OperatorID.fromJobVertexID(ackVertex1.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, checkpoint.getCheckpointId(), new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); manuallyTriggeredScheduledExecutor.triggerScheduledTasks(); assertTrue("Checkpoint was not canceled by the timeout", checkpoint.isDiscarded()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState1, times(1)).discardState(); verify(commitVertex.getCurrentExecutionAttempt(), times(0)).notifyCheckpointComplete(anyLong(), anyLong()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testHandleMessagesForNonExistingCheckpoints() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); long checkpointId = coord.getPendingCheckpoints().keySet().iterator().next(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), ackAttemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID1, 1L), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new 
AcknowledgeCheckpoint(jid, new ExecutionAttemptID(), checkpointId), TASK_MANAGER_LOCATION_INFO); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Tests that late acknowledge checkpoint messages are properly cleaned up. Furthermore, it tests * that unknown checkpoint messages for the same job are cleaned up as well. In contrast, * checkpoint messages from other jobs should not be touched. A late acknowledge * message is an acknowledge message which arrives after the checkpoint has been declined. * * @throws Exception */ @Test public void testStateCleanupForLateOrUnknownMessages() throws Exception { final JobID jobId = new JobID(); final ExecutionAttemptID triggerAttemptId = new ExecutionAttemptID(); final ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptId); final ExecutionAttemptID ackAttemptId1 = new ExecutionAttemptID(); final ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptId1); final ExecutionAttemptID ackAttemptId2 = new ExecutionAttemptID(); final ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptId2); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jobId) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] {triggerVertex, ackVertex1, ackVertex2}) .setTasksToCommitTo(new ExecutionVertex[0]) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = coord.getPendingCheckpoints().values().iterator().next(); long checkpointId = pendingCheckpoint.getCheckpointId(); OperatorID opIDtrigger = OperatorID.fromJobVertexID(triggerVertex.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStatesTrigger = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskStateTrigger = mock(OperatorSubtaskState.class); taskOperatorSubtaskStatesTrigger.putSubtaskStateByOperatorID(opIDtrigger, subtaskStateTrigger); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, triggerAttemptId, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStatesTrigger), TASK_MANAGER_LOCATION_INFO); verify(subtaskStateTrigger, never()).discardState(); TaskStateSnapshot unknownSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState, times(1)).discardState(); TaskStateSnapshot differentJobSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot triggerSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, triggerAttemptId, checkpointId, new CheckpointMetrics(), triggerSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(triggerSubtaskState,
never()).discardState(); reset(subtaskStateTrigger); coord.receiveDeclineMessage(new DeclineCheckpoint(jobId, ackAttemptId1, checkpointId), TASK_MANAGER_LOCATION_INFO); assertTrue(pendingCheckpoint.isDiscarded()); verify(subtaskStateTrigger, times(1)).discardState(); TaskStateSnapshot ackSubtaskState = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, ackAttemptId2, checkpointId, new CheckpointMetrics(), ackSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(ackSubtaskState, times(1)).discardState(); reset(differentJobSubtaskState); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot unknownSubtaskState2 = mock(TaskStateSnapshot.class); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState2), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState2, times(1)).discardState(); } @Test public void testMaxConcurrentAttempts1() { testMaxConcurrentAttempts(1); } @Test public void testMaxConcurrentAttempts2() { testMaxConcurrentAttempts(2); } @Test public void testMaxConcurrentAttempts5() { testMaxConcurrentAttempts(5); } @Test public void testTriggerAndConfirmSimpleSavepoint() throws Exception { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); CheckpointCoordinator coord = getCheckpointCoordinator(jid, vertex1, vertex2); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints()); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); assertEquals(1, coord.getNumberOfPendingCheckpoints()); long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint pending = coord.getPendingCheckpoints().get(checkpointId); assertNotNull(pending); assertEquals(checkpointId, pending.getCheckpointId()); assertEquals(jid, pending.getJobId()); assertEquals(2, pending.getNumberOfNonAcknowledgedTasks()); assertEquals(0, pending.getNumberOfAcknowledgedTasks()); assertEquals(0, pending.getOperatorStates().size()); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(pending.canBeSubsumed()); OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class); TaskStateSnapshot taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1)).thenReturn(subtaskState1); when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2)).thenReturn(subtaskState2); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint(jid, attemptID2, 
checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertEquals(1, pending.getNumberOfAcknowledgedTasks()); assertEquals(1, pending.getNumberOfNonAcknowledgedTasks()); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertFalse(pending.isDiscarded()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertTrue(pending.isDiscarded()); assertNotNull(savepointFuture.get()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, coord.getNumberOfPendingCheckpoints()); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointId), any(Long.class)); } { verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class)); verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class)); } CompletedCheckpoint success = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, success.getJobId()); assertEquals(pending.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); savepointFuture = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); long checkpointIdNew = coord.getPendingCheckpoints().entrySet().iterator().next().getKey(); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); CompletedCheckpoint successNew = coord.getSuccessfulCheckpoints().get(0); assertEquals(jid, successNew.getJobId()); assertEquals(checkpointIdNew, successNew.getCheckpointID()); assertTrue(successNew.getOperatorStates().isEmpty()); assertNotNull(savepointFuture.get()); verify(subtaskState1, never()).discardState(); verify(subtaskState2, never()).discardState(); { verify(vertex1.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).triggerCheckpoint(eq(checkpointIdNew), any(Long.class), any(CheckpointOptions.class)); verify(vertex1.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); verify(vertex2.getCurrentExecutionAttempt(), times(1)).notifyCheckpointComplete(eq(checkpointIdNew), any(Long.class)); } coord.shutdown(JobStatus.FINISHED); } /** * Triggers a savepoint and two checkpoints. The second checkpoint completes * and subsumes the first checkpoint, but not the first savepoint. Then we * trigger another checkpoint and savepoint. The 2nd savepoint completes and * subsumes the last checkpoint, but not the first savepoint. 
*/ @Test public void testSavepointsAreNotSubsumed() throws Exception { final JobID jid = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); StandaloneCheckpointIDCounter counter = new StandaloneCheckpointIDCounter(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setCheckpointIDCounter(counter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(10)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture1 = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId1 = counter.getLast(); assertEquals(1, coord.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> checkpointFuture1 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(2, coord.getNumberOfPendingCheckpoints()); assertFalse(checkpointFuture1.isCompletedExceptionally()); CompletableFuture<CompletedCheckpoint> checkpointFuture2 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture2.isCompletedExceptionally()); long checkpointId2 = counter.getLast(); assertEquals(3, coord.getNumberOfPendingCheckpoints()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(1, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(coord.getPendingCheckpoints().get(savepointId1).isDiscarded()); assertFalse(savepointFuture1.isDone()); CompletableFuture<CompletedCheckpoint> checkpointFuture3 = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture3.isCompletedExceptionally()); assertEquals(2, coord.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> savepointFuture2 = coord.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId2 = counter.getLast(); assertFalse(savepointFuture2.isCompletedExceptionally()); assertEquals(3, coord.getNumberOfPendingCheckpoints()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, savepointId2), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, savepointId2), TASK_MANAGER_LOCATION_INFO); assertEquals(1, coord.getNumberOfPendingCheckpoints()); assertEquals(2, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(coord.getPendingCheckpoints().get(savepointId1).isDiscarded()); assertFalse(savepointFuture1.isDone()); assertNotNull(savepointFuture2.get()); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, savepointId1), TASK_MANAGER_LOCATION_INFO); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, savepointId1), TASK_MANAGER_LOCATION_INFO); assertEquals(0, coord.getNumberOfPendingCheckpoints()); 
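// Acknowledging the first savepoint completes it as well: nothing is left pending, and the store now retains checkpoint 2, savepoint 2 and savepoint 1.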
assertEquals(3, coord.getNumberOfRetainedSuccessfulCheckpoints()); assertNotNull(savepointFuture1.get()); } private void testMaxConcurrentAttempts(int maxConcurrentAttempts) { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); final AtomicInteger numCalls = new AtomicInteger(); final Execution execution = triggerVertex.getCurrentExecutionAttempt(); doAnswer(invocation -> { numCalls.incrementAndGet(); return null; }).when(execution).triggerCheckpoint(anyLong(), anyLong(), any(CheckpointOptions.class)); doAnswer(invocation -> { numCalls.incrementAndGet(); return null; }).when(execution).notifyCheckpointComplete(anyLong(), anyLong()); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); for (int i = 0; i < maxConcurrentAttempts; i++) { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } assertEquals(maxConcurrentAttempts, numCalls.get()); verify(triggerVertex.getCurrentExecutionAttempt(), times(maxConcurrentAttempts)) .triggerCheckpoint(anyLong(), anyLong(), any(CheckpointOptions.class)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID, 1L), TASK_MANAGER_LOCATION_INFO); final Collection<ScheduledFuture<?>> periodicScheduledTasks = manuallyTriggeredScheduledExecutor.getPeriodicScheduledTask(); assertEquals(1, periodicScheduledTasks.size()); final ScheduledFuture<?> scheduledFuture = periodicScheduledTasks.iterator().next(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(maxConcurrentAttempts + 1, numCalls.get()); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(maxConcurrentAttempts + 1, numCalls.get()); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testMaxConcurrentAttemptsWithSubsumption() { try { final int maxConcurrentAttempts = 2; final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); CheckpointCoordinatorConfiguration chkConfig = new
CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (coord.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, coord.getNumberOfPendingCheckpoints()); assertNotNull(coord.getPendingCheckpoints().get(1L)); assertNotNull(coord.getPendingCheckpoints().get(2L)); coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, ackAttemptID, 2L), TASK_MANAGER_LOCATION_INFO); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (coord.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, coord.getNumberOfPendingCheckpoints()); assertNotNull(coord.getPendingCheckpoints().get(3L)); assertNotNull(coord.getPendingCheckpoints().get(4L)); coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testPeriodicSchedulingWithInactiveTasks() { try { final JobID jid = new JobID(); final ExecutionAttemptID triggerAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID = new ExecutionAttemptID(); final ExecutionAttemptID commitAttemptID = new ExecutionAttemptID(); ExecutionVertex triggerVertex = mockExecutionVertex(triggerAttemptID); ExecutionVertex ackVertex = mockExecutionVertex(ackAttemptID); ExecutionVertex commitVertex = mockExecutionVertex(commitAttemptID); final AtomicReference<ExecutionState> currentState = new AtomicReference<>(ExecutionState.CREATED); when(triggerVertex.getCurrentExecutionAttempt().getState()).thenAnswer(invocation -> currentState.get()); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0) .setMaxConcurrentCheckpoints(2) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasksToTrigger(new ExecutionVertex[] { triggerVertex }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex }) .setTasksToCommitTo(new ExecutionVertex[] { commitVertex }) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coord.startCheckpointScheduler(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(0, coord.getNumberOfPendingCheckpoints()); currentState.set(ExecutionState.RUNNING); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(coord.getNumberOfPendingCheckpoints() > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Tests that the savepoints can be 
triggered concurrently. */ @Test public void testConcurrentSavepoints() throws Exception { JobID jobId = new JobID(); int numSavepoints = 5; final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); StandaloneCheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jobId) .setCheckpointCoordinatorConfiguration(chkConfig) .setTasks(new ExecutionVertex[] { vertex1 }) .setCheckpointIDCounter(checkpointIDCounter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); List<CompletableFuture<CompletedCheckpoint>> savepointFutures = new ArrayList<>(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); for (int i = 0; i < numSavepoints; i++) { savepointFutures.add(coord.triggerSavepoint(savepointDir)); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertFalse(savepointFuture.isDone()); } manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId = checkpointIDCounter.getLast(); for (int i = 0; i < numSavepoints; i++, checkpointId--) { coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jobId, attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertNotNull(savepointFuture.get()); } } /** * Tests that no minimum delay between savepoints is enforced. */ @Test public void testMinDelayBetweenSavepoints() throws Exception { CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setMinPauseBetweenCheckpoints(100000000L) .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepoint0 = coord.triggerSavepoint(savepointDir); assertFalse("Did not trigger savepoint", savepoint0.isDone()); CompletableFuture<CompletedCheckpoint> savepoint1 = coord.triggerSavepoint(savepointDir); assertFalse("Did not trigger savepoint", savepoint1.isDone()); } /** * Tests that the externalized checkpoint configuration is respected. 
*/ @Test public void testExternalizedCheckpoints() throws Exception { try { CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfigurationBuilder() .setCheckpointRetentionPolicy(CheckpointRetentionPolicy.RETAIN_ON_FAILURE) .build(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setTimer(manuallyTriggeredScheduledExecutor) .build(); CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); for (PendingCheckpoint checkpoint : coord.getPendingCheckpoints().values()) { CheckpointProperties props = checkpoint.getProps(); CheckpointProperties expected = CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.RETAIN_ON_FAILURE); assertEquals(expected, props); } coord.shutdown(JobStatus.FINISHED); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testCreateKeyGroupPartitions() { testCreateKeyGroupPartitions(1, 1); testCreateKeyGroupPartitions(13, 1); testCreateKeyGroupPartitions(13, 2); testCreateKeyGroupPartitions(Short.MAX_VALUE, 1); testCreateKeyGroupPartitions(Short.MAX_VALUE, 13); testCreateKeyGroupPartitions(Short.MAX_VALUE, Short.MAX_VALUE); Random r = new Random(1234); for (int k = 0; k < 1000; ++k) { int maxParallelism = 1 + r.nextInt(Short.MAX_VALUE - 1); int parallelism = 1 + r.nextInt(maxParallelism); testCreateKeyGroupPartitions(maxParallelism, parallelism); } } private void testCreateKeyGroupPartitions(int maxParallelism, int parallelism) { List<KeyGroupRange> ranges = StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, parallelism); for (int i = 0; i < maxParallelism; ++i) { KeyGroupRange range = ranges.get(KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, parallelism, i)); if (!range.contains(i)) { Assert.fail("Could not find expected key-group " + i + " in range " + range); } } } @Test public void testPartitionableStateRepartitioning() { Random r = new Random(42); for (int run = 0; run < 10000; ++run) { int oldParallelism = 1 + r.nextInt(9); int newParallelism = 1 + r.nextInt(9); int numNamedStates = 1 + r.nextInt(9); int maxPartitionsPerState = 1 + r.nextInt(9); doTestPartitionableStateRepartitioning( r, oldParallelism, newParallelism, numNamedStates, maxPartitionsPerState); } } private void doTestPartitionableStateRepartitioning( Random r, int oldParallelism, int newParallelism, int numNamedStates, int maxPartitionsPerState) { List<List<OperatorStateHandle>> previousParallelOpInstanceStates = new ArrayList<>(oldParallelism); for (int i = 0; i < oldParallelism; ++i) { Path fakePath = new Path("/fake-" + i); Map<String, OperatorStateHandle.StateMetaInfo> namedStatesToOffsets = new HashMap<>(); int off = 0; for (int s = 0; s < numNamedStates - 1; ++s) { long[] offs = new long[1 + r.nextInt(maxPartitionsPerState)]; for (int o = 0; o < offs.length; ++o) { offs[o] = off; ++off; } OperatorStateHandle.Mode mode = r.nextInt(10) == 0 ? 
OperatorStateHandle.Mode.UNION : OperatorStateHandle.Mode.SPLIT_DISTRIBUTE; namedStatesToOffsets.put( "State-" + s, new OperatorStateHandle.StateMetaInfo(offs, mode)); } if (numNamedStates % 2 == 0) { long[] offs = {off + 1, off + 2, off + 3, off + 4}; namedStatesToOffsets.put( "State-" + (numNamedStates - 1), new OperatorStateHandle.StateMetaInfo(offs, OperatorStateHandle.Mode.BROADCAST)); } previousParallelOpInstanceStates.add( Collections.singletonList(new OperatorStreamStateHandle(namedStatesToOffsets, new FileStateHandle(fakePath, -1)))); } Map<StreamStateHandle, Map<String, List<Long>>> expected = new HashMap<>(); int taskIndex = 0; int expectedTotalPartitions = 0; for (List<OperatorStateHandle> previousParallelOpInstanceState : previousParallelOpInstanceStates) { Assert.assertEquals(1, previousParallelOpInstanceState.size()); for (OperatorStateHandle psh : previousParallelOpInstanceState) { Map<String, OperatorStateHandle.StateMetaInfo> offsMap = psh.getStateNameToPartitionOffsets(); Map<String, List<Long>> offsMapWithList = new HashMap<>(offsMap.size()); for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> e : offsMap.entrySet()) { long[] offs = e.getValue().getOffsets(); int replication; switch (e.getValue().getDistributionMode()) { case UNION: replication = newParallelism; break; case BROADCAST: int extra = taskIndex < (newParallelism % oldParallelism) ? 1 : 0; replication = newParallelism / oldParallelism + extra; break; case SPLIT_DISTRIBUTE: replication = 1; break; default: throw new RuntimeException("Unknown distribution mode " + e.getValue().getDistributionMode()); } if (replication > 0) { expectedTotalPartitions += replication * offs.length; List<Long> offsList = new ArrayList<>(offs.length); for (long off : offs) { for (int p = 0; p < replication; ++p) { offsList.add(off); } } offsMapWithList.put(e.getKey(), offsList); } } if (!offsMapWithList.isEmpty()) { expected.put(psh.getDelegateStateHandle(), offsMapWithList); } taskIndex++; } } OperatorStateRepartitioner repartitioner = RoundRobinOperatorStateRepartitioner.INSTANCE; List<List<OperatorStateHandle>> pshs = repartitioner.repartitionState(previousParallelOpInstanceStates, oldParallelism, newParallelism); Map<StreamStateHandle, Map<String, List<Long>>> actual = new HashMap<>(); int minCount = Integer.MAX_VALUE; int maxCount = 0; int actualTotalPartitions = 0; for (int p = 0; p < newParallelism; ++p) { int partitionCount = 0; Collection<OperatorStateHandle> pshc = pshs.get(p); for (OperatorStateHandle sh : pshc) { for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> namedState : sh.getStateNameToPartitionOffsets().entrySet()) { Map<String, List<Long>> stateToOffsets = actual.get(sh.getDelegateStateHandle()); if (stateToOffsets == null) { stateToOffsets = new HashMap<>(); actual.put(sh.getDelegateStateHandle(), stateToOffsets); } List<Long> actualOffs = stateToOffsets.get(namedState.getKey()); if (actualOffs == null) { actualOffs = new ArrayList<>(); stateToOffsets.put(namedState.getKey(), actualOffs); } long[] add = namedState.getValue().getOffsets(); for (long l : add) { actualOffs.add(l); } partitionCount += namedState.getValue().getOffsets().length; } } minCount = Math.min(minCount, partitionCount); maxCount = Math.max(maxCount, partitionCount); actualTotalPartitions += partitionCount; } for (Map<String, List<Long>> v : actual.values()) { for (List<Long> l : v.values()) { Collections.sort(l); } } if (oldParallelism != newParallelism) { int maxLoadDiff = maxCount - minCount; Assert.assertTrue("Difference in 
partition load is > 1 : " + maxLoadDiff, maxLoadDiff <= 1); } Assert.assertEquals(expectedTotalPartitions, actualTotalPartitions); Assert.assertEquals(expected, actual); } /** * Tests that the pending checkpoint stats callbacks are created. */ @Test public void testCheckpointStatsTrackerPendingCheckpointCallback() throws Exception { CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .build(); CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); coord.setCheckpointStatsTracker(tracker); when(tracker.reportPendingCheckpoint(anyLong(), anyLong(), any(CheckpointProperties.class))) .thenReturn(mock(PendingCheckpointStats.class)); CompletableFuture<CompletedCheckpoint> checkpointFuture = coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isCompletedExceptionally()); verify(tracker, times(1)) .reportPendingCheckpoint(eq(1L), any(Long.class), eq(CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION))); } /** * Tests that the restore callbacks are called if registered. */ @Test public void testCheckpointStatsTrackerRestoreCallback() throws Exception { StandaloneCompletedCheckpointStore store = new StandaloneCompletedCheckpointStore(1); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(store) .setTimer(manuallyTriggeredScheduledExecutor) .build(); store.addCheckpoint(new CompletedCheckpoint( new JobID(), 0, 0, 0, Collections.<OperatorID, OperatorState>emptyMap(), Collections.<MasterState>emptyList(), CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new TestCompletedCheckpointStorageLocation())); CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); coord.setCheckpointStatsTracker(tracker); assertTrue(coord.restoreLatestCheckpointedStateToAll(Collections.emptySet(), true)); verify(tracker, times(1)) .reportRestoredCheckpoint(any(RestoredCheckpointStats.class)); } @Test public void testSharedStateRegistrationOnRestore() throws Exception { final JobID jid = new JobID(); final JobVertexID jobVertexID1 = new JobVertexID(); int parallelism1 = 2; int maxParallelism1 = 4; final ExecutionJobVertex jobVertex1 = mockExecutionJobVertex( jobVertexID1, parallelism1, maxParallelism1); List<ExecutionVertex> allExecutionVertices = new ArrayList<>(parallelism1); allExecutionVertices.addAll(Arrays.asList(jobVertex1.getTaskVertices())); ExecutionVertex[] arrayExecutionVertices = allExecutionVertices.toArray(new ExecutionVertex[allExecutionVertices.size()]); RecoverableCompletedCheckpointStore store = new RecoverableCompletedCheckpointStore(10); final List<SharedStateRegistry> createdSharedStateRegistries = new ArrayList<>(2); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setJobId(jid) .setTasks(arrayExecutionVertices) .setCompletedCheckpointStore(store) .setTimer(manuallyTriggeredScheduledExecutor) .setSharedStateRegistryFactory( deleteExecutor -> { SharedStateRegistry instance = new SharedStateRegistry(deleteExecutor); createdSharedStateRegistries.add(instance); return instance; }) .build(); final int numCheckpoints = 3; List<KeyGroupRange> keyGroupPartitions1 = StateAssignmentOperation.createKeyGroupPartitions(maxParallelism1, parallelism1); for (int i = 0; i < numCheckpoints; ++i) { performIncrementalCheckpoint(jid, coord, jobVertex1, keyGroupPartitions1, i); } List<CompletedCheckpoint> completedCheckpoints = 
coord.getSuccessfulCheckpoints(); assertEquals(numCheckpoints, completedCheckpoints.size()); int sharedHandleCount = 0; List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint = new ArrayList<>(numCheckpoints); for (int i = 0; i < numCheckpoints; ++i) { sharedHandlesByCheckpoint.add(new HashMap<>(2)); } int cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { verify(keyedStateHandle, times(1)).registerSharedStates(createdSharedStateRegistries.get(0)); IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle = (IncrementalRemoteKeyedStateHandle) keyedStateHandle; sharedHandlesByCheckpoint.get(cp).putAll(incrementalKeyedStateHandle.getSharedState()); for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getSharedState().values()) { assertTrue(!(streamStateHandle instanceof PlaceholderStreamStateHandle)); verify(streamStateHandle, never()).discardState(); ++sharedHandleCount; } for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getPrivateState().values()) { verify(streamStateHandle, never()).discardState(); } verify(incrementalKeyedStateHandle.getMetaStateHandle(), never()).discardState(); } verify(subtaskState, never()).discardState(); } } ++cp; } assertEquals(10, sharedHandleCount); store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, never()).discardState(); } } store.shutdown(JobStatus.SUSPENDED); Set<ExecutionJobVertex> tasks = new HashSet<>(); tasks.add(jobVertex1); assertTrue(coord.restoreLatestCheckpointedStateToAll(tasks, false)); cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { VerificationMode verificationMode; if (cp > 0) { verificationMode = times(1); } else { verificationMode = never(); } verify(keyedStateHandle, verificationMode).registerSharedStates(createdSharedStateRegistries.get(1)); } } } ++cp; } store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (Map.Entry<StateHandleID, StreamStateHandle> entry : cpList.entrySet()) { String key = entry.getKey().getKeyString(); int belongToCP = Integer.parseInt(String.valueOf(key.charAt(key.length() - 1))); if (belongToCP == 0) { verify(entry.getValue(), times(1)).discardState(); } else { verify(entry.getValue(), never()).discardState(); } } } store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, times(1)).discardState(); } } } @Test public void jobFailsIfInFlightSynchronousSavepointIsDiscarded() throws Exception { final Tuple2<Integer, Throwable> invocationCounterAndException = Tuple2.of(0, null); final Throwable expectedRootCause = new IOException("Custom-Exception"); final JobID jobId = new JobID(); final ExecutionAttemptID attemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID attemptID2 = new 
ExecutionAttemptID(); final ExecutionVertex vertex1 = mockExecutionVertex(attemptID1); final ExecutionVertex vertex2 = mockExecutionVertex(attemptID2); final CheckpointCoordinator coordinator = getCheckpointCoordinator(jobId, vertex1, vertex2, new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { invocationCounterAndException.f0 += 1; invocationCounterAndException.f1 = cause; } @Override public void failJobDueToTaskFailure(Throwable cause, ExecutionAttemptID failingTask) { throw new AssertionError("This method should not be called for the test."); } })); final CompletableFuture<CompletedCheckpoint> savepointFuture = coordinator .triggerSynchronousSavepoint(false, "test-dir"); manuallyTriggeredScheduledExecutor.triggerAll(); final PendingCheckpoint syncSavepoint = declineSynchronousSavepoint(jobId, coordinator, attemptID1, expectedRootCause); assertTrue(syncSavepoint.isDiscarded()); try { savepointFuture.get(); fail("Expected Exception not found."); } catch (ExecutionException e) { final Throwable cause = ExceptionUtils.stripExecutionException(e); assertTrue(cause instanceof CheckpointException); assertEquals(expectedRootCause.getMessage(), cause.getCause().getMessage()); } assertEquals(1L, invocationCounterAndException.f0.intValue()); assertTrue( invocationCounterAndException.f1 instanceof CheckpointException && invocationCounterAndException.f1.getCause().getMessage().equals(expectedRootCause.getMessage())); coordinator.shutdown(JobStatus.FAILING); } /** * Tests that no checkpoint is triggered when the coordinator is stopped after the eager pre-check. */ @Test public void testTriggerCheckpointAfterCancel() throws Exception { TestingCheckpointIDCounter idCounter = new TestingCheckpointIDCounter(); CheckpointCoordinator coord = new CheckpointCoordinatorBuilder() .setCheckpointIDCounter(idCounter) .setTimer(manuallyTriggeredScheduledExecutor) .build(); idCounter.setOwner(coord); try { coord.startCheckpointScheduler(); final CompletableFuture<CompletedCheckpoint> onCompletionPromise = coord.triggerCheckpoint( CheckpointProperties .forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), null, true, false); manuallyTriggeredScheduledExecutor.triggerAll(); try { onCompletionPromise.get(); fail("should not trigger a periodic checkpoint after the coordinator is stopped."); } catch (ExecutionException e) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(e, CheckpointException.class); assertTrue(checkpointExceptionOptional.isPresent()); assertEquals(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN, checkpointExceptionOptional.get().getCheckpointFailureReason()); } } finally { coord.shutdown(JobStatus.FINISHED); } } @Test public void testSavepointScheduledInUnalignedMode() throws Exception { int maxConcurrentCheckpoints = 1; int checkpointRequestsToSend = 10; int activeRequests = 0; JobID jobId = new JobID(); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration .builder() .setUnalignedCheckpointsEnabled(true) .setMaxConcurrentCheckpoints(maxConcurrentCheckpoints) .build()) .setJobId(jobId) .setTimer(manuallyTriggeredScheduledExecutor) .build(); try { List<Future<?>> checkpointFutures = new ArrayList<>(checkpointRequestsToSend); coordinator.startCheckpointScheduler(); while (activeRequests < checkpointRequestsToSend) { checkpointFutures.add(coordinator.triggerCheckpoint(true));
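// Requests beyond maxConcurrentCheckpoints are queued rather than triggered immediately (verified against getNumQueuedRequests below).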
activeRequests++; } assertEquals(activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); Future<?> savepointFuture = coordinator.triggerSavepoint("/tmp"); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(++activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); coordinator.receiveDeclineMessage(new DeclineCheckpoint(jobId, new ExecutionAttemptID(), 1L), "none"); manuallyTriggeredScheduledExecutor.triggerAll(); activeRequests--; assertEquals(activeRequests - maxConcurrentCheckpoints , coordinator.getNumQueuedRequests()); assertEquals(1, checkpointFutures.stream().filter(Future::isDone).count()); assertFalse(savepointFuture.isDone()); assertEquals(maxConcurrentCheckpoints, coordinator.getNumberOfPendingCheckpoints()); CheckpointProperties props = coordinator.getPendingCheckpoints().values().iterator().next().getProps(); assertTrue(props.isSavepoint()); assertFalse(props.forceCheckpoint()); } finally { coordinator.shutdown(JobStatus.FINISHED); } } private CheckpointCoordinator getCheckpointCoordinator( JobID jobId, ExecutionVertex vertex1, ExecutionVertex vertex2) { return new CheckpointCoordinatorBuilder() .setJobId(jobId) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setCheckpointCoordinatorConfiguration(CheckpointCoordinatorConfiguration.builder().setMaxConcurrentCheckpoints(Integer.MAX_VALUE).build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(); } private CheckpointCoordinator getCheckpointCoordinator( JobID jobId, ExecutionVertex vertex1, ExecutionVertex vertex2, CheckpointFailureManager failureManager) { return new CheckpointCoordinatorBuilder() .setJobId(jobId) .setTasks(new ExecutionVertex[]{ vertex1, vertex2 }) .setTimer(manuallyTriggeredScheduledExecutor) .setFailureManager(failureManager) .build(); } private CheckpointCoordinator getCheckpointCoordinator() { final ExecutionAttemptID triggerAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID triggerAttemptID2 = new ExecutionAttemptID(); ExecutionVertex triggerVertex1 = mockExecutionVertex(triggerAttemptID1); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionVertex triggerVertex2 = mockExecutionVertex( triggerAttemptID2, jobVertexID2, Collections.singletonList(OperatorID.fromJobVertexID(jobVertexID2)), 1, 1, ExecutionState.FINISHED); final ExecutionAttemptID ackAttemptID1 = new ExecutionAttemptID(); final ExecutionAttemptID ackAttemptID2 = new ExecutionAttemptID(); ExecutionVertex ackVertex1 = mockExecutionVertex(ackAttemptID1); ExecutionVertex ackVertex2 = mockExecutionVertex(ackAttemptID2); return new CheckpointCoordinatorBuilder() .setTasksToTrigger(new ExecutionVertex[] { triggerVertex1, triggerVertex2 }) .setTasksToWaitFor(new ExecutionVertex[] { ackVertex1, ackVertex2 }) .setTasksToCommitTo(new ExecutionVertex[] {}) .setTimer(manuallyTriggeredScheduledExecutor) .build(); } private CheckpointFailureManager getCheckpointFailureManager(String errorMsg) { return new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { throw new RuntimeException(errorMsg); } @Override public void failJobDueToTaskFailure(Throwable cause, ExecutionAttemptID failingTask) { throw new RuntimeException(errorMsg); } }); } private PendingCheckpoint declineSynchronousSavepoint( final JobID jobId, final CheckpointCoordinator coordinator, final ExecutionAttemptID attemptID, final Throwable reason) { final long checkpointId = 
coordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); final PendingCheckpoint checkpoint = coordinator.getPendingCheckpoints().get(checkpointId); coordinator.receiveDeclineMessage(new DeclineCheckpoint(jobId, attemptID, checkpointId, reason), TASK_MANAGER_LOCATION_INFO); return checkpoint; } private void performIncrementalCheckpoint( JobID jid, CheckpointCoordinator coord, ExecutionJobVertex jobVertex1, List<KeyGroupRange> keyGroupPartitions1, int cpSequenceNumber) throws Exception { coord.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(1, coord.getPendingCheckpoints().size()); long checkpointId = Iterables.getOnlyElement(coord.getPendingCheckpoints().keySet()); for (int index = 0; index < jobVertex1.getParallelism(); index++) { KeyGroupRange keyGroupRange = keyGroupPartitions1.get(index); Map<StateHandleID, StreamStateHandle> privateState = new HashMap<>(); privateState.put( new StateHandleID("private-1"), spy(new ByteStreamStateHandle("private-1", new byte[]{'p'}))); Map<StateHandleID, StreamStateHandle> sharedState = new HashMap<>(); if (cpSequenceNumber > 0) { sharedState.put( new StateHandleID("shared-" + (cpSequenceNumber - 1)), spy(new PlaceholderStreamStateHandle())); } sharedState.put( new StateHandleID("shared-" + cpSequenceNumber), spy(new ByteStreamStateHandle("shared-" + cpSequenceNumber + "-" + keyGroupRange, new byte[]{'s'}))); IncrementalRemoteKeyedStateHandle managedState = spy(new IncrementalRemoteKeyedStateHandle( new UUID(42L, 42L), keyGroupRange, checkpointId, sharedState, privateState, spy(new ByteStreamStateHandle("meta", new byte[]{'m'})))); OperatorSubtaskState operatorSubtaskState = spy(new OperatorSubtaskState( StateObjectCollection.empty(), StateObjectCollection.empty(), StateObjectCollection.singleton(managedState), StateObjectCollection.empty())); Map<OperatorID, OperatorSubtaskState> opStates = new HashMap<>(); opStates.put(jobVertex1.getOperatorIDs().get(0).getGeneratedOperatorID(), operatorSubtaskState); TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(opStates); AcknowledgeCheckpoint acknowledgeCheckpoint = new AcknowledgeCheckpoint( jid, jobVertex1.getTaskVertices()[index].getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointMetrics(), taskStateSnapshot); coord.receiveAcknowledgeMessage(acknowledgeCheckpoint, TASK_MANAGER_LOCATION_INFO); } } private static class TestingCheckpointIDCounter extends StandaloneCheckpointIDCounter { private CheckpointCoordinator owner; @Override public long getAndIncrement() throws Exception { checkNotNull(owner); owner.stopCheckpointScheduler(); return super.getAndIncrement(); } void setOwner(CheckpointCoordinator coordinator) { this.owner = checkNotNull(coordinator); } } }
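The checkpoint coordinator tests above hinge on a manually triggered scheduled executor: trigger actions are queued and only run when the test calls triggerAll(), which removes timing nondeterminism. As a rough, simplified sketch of that pattern (this is not Flink's actual ManuallyTriggeredScheduledExecutor; the class name and behavior here are assumptions for illustration):

```java
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.Executor;

// Minimal sketch of a manually driven executor: execute() only queues the
// task, and nothing runs until the test explicitly calls triggerAll().
// This is why the tests can assert on pending checkpoints deterministically.
class ManuallyTriggeredExecutorSketch implements Executor {
    private final Queue<Runnable> queuedTasks = new ArrayDeque<>();

    @Override
    public void execute(Runnable command) {
        queuedTasks.add(command); // defer instead of running immediately
    }

    void triggerAll() {
        Runnable next;
        while ((next = queuedTasks.poll()) != null) {
            next.run();
        }
    }
}
```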
I'm not sure I follow; I think it's indented correctly.
private static List<SubtaskCheckpointStatistics> createSubtaskCheckpointStatistics(SubtaskStateStats[] subtaskStateStats, long triggerTimestamp) { final List<SubtaskCheckpointStatistics> result = new ArrayList<>(subtaskStateStats.length); for (int i = 0; i < subtaskStateStats.length; i++) { final SubtaskStateStats subtask = subtaskStateStats[i]; if (subtask == null) { result.add(new SubtaskCheckpointStatistics.PendingSubtaskCheckpointStatistics(i)); } else { result.add(new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics( i, subtask.getAckTimestamp(), subtask.getEndToEndDuration(triggerTimestamp), subtask.getStateSize(), new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics.CheckpointDuration( subtask.getSyncCheckpointDuration(), subtask.getAsyncCheckpointDuration()), new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics.CheckpointAlignment( subtask.getAlignmentBuffered(), subtask.getAlignmentDuration()), subtask.getCheckpointStartDelay() )); } } return result; }
subtask.getCheckpointStartDelay()
private static List<SubtaskCheckpointStatistics> createSubtaskCheckpointStatistics(SubtaskStateStats[] subtaskStateStats, long triggerTimestamp) { final List<SubtaskCheckpointStatistics> result = new ArrayList<>(subtaskStateStats.length); for (int i = 0; i < subtaskStateStats.length; i++) { final SubtaskStateStats subtask = subtaskStateStats[i]; if (subtask == null) { result.add(new SubtaskCheckpointStatistics.PendingSubtaskCheckpointStatistics(i)); } else { result.add(new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics( i, subtask.getAckTimestamp(), subtask.getEndToEndDuration(triggerTimestamp), subtask.getStateSize(), new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics.CheckpointDuration( subtask.getSyncCheckpointDuration(), subtask.getAsyncCheckpointDuration()), new SubtaskCheckpointStatistics.CompletedSubtaskCheckpointStatistics.CheckpointAlignment( subtask.getAlignmentBuffered(), subtask.getAlignmentDuration()), subtask.getCheckpointStartDelay() )); } } return result; }
class TaskCheckpointStatisticDetailsHandler extends AbstractCheckpointHandler<TaskCheckpointStatisticsWithSubtaskDetails, TaskCheckpointMessageParameters> implements JsonArchivist { public TaskCheckpointStatisticDetailsHandler( GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, TaskCheckpointStatisticsWithSubtaskDetails, TaskCheckpointMessageParameters> messageHeaders, ExecutionGraphCache executionGraphCache, Executor executor, CheckpointStatsCache checkpointStatsCache) { super( leaderRetriever, timeout, responseHeaders, messageHeaders, executionGraphCache, executor, checkpointStatsCache); } @Override protected TaskCheckpointStatisticsWithSubtaskDetails handleCheckpointRequest( HandlerRequest<EmptyRequestBody, TaskCheckpointMessageParameters> request, AbstractCheckpointStats checkpointStats) throws RestHandlerException { final JobVertexID jobVertexId = request.getPathParameter(JobVertexIdPathParameter.class); final TaskStateStats taskStatistics = checkpointStats.getTaskStateStats(jobVertexId); if (taskStatistics == null) { throw new NotFoundException("There is no checkpoint statistics for task " + jobVertexId + '.'); } return createCheckpointDetails(checkpointStats, taskStatistics); } @Override public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException { CheckpointStatsSnapshot stats = graph.getCheckpointStatsSnapshot(); if (stats == null) { return Collections.emptyList(); } CheckpointStatsHistory history = stats.getHistory(); List<ArchivedJson> archive = new ArrayList<>(history.getCheckpoints().size()); for (AbstractCheckpointStats checkpoint : history.getCheckpoints()) { for (TaskStateStats subtaskStats : checkpoint.getAllTaskStateStats()) { ResponseBody json = createCheckpointDetails(checkpoint, subtaskStats); String path = getMessageHeaders().getTargetRestEndpointURL() .replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString()) .replace(':' + CheckpointIdPathParameter.KEY, String.valueOf(checkpoint.getCheckpointId())) .replace(':' + JobVertexIdPathParameter.KEY, subtaskStats.getJobVertexId().toString()); archive.add(new ArchivedJson(path, json)); } } return archive; } private static TaskCheckpointStatisticsWithSubtaskDetails createCheckpointDetails(AbstractCheckpointStats checkpointStats, TaskStateStats taskStatistics) { final TaskCheckpointStatisticsWithSubtaskDetails.Summary summary = createSummary( taskStatistics.getSummaryStats(), checkpointStats.getTriggerTimestamp()); final List<SubtaskCheckpointStatistics> subtaskCheckpointStatistics = createSubtaskCheckpointStatistics( taskStatistics.getSubtaskStats(), checkpointStats.getTriggerTimestamp()); return new TaskCheckpointStatisticsWithSubtaskDetails( checkpointStats.getCheckpointId(), checkpointStats.getStatus(), taskStatistics.getLatestAckTimestamp(), taskStatistics.getStateSize(), taskStatistics.getEndToEndDuration(checkpointStats.getTriggerTimestamp()), taskStatistics.getAlignmentBuffered(), taskStatistics.getNumberOfSubtasks(), taskStatistics.getNumberOfAcknowledgedSubtasks(), summary, subtaskCheckpointStatistics); } private static TaskCheckpointStatisticsWithSubtaskDetails.Summary createSummary(TaskStateStats.TaskStateStatsSummary taskStatisticsSummary, long triggerTimestamp) { final MinMaxAvgStats ackTSStats = taskStatisticsSummary.getAckTimestampStats(); final TaskCheckpointStatisticsWithSubtaskDetails.CheckpointDuration checkpointDuration = new 
TaskCheckpointStatisticsWithSubtaskDetails.CheckpointDuration( MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getSyncCheckpointDurationStats()), MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getAsyncCheckpointDurationStats())); final TaskCheckpointStatisticsWithSubtaskDetails.CheckpointAlignment checkpointAlignment = new TaskCheckpointStatisticsWithSubtaskDetails.CheckpointAlignment( MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getAlignmentBufferedStats()), MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getAlignmentDurationStats())); return new TaskCheckpointStatisticsWithSubtaskDetails.Summary( MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getStateSizeStats()), new MinMaxAvgStatistics( Math.max(0L, ackTSStats.getMinimum() - triggerTimestamp), Math.max(0L, ackTSStats.getMaximum() - triggerTimestamp), Math.max(0L, ackTSStats.getAverage() - triggerTimestamp)), checkpointDuration, checkpointAlignment, MinMaxAvgStatistics.createFrom(taskStatisticsSummary.getCheckpointStartDelayStats())); } }
class TaskCheckpointStatisticDetailsHandler extends AbstractCheckpointHandler<TaskCheckpointStatisticsWithSubtaskDetails, TaskCheckpointMessageParameters> implements JsonArchivist { public TaskCheckpointStatisticDetailsHandler( GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, TaskCheckpointStatisticsWithSubtaskDetails, TaskCheckpointMessageParameters> messageHeaders, ExecutionGraphCache executionGraphCache, Executor executor, CheckpointStatsCache checkpointStatsCache) { super( leaderRetriever, timeout, responseHeaders, messageHeaders, executionGraphCache, executor, checkpointStatsCache); } @Override protected TaskCheckpointStatisticsWithSubtaskDetails handleCheckpointRequest( HandlerRequest<EmptyRequestBody, TaskCheckpointMessageParameters> request, AbstractCheckpointStats checkpointStats) throws RestHandlerException { final JobVertexID jobVertexId = request.getPathParameter(JobVertexIdPathParameter.class); final TaskStateStats taskStatistics = checkpointStats.getTaskStateStats(jobVertexId); if (taskStatistics == null) { throw new NotFoundException("There is no checkpoint statistics for task " + jobVertexId + '.'); } return createCheckpointDetails(checkpointStats, taskStatistics); } @Override public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException { CheckpointStatsSnapshot stats = graph.getCheckpointStatsSnapshot(); if (stats == null) { return Collections.emptyList(); } CheckpointStatsHistory history = stats.getHistory(); List<ArchivedJson> archive = new ArrayList<>(history.getCheckpoints().size()); for (AbstractCheckpointStats checkpoint : history.getCheckpoints()) { for (TaskStateStats subtaskStats : checkpoint.getAllTaskStateStats()) { ResponseBody json = createCheckpointDetails(checkpoint, subtaskStats); String path = getMessageHeaders().getTargetRestEndpointURL() .replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString()) .replace(':' + CheckpointIdPathParameter.KEY, String.valueOf(checkpoint.getCheckpointId())) .replace(':' + JobVertexIdPathParameter.KEY, subtaskStats.getJobVertexId().toString()); archive.add(new ArchivedJson(path, json)); } } return archive; } private static TaskCheckpointStatisticsWithSubtaskDetails createCheckpointDetails(AbstractCheckpointStats checkpointStats, TaskStateStats taskStatistics) { final TaskCheckpointStatisticsWithSubtaskDetails.Summary summary = createSummary( taskStatistics.getSummaryStats(), checkpointStats.getTriggerTimestamp()); final List<SubtaskCheckpointStatistics> subtaskCheckpointStatistics = createSubtaskCheckpointStatistics( taskStatistics.getSubtaskStats(), checkpointStats.getTriggerTimestamp()); return new TaskCheckpointStatisticsWithSubtaskDetails( checkpointStats.getCheckpointId(), checkpointStats.getStatus(), taskStatistics.getLatestAckTimestamp(), taskStatistics.getStateSize(), taskStatistics.getEndToEndDuration(checkpointStats.getTriggerTimestamp()), taskStatistics.getAlignmentBuffered(), taskStatistics.getNumberOfSubtasks(), taskStatistics.getNumberOfAcknowledgedSubtasks(), summary, subtaskCheckpointStatistics); } private static TaskCheckpointStatisticsWithSubtaskDetails.Summary createSummary(TaskStateStats.TaskStateStatsSummary taskStatisticsSummary, long triggerTimestamp) { final MinMaxAvgStats ackTSStats = taskStatisticsSummary.getAckTimestampStats(); final TaskCheckpointStatisticsWithSubtaskDetails.CheckpointDuration checkpointDuration = new 
TaskCheckpointStatisticsWithSubtaskDetails.CheckpointDuration( MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getSyncCheckpointDurationStats()), MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getAsyncCheckpointDurationStats())); final TaskCheckpointStatisticsWithSubtaskDetails.CheckpointAlignment checkpointAlignment = new TaskCheckpointStatisticsWithSubtaskDetails.CheckpointAlignment( MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getAlignmentBufferedStats()), MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getAlignmentDurationStats())); return new TaskCheckpointStatisticsWithSubtaskDetails.Summary( MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getStateSizeStats()), new MinMaxAvgStatistics( Math.max(0L, ackTSStats.getMinimum() - triggerTimestamp), Math.max(0L, ackTSStats.getMaximum() - triggerTimestamp), Math.max(0L, ackTSStats.getAverage() - triggerTimestamp)), checkpointDuration, checkpointAlignment, MinMaxAvgStatistics.valueOf(taskStatisticsSummary.getCheckpointStartDelayStats())); } }
Should we print an info log noting that we're automatically adding inferred extra packages?
public PCollection<OutputT> expand(PCollection<?> input) { Coder<OutputT> outputCoder; if (this.keyCoder == null) { outputCoder = (Coder<OutputT>) RowCoder.of(schema); } else { outputCoder = (Coder<OutputT>) KvCoder.of(keyCoder, RowCoder.of(schema)); } if (this.extraPackages.isEmpty()) { this.extraPackages.addAll(mayBeInferExtraPackagesFromModelHandler()); } return (PCollection<OutputT>) input.apply( PythonExternalTransform.<PCollection<?>, PCollection<Row>>from( "apache_beam.ml.inference.base.RunInference.from_callable", expansionService) .withKwarg("model_handler_provider", PythonCallableSource.of(modelLoader)) .withOutputCoder(outputCoder) .withExtraPackages(this.extraPackages) .withKwargs(kwargs)); }
this.extraPackages.addAll(mayBeInferExtraPackagesFromModelHandler());
public PCollection<OutputT> expand(PCollection<?> input) { Coder<OutputT> outputCoder; if (this.keyCoder == null) { outputCoder = (Coder<OutputT>) RowCoder.of(schema); } else { outputCoder = (Coder<OutputT>) KvCoder.of(keyCoder, RowCoder.of(schema)); } if (this.extraPackages.isEmpty()) { this.extraPackages.addAll(inferExtraPackagesFromModelHandler()); } return (PCollection<OutputT>) input.apply( PythonExternalTransform.<PCollection<?>, PCollection<Row>>from( "apache_beam.ml.inference.base.RunInference.from_callable", expansionService) .withKwarg("model_handler_provider", PythonCallableSource.of(modelLoader)) .withOutputCoder(outputCoder) .withExtraPackages(this.extraPackages) .withKwargs(kwargs)); }
class object. * @param schema A schema for output rows. * @param keyCoder a {@link Coder}
class object. * @param schema A schema for output rows. * @param keyCoder a {@link Coder}
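A minimal sketch of the info log this comment asks for, assuming an SLF4J logger on the transform; the class name and the sample package list below are hypothetical:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ExtraPackagesLoggingSketch {
    private static final Logger LOG = LoggerFactory.getLogger(ExtraPackagesLoggingSketch.class);
    private final List<String> extraPackages = new ArrayList<>();

    // Hypothetical stand-in for the transform's real inference helper.
    private List<String> inferExtraPackagesFromModelHandler() {
        return Arrays.asList("torch", "transformers");
    }

    void resolveExtraPackages() {
        if (extraPackages.isEmpty()) {
            List<String> inferred = inferExtraPackagesFromModelHandler();
            if (!inferred.isEmpty()) {
                // Surface the automatic inference so pipeline authors can see
                // which packages were added without being explicitly requested.
                LOG.info("Automatically adding inferred extra packages: {}", inferred);
                extraPackages.addAll(inferred);
            }
        }
    }
}
```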
```suggestion AccessExecutionVertex[] taskVertices = ejv.getTaskVertices(); ``` Could we do this renaming to improve readability, since we're dealing with different types of vertices in this code segment?
public static JobDetails createDetailsForJob(AccessExecutionGraph job) { JobStatus status = job.getState(); long started = job.getStatusTimestamp(JobStatus.INITIALIZING); long finished = status.isGloballyTerminalState() ? job.getStatusTimestamp(status) : -1L; long duration = (finished >= 0L ? finished : System.currentTimeMillis()) - started; int[] countsPerStatus = new int[ExecutionState.values().length]; long lastChanged = 0; int numTotalTasks = 0; for (AccessExecutionJobVertex ejv : job.getVerticesTopologically()) { AccessExecutionVertex[] vertices = ejv.getTaskVertices(); numTotalTasks += vertices.length; for (AccessExecutionVertex vertex : vertices) { ExecutionState state = vertex.getExecutionState(); countsPerStatus[state.ordinal()]++; lastChanged = Math.max(lastChanged, vertex.getStateTimestamp(state)); } } lastChanged = Math.max(lastChanged, finished); return new JobDetails( job.getJobID(), job.getJobName(), started, finished, duration, status, lastChanged, countsPerStatus, numTotalTasks); }
AccessExecutionVertex[] vertices = ejv.getTaskVertices();
public static JobDetails createDetailsForJob(AccessExecutionGraph job) { JobStatus status = job.getState(); long started = job.getStatusTimestamp(JobStatus.INITIALIZING); long finished = status.isGloballyTerminalState() ? job.getStatusTimestamp(status) : -1L; long duration = (finished >= 0L ? finished : System.currentTimeMillis()) - started; int[] countsPerStatus = new int[ExecutionState.values().length]; long lastChanged = 0; int numTotalTasks = 0; for (AccessExecutionJobVertex ejv : job.getVerticesTopologically()) { AccessExecutionVertex[] taskVertices = ejv.getTaskVertices(); numTotalTasks += taskVertices.length; for (AccessExecutionVertex taskVertex : taskVertices) { ExecutionState state = taskVertex.getExecutionState(); countsPerStatus[state.ordinal()]++; lastChanged = Math.max(lastChanged, taskVertex.getStateTimestamp(state)); } } lastChanged = Math.max(lastChanged, finished); return new JobDetails( job.getJobID(), job.getJobName(), started, finished, duration, status, lastChanged, countsPerStatus, numTotalTasks); }
class JobDetails implements Serializable { private static final long serialVersionUID = -3391462110304948766L; private static final String FIELD_NAME_JOB_ID = "jid"; private static final String FIELD_NAME_JOB_NAME = "name"; private static final String FIELD_NAME_START_TIME = "start-time"; private static final String FIELD_NAME_END_TIME = "end-time"; private static final String FIELD_NAME_DURATION = "duration"; private static final String FIELD_NAME_STATUS = "state"; private static final String FIELD_NAME_LAST_MODIFICATION = "last-modification"; private static final String FIELD_NAME_TOTAL_NUMBER_TASKS = "total"; private final JobID jobId; private final String jobName; private final long startTime; private final long endTime; private final long duration; private final JobStatus status; private final long lastUpdateTime; private final int[] tasksPerState; private final int numTasks; public JobDetails( JobID jobId, String jobName, long startTime, long endTime, long duration, JobStatus status, long lastUpdateTime, int[] tasksPerState, int numTasks) { this.jobId = checkNotNull(jobId); this.jobName = checkNotNull(jobName); this.startTime = startTime; this.endTime = endTime; this.duration = duration; this.status = checkNotNull(status); this.lastUpdateTime = lastUpdateTime; Preconditions.checkArgument(tasksPerState.length == ExecutionState.values().length, "tasksPerState argument must be of size %s.", ExecutionState.values().length); this.tasksPerState = checkNotNull(tasksPerState); this.numTasks = numTasks; } public JobID getJobId() { return jobId; } public String getJobName() { return jobName; } public long getStartTime() { return startTime; } public long getEndTime() { return endTime; } public long getDuration() { return duration; } public JobStatus getStatus() { return status; } public long getLastUpdateTime() { return lastUpdateTime; } public int getNumTasks() { return numTasks; } public int[] getTasksPerState() { return tasksPerState; } @Override public boolean equals(Object o) { if (this == o) { return true; } else if (o != null && o.getClass() == JobDetails.class) { JobDetails that = (JobDetails) o; return this.endTime == that.endTime && this.lastUpdateTime == that.lastUpdateTime && this.numTasks == that.numTasks && this.startTime == that.startTime && this.status == that.status && this.jobId.equals(that.jobId) && this.jobName.equals(that.jobName) && Arrays.equals(this.tasksPerState, that.tasksPerState); } else { return false; } } @Override public int hashCode() { int result = jobId.hashCode(); result = 31 * result + jobName.hashCode(); result = 31 * result + (int) (startTime ^ (startTime >>> 32)); result = 31 * result + (int) (endTime ^ (endTime >>> 32)); result = 31 * result + status.hashCode(); result = 31 * result + (int) (lastUpdateTime ^ (lastUpdateTime >>> 32)); result = 31 * result + Arrays.hashCode(tasksPerState); result = 31 * result + numTasks; return result; } @Override public String toString() { return "JobDetails {" + "jobId=" + jobId + ", jobName='" + jobName + '\'' + ", startTime=" + startTime + ", endTime=" + endTime + ", status=" + status + ", lastUpdateTime=" + lastUpdateTime + ", numVerticesPerExecutionState=" + Arrays.toString(tasksPerState) + ", numTasks=" + numTasks + '}'; } public static final class JobDetailsSerializer extends StdSerializer<JobDetails> { private static final long serialVersionUID = 7915913423515194428L; public JobDetailsSerializer() { super(JobDetails.class); } @Override public void serialize( JobDetails jobDetails, JsonGenerator jsonGenerator, 
SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeStartObject(); jsonGenerator.writeStringField(FIELD_NAME_JOB_ID, jobDetails.getJobId().toString()); jsonGenerator.writeStringField(FIELD_NAME_JOB_NAME, jobDetails.getJobName()); jsonGenerator.writeStringField(FIELD_NAME_STATUS, jobDetails.getStatus().name()); jsonGenerator.writeNumberField(FIELD_NAME_START_TIME, jobDetails.getStartTime()); jsonGenerator.writeNumberField(FIELD_NAME_END_TIME, jobDetails.getEndTime()); jsonGenerator.writeNumberField(FIELD_NAME_DURATION, jobDetails.getDuration()); jsonGenerator.writeNumberField(FIELD_NAME_LAST_MODIFICATION, jobDetails.getLastUpdateTime()); jsonGenerator.writeObjectFieldStart("tasks"); jsonGenerator.writeNumberField(FIELD_NAME_TOTAL_NUMBER_TASKS, jobDetails.getNumTasks()); final int[] perState = jobDetails.getTasksPerState(); for (ExecutionState executionState : ExecutionState.values()) { jsonGenerator.writeNumberField(executionState.name().toLowerCase(), perState[executionState.ordinal()]); } jsonGenerator.writeEndObject(); jsonGenerator.writeEndObject(); } } public static final class JobDetailsDeserializer extends StdDeserializer<JobDetails> { private static final long serialVersionUID = 6089784742093294800L; public JobDetailsDeserializer() { super(JobDetails.class); } @Override public JobDetails deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode rootNode = jsonParser.readValueAsTree(); JobID jobId = JobID.fromHexString(rootNode.get(FIELD_NAME_JOB_ID).textValue()); String jobName = rootNode.get(FIELD_NAME_JOB_NAME).textValue(); long startTime = rootNode.get(FIELD_NAME_START_TIME).longValue(); long endTime = rootNode.get(FIELD_NAME_END_TIME).longValue(); long duration = rootNode.get(FIELD_NAME_DURATION).longValue(); JobStatus jobStatus = JobStatus.valueOf(rootNode.get(FIELD_NAME_STATUS).textValue()); long lastUpdateTime = rootNode.get(FIELD_NAME_LAST_MODIFICATION).longValue(); JsonNode tasksNode = rootNode.get("tasks"); int numTasks = tasksNode.get(FIELD_NAME_TOTAL_NUMBER_TASKS).intValue(); int[] numVerticesPerExecutionState = new int[ExecutionState.values().length]; for (ExecutionState executionState : ExecutionState.values()) { numVerticesPerExecutionState[executionState.ordinal()] = tasksNode.get(executionState.name().toLowerCase()).intValue(); } return new JobDetails( jobId, jobName, startTime, endTime, duration, jobStatus, lastUpdateTime, numVerticesPerExecutionState, numTasks); } } }
class JobDetails implements Serializable { private static final long serialVersionUID = -3391462110304948766L; private static final String FIELD_NAME_JOB_ID = "jid"; private static final String FIELD_NAME_JOB_NAME = "name"; private static final String FIELD_NAME_START_TIME = "start-time"; private static final String FIELD_NAME_END_TIME = "end-time"; private static final String FIELD_NAME_DURATION = "duration"; private static final String FIELD_NAME_STATUS = "state"; private static final String FIELD_NAME_LAST_MODIFICATION = "last-modification"; private static final String FIELD_NAME_TOTAL_NUMBER_TASKS = "total"; private final JobID jobId; private final String jobName; private final long startTime; private final long endTime; private final long duration; private final JobStatus status; private final long lastUpdateTime; private final int[] tasksPerState; private final int numTasks; public JobDetails( JobID jobId, String jobName, long startTime, long endTime, long duration, JobStatus status, long lastUpdateTime, int[] tasksPerState, int numTasks) { this.jobId = checkNotNull(jobId); this.jobName = checkNotNull(jobName); this.startTime = startTime; this.endTime = endTime; this.duration = duration; this.status = checkNotNull(status); this.lastUpdateTime = lastUpdateTime; Preconditions.checkArgument(tasksPerState.length == ExecutionState.values().length, "tasksPerState argument must be of size %s.", ExecutionState.values().length); this.tasksPerState = checkNotNull(tasksPerState); this.numTasks = numTasks; } public JobID getJobId() { return jobId; } public String getJobName() { return jobName; } public long getStartTime() { return startTime; } public long getEndTime() { return endTime; } public long getDuration() { return duration; } public JobStatus getStatus() { return status; } public long getLastUpdateTime() { return lastUpdateTime; } public int getNumTasks() { return numTasks; } public int[] getTasksPerState() { return tasksPerState; } @Override public boolean equals(Object o) { if (this == o) { return true; } else if (o != null && o.getClass() == JobDetails.class) { JobDetails that = (JobDetails) o; return this.endTime == that.endTime && this.lastUpdateTime == that.lastUpdateTime && this.numTasks == that.numTasks && this.startTime == that.startTime && this.status == that.status && this.jobId.equals(that.jobId) && this.jobName.equals(that.jobName) && Arrays.equals(this.tasksPerState, that.tasksPerState); } else { return false; } } @Override public int hashCode() { int result = jobId.hashCode(); result = 31 * result + jobName.hashCode(); result = 31 * result + (int) (startTime ^ (startTime >>> 32)); result = 31 * result + (int) (endTime ^ (endTime >>> 32)); result = 31 * result + status.hashCode(); result = 31 * result + (int) (lastUpdateTime ^ (lastUpdateTime >>> 32)); result = 31 * result + Arrays.hashCode(tasksPerState); result = 31 * result + numTasks; return result; } @Override public String toString() { return "JobDetails {" + "jobId=" + jobId + ", jobName='" + jobName + '\'' + ", startTime=" + startTime + ", endTime=" + endTime + ", status=" + status + ", lastUpdateTime=" + lastUpdateTime + ", numVerticesPerExecutionState=" + Arrays.toString(tasksPerState) + ", numTasks=" + numTasks + '}'; } public static final class JobDetailsSerializer extends StdSerializer<JobDetails> { private static final long serialVersionUID = 7915913423515194428L; public JobDetailsSerializer() { super(JobDetails.class); } @Override public void serialize( JobDetails jobDetails, JsonGenerator jsonGenerator, 
SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeStartObject(); jsonGenerator.writeStringField(FIELD_NAME_JOB_ID, jobDetails.getJobId().toString()); jsonGenerator.writeStringField(FIELD_NAME_JOB_NAME, jobDetails.getJobName()); jsonGenerator.writeStringField(FIELD_NAME_STATUS, jobDetails.getStatus().name()); jsonGenerator.writeNumberField(FIELD_NAME_START_TIME, jobDetails.getStartTime()); jsonGenerator.writeNumberField(FIELD_NAME_END_TIME, jobDetails.getEndTime()); jsonGenerator.writeNumberField(FIELD_NAME_DURATION, jobDetails.getDuration()); jsonGenerator.writeNumberField(FIELD_NAME_LAST_MODIFICATION, jobDetails.getLastUpdateTime()); jsonGenerator.writeObjectFieldStart("tasks"); jsonGenerator.writeNumberField(FIELD_NAME_TOTAL_NUMBER_TASKS, jobDetails.getNumTasks()); final int[] perState = jobDetails.getTasksPerState(); for (ExecutionState executionState : ExecutionState.values()) { jsonGenerator.writeNumberField(executionState.name().toLowerCase(), perState[executionState.ordinal()]); } jsonGenerator.writeEndObject(); jsonGenerator.writeEndObject(); } } public static final class JobDetailsDeserializer extends StdDeserializer<JobDetails> { private static final long serialVersionUID = 6089784742093294800L; public JobDetailsDeserializer() { super(JobDetails.class); } @Override public JobDetails deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode rootNode = jsonParser.readValueAsTree(); JobID jobId = JobID.fromHexString(rootNode.get(FIELD_NAME_JOB_ID).textValue()); String jobName = rootNode.get(FIELD_NAME_JOB_NAME).textValue(); long startTime = rootNode.get(FIELD_NAME_START_TIME).longValue(); long endTime = rootNode.get(FIELD_NAME_END_TIME).longValue(); long duration = rootNode.get(FIELD_NAME_DURATION).longValue(); JobStatus jobStatus = JobStatus.valueOf(rootNode.get(FIELD_NAME_STATUS).textValue()); long lastUpdateTime = rootNode.get(FIELD_NAME_LAST_MODIFICATION).longValue(); JsonNode tasksNode = rootNode.get("tasks"); int numTasks = tasksNode.get(FIELD_NAME_TOTAL_NUMBER_TASKS).intValue(); int[] numVerticesPerExecutionState = new int[ExecutionState.values().length]; for (ExecutionState executionState : ExecutionState.values()) { numVerticesPerExecutionState[executionState.ordinal()] = tasksNode.get(executionState.name().toLowerCase()).intValue(); } return new JobDetails( jobId, jobName, startTime, endTime, duration, jobStatus, lastUpdateTime, numVerticesPerExecutionState, numTasks); } } }
For an ExecutionException, if it's not TIMEOUT then it's treated as THRIFT_RPC_ERROR. I don't think that logic is reasonable.
private RowBatch getNextInternal(Status status, Backend backend) throws TException { long timeoutTs = System.currentTimeMillis() + timeoutMs; RowBatch rowBatch = new RowBatch(); InternalService.PTabletKeyLookupResponse pResult = null; try { Preconditions.checkNotNull(shortCircuitQueryContext.serializedDescTable); InternalService.PTabletKeyLookupRequest.Builder requestBuilder = InternalService.PTabletKeyLookupRequest.newBuilder() .setTabletId(tabletID) .setDescTbl(shortCircuitQueryContext.serializedDescTable) .setOutputExpr(shortCircuitQueryContext.serializedOutputExpr) .setQueryOptions(shortCircuitQueryContext.serializedQueryOptions) .setIsBinaryRow(ConnectContext.get().command == MysqlCommand.COM_STMT_EXECUTE); if (versions != null && !versions.isEmpty()) { requestBuilder.setVersion(versions.get(0)); } if (shortCircuitQueryContext.cacheID != null) { InternalService.UUID.Builder uuidBuilder = InternalService.UUID.newBuilder(); uuidBuilder.setUuidHigh(shortCircuitQueryContext.cacheID.getMostSignificantBits()); uuidBuilder.setUuidLow(shortCircuitQueryContext.cacheID.getLeastSignificantBits()); requestBuilder.setUuid(uuidBuilder); } addKeyTuples(requestBuilder); while (pResult == null) { InternalService.PTabletKeyLookupRequest request = requestBuilder.build(); Future<InternalService.PTabletKeyLookupResponse> futureResponse = BackendServiceProxy.getInstance().fetchTabletDataAsync(backend.getBrpcAddress(), request); long currentTs = System.currentTimeMillis(); if (currentTs >= timeoutTs) { LOG.warn("fetch result timeout {}", backend.getBrpcAddress()); status.updateStatus(TStatusCode.INTERNAL_ERROR, "query timeout"); return null; } try { pResult = futureResponse.get(timeoutTs - currentTs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.info("future get interrupted Exception"); if (isCancel) { status.updateStatus(TStatusCode.CANCELLED, "cancelled"); return null; } } catch (TimeoutException e) { futureResponse.cancel(true); LOG.warn("fetch result timeout {}, addr {}", timeoutTs - currentTs, backend.getBrpcAddress()); status.updateStatus(TStatusCode.INTERNAL_ERROR, "query timeout"); return null; } } } catch (RpcException e) { LOG.warn("fetch result rpc exception {}, e {}", backend.getBrpcAddress(), e); status.updateStatus(TStatusCode.THRIFT_RPC_ERROR, e.getMessage()); SimpleScheduler.addToBlacklist(backend.getId(), e.getMessage()); return null; } catch (ExecutionException e) { LOG.warn("fetch result execution exception {}, addr {}", e, backend.getBrpcAddress()); if (e.getMessage().contains("time out")) { status.updateStatus(TStatusCode.TIMEOUT, e.getMessage()); } else { status.updateStatus(TStatusCode.THRIFT_RPC_ERROR, e.getMessage()); SimpleScheduler.addToBlacklist(backend.getId(), e.getMessage()); } return null; } Status resultStatus = new Status(pResult.getStatus()); if (resultStatus.getErrorCode() != TStatusCode.OK) { status.updateStatus(resultStatus.getErrorCode(), resultStatus.getErrorMsg()); return null; } if (pResult.hasEmptyBatch() && pResult.getEmptyBatch()) { LOG.info("get empty rowbatch"); rowBatch.setEos(true); return rowBatch; } else if (pResult.hasRowBatch() && pResult.getRowBatch().size() > 0) { byte[] serialResult = pResult.getRowBatch().toByteArray(); TResultBatch resultBatch = new TResultBatch(); TDeserializer deserializer = new TDeserializer( new TCustomProtocolFactory(this.maxMsgSizeOfResultReceiver)); try { deserializer.deserialize(resultBatch, serialResult); } catch (TException e) { if (e.getMessage().contains("MaxMessageSize reached")) { throw new 
TException("MaxMessageSize reached, try increase max_msg_size_of_result_receiver"); } else { throw e; } } rowBatch.setBatch(resultBatch); rowBatch.setEos(true); return rowBatch; } if (isCancel) { status.updateStatus(TStatusCode.CANCELLED, "cancelled"); } return rowBatch; }
private RowBatch getNextInternal(Status status, Backend backend) throws TException { long timeoutTs = System.currentTimeMillis() + timeoutMs; RowBatch rowBatch = new RowBatch(); InternalService.PTabletKeyLookupResponse pResult = null; try { Preconditions.checkNotNull(shortCircuitQueryContext.serializedDescTable); InternalService.PTabletKeyLookupRequest.Builder requestBuilder = InternalService.PTabletKeyLookupRequest.newBuilder() .setTabletId(tabletID) .setDescTbl(shortCircuitQueryContext.serializedDescTable) .setOutputExpr(shortCircuitQueryContext.serializedOutputExpr) .setQueryOptions(shortCircuitQueryContext.serializedQueryOptions) .setIsBinaryRow(ConnectContext.get().command == MysqlCommand.COM_STMT_EXECUTE); if (snapshotVisibleVersions != null && !snapshotVisibleVersions.isEmpty()) { requestBuilder.setVersion(snapshotVisibleVersions.get(0)); } if (shortCircuitQueryContext.cacheID != null) { InternalService.UUID.Builder uuidBuilder = InternalService.UUID.newBuilder(); uuidBuilder.setUuidHigh(shortCircuitQueryContext.cacheID.getMostSignificantBits()); uuidBuilder.setUuidLow(shortCircuitQueryContext.cacheID.getLeastSignificantBits()); requestBuilder.setUuid(uuidBuilder); } addKeyTuples(requestBuilder); InternalService.PTabletKeyLookupRequest request = requestBuilder.build(); Future<InternalService.PTabletKeyLookupResponse> futureResponse = BackendServiceProxy.getInstance().fetchTabletDataAsync(backend.getBrpcAddress(), request); long currentTs = System.currentTimeMillis(); if (currentTs >= timeoutTs) { LOG.warn("fetch result timeout {}", backend.getBrpcAddress()); status.updateStatus(TStatusCode.INTERNAL_ERROR, "query request timeout"); return null; } try { pResult = futureResponse.get(timeoutTs - currentTs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.warn("future get interrupted Exception"); if (isCancel) { status.updateStatus(TStatusCode.CANCELLED, "cancelled"); return null; } } catch (TimeoutException e) { futureResponse.cancel(true); LOG.warn("fetch result timeout {}, addr {}", timeoutTs - currentTs, backend.getBrpcAddress()); status.updateStatus(TStatusCode.INTERNAL_ERROR, "query fetch result timeout"); return null; } } catch (RpcException e) { LOG.warn("query fetch rpc exception {}, e {}", backend.getBrpcAddress(), e); status.updateStatus(TStatusCode.THRIFT_RPC_ERROR, e.getMessage()); SimpleScheduler.addToBlacklist(backend.getId(), e.getMessage()); return null; } catch (ExecutionException e) { LOG.warn("query fetch execution exception {}, addr {}", e, backend.getBrpcAddress()); if (e.getMessage().contains("time out")) { status.updateStatus(TStatusCode.TIMEOUT, e.getMessage()); } else { status.updateStatus(TStatusCode.THRIFT_RPC_ERROR, e.getMessage()); SimpleScheduler.addToBlacklist(backend.getId(), e.getMessage()); } return null; } Status resultStatus = new Status(pResult.getStatus()); if (resultStatus.getErrorCode() != TStatusCode.OK) { status.updateStatus(resultStatus.getErrorCode(), resultStatus.getErrorMsg()); return null; } if (pResult.hasEmptyBatch() && pResult.getEmptyBatch()) { LOG.debug("get empty rowbatch"); rowBatch.setEos(true); status.updateStatus(TStatusCode.OK, ""); return rowBatch; } else if (pResult.hasRowBatch() && pResult.getRowBatch().size() > 0) { byte[] serialResult = pResult.getRowBatch().toByteArray(); TResultBatch resultBatch = new TResultBatch(); TDeserializer deserializer = new TDeserializer( new TCustomProtocolFactory(this.maxMsgSizeOfResultReceiver)); try { deserializer.deserialize(resultBatch, serialResult); } catch (TException e) { 
if (e.getMessage().contains("MaxMessageSize reached")) { throw new TException("MaxMessageSize reached, try increase max_msg_size_of_result_receiver"); } else { throw e; } } rowBatch.setBatch(resultBatch); rowBatch.setEos(true); status.updateStatus(TStatusCode.OK, ""); return rowBatch; } else { Preconditions.checkState(false, "No row batch or empty batch found"); } if (isCancel) { status.updateStatus(TStatusCode.CANCELLED, "cancelled"); } return rowBatch; }
class PointQueryExecutor implements CoordInterface { private static final Logger LOG = LogManager.getLogger(PointQueryExecutor.class); private long tabletID = 0; private long timeoutMs = Config.point_query_timeout_ms; private boolean isCancel = false; private List<Backend> candidateBackends; private final int maxMsgSizeOfResultReceiver; private List<Long> versions; private final ShortCircuitQueryContext shortCircuitQueryContext; public PointQueryExecutor(ShortCircuitQueryContext ctx, int maxMessageSize) { ctx.sanitize(); this.shortCircuitQueryContext = ctx; this.maxMsgSizeOfResultReceiver = maxMessageSize; } private void updateCloudPartitionVersions() throws RpcException { OlapScanNode planRoot = shortCircuitQueryContext.scanNode; List<CloudPartition> partitions = new ArrayList<>(); Set<Long> partitionSet = new HashSet<>(); OlapTable table = planRoot.getOlapTable(); for (Long id : planRoot.getSelectedPartitionIds()) { if (!partitionSet.contains(id)) { partitionSet.add(id); partitions.add((CloudPartition) table.getPartition(id)); } } versions = CloudPartition.getSnapshotVisibleVersion(partitions); Preconditions.checkState(versions.size() == 1); LOG.debug("set cloud version {}", versions.get(0)); } void setScanRangeLocations() throws Exception { OlapScanNode planRoot = shortCircuitQueryContext.scanNode; List<TScanRangeLocations> locations = planRoot.lazyEvaluateRangeLocations(); if (planRoot.getScanTabletIds().isEmpty()) { return; } Preconditions.checkState(planRoot.getScanTabletIds().size() == 1); this.tabletID = planRoot.getScanTabletIds().get(0); if (Config.isCloudMode() && ConnectContext.get().getSessionVariable().enableSnapshotPointQuery) { updateCloudPartitionVersions(); } Preconditions.checkNotNull(locations); candidateBackends = new ArrayList<>(); for (Long backendID : planRoot.getScanBackendIds()) { Backend backend = Env.getCurrentSystemInfo().getBackend(backendID); if (SimpleScheduler.isAvailable(backend)) { candidateBackends.add(backend); } } Collections.shuffle(this.candidateBackends); if (LOG.isDebugEnabled()) { LOG.debug("set scan locations, backend ids {}, tablet id {}", candidateBackends, tabletID); } } public static void directExecuteShortCircuitQuery(StmtExecutor executor, PreparedStatementContext preparedStmtCtx, StatementContext statementContext) throws Exception { Preconditions.checkNotNull(preparedStmtCtx.shortCircuitQueryContext); ShortCircuitQueryContext shortCircuitQueryContext = preparedStmtCtx.shortCircuitQueryContext.get(); List<Expr> conjunctVals = statementContext.getIdToPlaceholderRealExpr().values().stream().map( expression -> ( (Literal) expression).toLegacyLiteral()) .collect(Collectors.toList()); if (conjunctVals.size() != preparedStmtCtx.command.placeholderCount()) { throw new AnalysisException("Mismatched conjuncts values size with prepared" + "statement parameters size, expected " + preparedStmtCtx.command.placeholderCount() + ", but meet " + conjunctVals.size()); } updateScanNodeConjuncts(shortCircuitQueryContext.scanNode, conjunctVals); executor.executeAndSendResult(false, false, shortCircuitQueryContext.analzyedQuery, executor.getContext() .getMysqlChannel(), null, null); } private static void updateScanNodeConjuncts(OlapScanNode scanNode, List<Expr> conjunctVals) { for (int i = 0; i < conjunctVals.size(); ++i) { BinaryPredicate binaryPredicate = (BinaryPredicate) scanNode.getConjuncts().get(i); if (binaryPredicate.getChild(0) instanceof LiteralExpr) { binaryPredicate.setChild(0, conjunctVals.get(i)); } else { binaryPredicate.setChild(1, 
conjunctVals.get(i)); } } } public void setTimeout(long timeoutMs) { this.timeoutMs = timeoutMs; } void addKeyTuples( InternalService.PTabletKeyLookupRequest.Builder requestBuilder) { KeyTuple.Builder kBuilder = KeyTuple.newBuilder(); for (Expr expr : shortCircuitQueryContext.scanNode.getConjuncts()) { BinaryPredicate predicate = (BinaryPredicate) expr; kBuilder.addKeyColumnRep(predicate.getChild(1).getStringValue()); } requestBuilder.addKeyTuples(kBuilder); } @Override public void cancel(Status cancelReason) { } @Override public RowBatch getNext() throws Exception { setScanRangeLocations(); if (candidateBackends == null || candidateBackends.isEmpty()) { return new RowBatch(); } Iterator<Backend> backendIter = candidateBackends.iterator(); RowBatch rowBatch = null; int tryCount = 0; int maxTry = Math.min(Config.max_point_query_retry_time, candidateBackends.size()); Status status = new Status(); do { Backend backend = backendIter.next(); rowBatch = getNextInternal(status, backend); ++tryCount; if (rowBatch != null) { break; } if (tryCount >= maxTry) { break; } status.updateStatus(TStatusCode.OK, ""); } while (true); if (!status.ok()) { if (Strings.isNullOrEmpty(status.getErrorMsg())) { status.rewriteErrorMsg(); } if (status.isRpcError()) { throw new RpcException(null, status.getErrorMsg()); } else { String errMsg = status.getErrorMsg(); LOG.warn("query failed: {}", errMsg); int hostIndex = errMsg.indexOf("host"); if (hostIndex != -1) { errMsg = errMsg.substring(0, hostIndex); } throw new UserException(errMsg); } } return rowBatch; } @Override public void exec() throws Exception { } public void cancel() { isCancel = true; } @Override public List<TNetworkAddress> getInvolvedBackends() { return Lists.newArrayList(); } }
class PointQueryExecutor implements CoordInterface { private static final Logger LOG = LogManager.getLogger(PointQueryExecutor.class); private long tabletID = 0; private long timeoutMs = Config.point_query_timeout_ms; private boolean isCancel = false; private List<Backend> candidateBackends; private final int maxMsgSizeOfResultReceiver; private List<Long> snapshotVisibleVersions; private final ShortCircuitQueryContext shortCircuitQueryContext; public PointQueryExecutor(ShortCircuitQueryContext ctx, int maxMessageSize) { ctx.sanitize(); this.shortCircuitQueryContext = ctx; this.maxMsgSizeOfResultReceiver = maxMessageSize; } private void updateCloudPartitionVersions() throws RpcException { OlapScanNode scanNode = shortCircuitQueryContext.scanNode; List<CloudPartition> partitions = new ArrayList<>(); Set<Long> partitionSet = new HashSet<>(); OlapTable table = scanNode.getOlapTable(); for (Long id : scanNode.getSelectedPartitionIds()) { if (!partitionSet.contains(id)) { partitionSet.add(id); partitions.add((CloudPartition) table.getPartition(id)); } } snapshotVisibleVersions = CloudPartition.getSnapshotVisibleVersion(partitions); Preconditions.checkState(snapshotVisibleVersions.size() == 1); LOG.debug("set cloud version {}", snapshotVisibleVersions.get(0)); } void setScanRangeLocations() throws Exception { OlapScanNode scanNode = shortCircuitQueryContext.scanNode; List<TScanRangeLocations> locations = scanNode.lazyEvaluateRangeLocations(); Preconditions.checkNotNull(locations); if (scanNode.getScanTabletIds().isEmpty()) { return; } Preconditions.checkState(scanNode.getScanTabletIds().size() == 1); this.tabletID = scanNode.getScanTabletIds().get(0); if (Config.isCloudMode() && ConnectContext.get().getSessionVariable().enableSnapshotPointQuery) { updateCloudPartitionVersions(); } candidateBackends = new ArrayList<>(); for (Long backendID : scanNode.getScanBackendIds()) { Backend backend = Env.getCurrentSystemInfo().getBackend(backendID); if (SimpleScheduler.isAvailable(backend)) { candidateBackends.add(backend); } } Collections.shuffle(this.candidateBackends); if (LOG.isDebugEnabled()) { LOG.debug("set scan locations, backend ids {}, tablet id {}", candidateBackends, tabletID); } } public static void directExecuteShortCircuitQuery(StmtExecutor executor, PreparedStatementContext preparedStmtCtx, StatementContext statementContext) throws Exception { Preconditions.checkNotNull(preparedStmtCtx.shortCircuitQueryContext); ShortCircuitQueryContext shortCircuitQueryContext = preparedStmtCtx.shortCircuitQueryContext.get(); List<Expr> conjunctVals = statementContext.getIdToPlaceholderRealExpr().values().stream().map( expression -> ( (Literal) expression).toLegacyLiteral()) .collect(Collectors.toList()); if (conjunctVals.size() != preparedStmtCtx.command.placeholderCount()) { throw new AnalysisException("Mismatched conjuncts values size with prepared" + "statement parameters size, expected " + preparedStmtCtx.command.placeholderCount() + ", but meet " + conjunctVals.size()); } updateScanNodeConjuncts(shortCircuitQueryContext.scanNode, conjunctVals); executor.executeAndSendResult(false, false, shortCircuitQueryContext.analzyedQuery, executor.getContext() .getMysqlChannel(), null, null); } private static void updateScanNodeConjuncts(OlapScanNode scanNode, List<Expr> conjunctVals) { for (int i = 0; i < conjunctVals.size(); ++i) { BinaryPredicate binaryPredicate = (BinaryPredicate) scanNode.getConjuncts().get(i); if (binaryPredicate.getChild(0) instanceof LiteralExpr) { binaryPredicate.setChild(0, 
conjunctVals.get(i)); } else if (binaryPredicate.getChild(1) instanceof LiteralExpr) { binaryPredicate.setChild(1, conjunctVals.get(i)); } else { Preconditions.checkState(false, "Should conatains literal in " + binaryPredicate.toSqlImpl()); } } } public void setTimeout(long timeoutMs) { this.timeoutMs = timeoutMs; } void addKeyTuples( InternalService.PTabletKeyLookupRequest.Builder requestBuilder) { KeyTuple.Builder kBuilder = KeyTuple.newBuilder(); for (Expr expr : shortCircuitQueryContext.scanNode.getConjuncts()) { BinaryPredicate predicate = (BinaryPredicate) expr; kBuilder.addKeyColumnRep(predicate.getChild(1).getStringValue()); } requestBuilder.addKeyTuples(kBuilder); } @Override public void cancel(Status cancelReason) { } @Override public RowBatch getNext() throws Exception { setScanRangeLocations(); if (candidateBackends == null || candidateBackends.isEmpty()) { return new RowBatch(); } Iterator<Backend> backendIter = candidateBackends.iterator(); RowBatch rowBatch = null; int tryCount = 0; int maxTry = Math.min(Config.max_point_query_retry_time, candidateBackends.size()); Status status = new Status(); do { Backend backend = backendIter.next(); rowBatch = getNextInternal(status, backend); if (rowBatch != null) { break; } if (++tryCount >= maxTry) { break; } } while (true); if (!status.ok()) { if (Strings.isNullOrEmpty(status.getErrorMsg())) { status.rewriteErrorMsg(); } String errMsg = status.getErrorMsg(); LOG.warn("query failed: {}", errMsg); if (status.isRpcError()) { throw new RpcException(null, errMsg); } else { int hostIndex = errMsg.indexOf("host"); if (hostIndex != -1) { errMsg = errMsg.substring(0, hostIndex); } throw new UserException(errMsg); } } return rowBatch; } @Override public void exec() throws Exception { } public void cancel() { isCancel = true; } @Override public List<TNetworkAddress> getInvolvedBackends() { return Lists.newArrayList(); } }
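The objection in this record is that classifying an ExecutionException by substring-matching its message, and defaulting everything else to THRIFT_RPC_ERROR, mislabels unrelated failures. A more robust sketch walks the cause chain and classifies by exception type; the enum and the parameterized RPC exception type here are illustrative assumptions, not the Doris API:

```java
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

class ExecutionExceptionClassifierSketch {
    enum FailureKind { TIMEOUT, RPC_ERROR, INTERNAL_ERROR }

    // Classify by exception type rather than by message text, so an
    // unrecognized cause is reported as a generic internal error instead of
    // being silently mislabeled as a thrift RPC failure.
    static FailureKind classify(ExecutionException e, Class<? extends Throwable> rpcExceptionType) {
        for (Throwable t = e.getCause(); t != null; t = t.getCause()) {
            if (t instanceof TimeoutException) {
                return FailureKind.TIMEOUT;
            }
            if (rpcExceptionType.isInstance(t)) {
                return FailureKind.RPC_ERROR;
            }
        }
        return FailureKind.INTERNAL_ERROR;
    }
}
```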
Why only the first child, and not all the children?
public boolean isSelfMonotonic() { return children.get(0).isSelfMonotonic(); }
return children.get(0).isSelfMonotonic();
public boolean isSelfMonotonic() { return children.get(0).isSelfMonotonic(); }
class SubfieldExpr extends Expr { private final List<String> fieldNames; public SubfieldExpr(Expr child, List<String> fieldNames) { this(child, null, fieldNames); } public SubfieldExpr(Expr child, List<String> fieldNames, NodePosition pos) { this(child, null, fieldNames, pos); } public SubfieldExpr(Expr child, Type type, List<String> fieldNames) { this(child, type, fieldNames, NodePosition.ZERO); } public SubfieldExpr(Expr child, Type type, List<String> fieldNames, NodePosition pos) { super(pos); if (type != null) { Preconditions.checkArgument(child.getType().isStructType()); } children.add(child); this.type = type; this.fieldNames = fieldNames.stream().map(String::toLowerCase).collect(ImmutableList.toImmutableList()); } public SubfieldExpr(SubfieldExpr other) { super(other); fieldNames = other.fieldNames; } public List<String> getFieldNames() { return fieldNames; } public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitSubfieldExpr(this, context); } @Override protected void analyzeImpl(Analyzer analyzer) throws AnalysisException { Preconditions.checkState(false, "unreachable"); } @Override protected String toSqlImpl() { return getChild(0).toSqlImpl() + "." + Joiner.on('.').join(fieldNames); } @Override protected void toThrift(TExprNode msg) { msg.setNode_type(TExprNodeType.SUBFIELD_EXPR); msg.setUsed_subfield_names(fieldNames); } @Override public Expr clone() { return new SubfieldExpr(this); } @Override }
class SubfieldExpr extends Expr { private final List<String> fieldNames; public SubfieldExpr(Expr child, List<String> fieldNames) { this(child, null, fieldNames); } public SubfieldExpr(Expr child, List<String> fieldNames, NodePosition pos) { this(child, null, fieldNames, pos); } public SubfieldExpr(Expr child, Type type, List<String> fieldNames) { this(child, type, fieldNames, NodePosition.ZERO); } public SubfieldExpr(Expr child, Type type, List<String> fieldNames, NodePosition pos) { super(pos); if (type != null) { Preconditions.checkArgument(child.getType().isStructType()); } children.add(child); this.type = type; this.fieldNames = fieldNames.stream().map(String::toLowerCase).collect(ImmutableList.toImmutableList()); } public SubfieldExpr(SubfieldExpr other) { super(other); fieldNames = other.fieldNames; } public List<String> getFieldNames() { return fieldNames; } public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitSubfieldExpr(this, context); } @Override protected void analyzeImpl(Analyzer analyzer) throws AnalysisException { Preconditions.checkState(false, "unreachable"); } @Override protected String toSqlImpl() { return getChild(0).toSqlImpl() + "." + Joiner.on('.').join(fieldNames); } @Override protected void toThrift(TExprNode msg) { msg.setNode_type(TExprNodeType.SUBFIELD_EXPR); msg.setUsed_subfield_names(fieldNames); } @Override public Expr clone() { return new SubfieldExpr(this); } @Override }
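If monotonicity is meant to hold only when every input is monotonic, the check the reviewer suggests would fold over all children rather than just the first. A minimal sketch of that default (SubfieldExpr happens to have exactly one child, so for it the two forms are equivalent):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of a default monotonicity check over all children; leaf expression
// types would override this with their own base case.
abstract class ExprMonotonicitySketch {
    protected final List<ExprMonotonicitySketch> children = new ArrayList<>();

    boolean isSelfMonotonic() {
        // An expression is self-monotonic only if every child is.
        return children.stream().allMatch(ExprMonotonicitySketch::isSelfMonotonic);
    }
}
```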
This input stream should be closed properly.
public void testStreamWithEmptyByteArray() throws IOException { final byte[] data = new byte[0]; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); FSDataInputStream in = handle.openInputStream(); in.seek(0); byte[] dataGot = new byte[1]; assertEquals(0, in.read(dataGot, 0, 0)); }
FSDataInputStream in = handle.openInputStream();
public void testStreamWithEmptyByteArray() throws IOException { final byte[] data = new byte[0]; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); try (FSDataInputStream in = handle.openInputStream()) { byte[] dataGot = new byte[1]; assertEquals(0, in.read(dataGot, 0, 0)); assertEquals(-1, in.read()); } }
class ByteStreamStateHandleTest { @Test public void testStreamSeekAndPos() throws IOException { final byte[] data = {34, 25, 22, 66, 88, 54}; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); for (int i = data.length; i >= 0; i--) { FSDataInputStream in = handle.openInputStream(); in.seek(i); assertEquals(i, (int) in.getPos()); if (i < data.length) { assertEquals((int) data[i], in.read()); assertEquals(i + 1, (int) in.getPos()); } else { assertEquals(-1, in.read()); assertEquals(i, (int) in.getPos()); } } FSDataInputStream in = handle.openInputStream(); in.seek(data.length); assertEquals(-1, in.read()); assertEquals(-1, in.read()); assertEquals(-1, in.read()); assertEquals(data.length, (int) in.getPos()); } @Test public void testStreamSeekOutOfBounds() throws IOException { final int len = 10; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", new byte[len]); FSDataInputStream in = handle.openInputStream(); try { in.seek(-2); fail("should fail with an exception"); } catch (IOException e) { } in = handle.openInputStream(); try { in.seek(len + 1); fail("should fail with an exception"); } catch (IOException e) { } in = handle.openInputStream(); try { in.seek(((long) Integer.MAX_VALUE) + 100L); fail("should fail with an exception"); } catch (IOException e) { } } @Test public void testBulkRead() throws IOException { final byte[] data = {34, 25, 22, 66}; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); final int targetLen = 8; for (int start = 0; start < data.length; start++) { for (int num = 0; num < targetLen; num++) { FSDataInputStream in = handle.openInputStream(); in.seek(start); final byte[] target = new byte[targetLen]; final int read = in.read(target, targetLen - num, num); assertEquals(Math.min(num, data.length - start), read); for (int i = 0; i < read; i++) { assertEquals(data[start + i], target[targetLen - num + i]); } int newPos = start + read; assertEquals(newPos, (int) in.getPos()); assertEquals(newPos < data.length ? data[newPos] : -1, in.read()); } } } @SuppressWarnings("ResultOfMethodCallIgnored") @Test public void testBulkReadINdexOutOfBounds() throws IOException { final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", new byte[10]); FSDataInputStream in = handle.openInputStream(); try { in.read(new byte[10], -1, 5); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 10, 5); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 0, -2); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 5, 6); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 5, Integer.MAX_VALUE); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } } @Test }
class ByteStreamStateHandleTest { @Test public void testStreamSeekAndPos() throws IOException { final byte[] data = {34, 25, 22, 66, 88, 54}; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); for (int i = data.length; i >= 0; i--) { FSDataInputStream in = handle.openInputStream(); in.seek(i); assertEquals(i, (int) in.getPos()); if (i < data.length) { assertEquals((int) data[i], in.read()); assertEquals(i + 1, (int) in.getPos()); } else { assertEquals(-1, in.read()); assertEquals(i, (int) in.getPos()); } } FSDataInputStream in = handle.openInputStream(); in.seek(data.length); assertEquals(-1, in.read()); assertEquals(-1, in.read()); assertEquals(-1, in.read()); assertEquals(data.length, (int) in.getPos()); } @Test public void testStreamSeekOutOfBounds() throws IOException { final int len = 10; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", new byte[len]); FSDataInputStream in = handle.openInputStream(); try { in.seek(-2); fail("should fail with an exception"); } catch (IOException e) { } in = handle.openInputStream(); try { in.seek(len + 1); fail("should fail with an exception"); } catch (IOException e) { } in = handle.openInputStream(); try { in.seek(((long) Integer.MAX_VALUE) + 100L); fail("should fail with an exception"); } catch (IOException e) { } } @Test public void testBulkRead() throws IOException { final byte[] data = {34, 25, 22, 66}; final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data); final int targetLen = 8; for (int start = 0; start < data.length; start++) { for (int num = 0; num < targetLen; num++) { FSDataInputStream in = handle.openInputStream(); in.seek(start); final byte[] target = new byte[targetLen]; final int read = in.read(target, targetLen - num, num); assertEquals(Math.min(num, data.length - start), read); for (int i = 0; i < read; i++) { assertEquals(data[start + i], target[targetLen - num + i]); } int newPos = start + read; assertEquals(newPos, (int) in.getPos()); assertEquals(newPos < data.length ? data[newPos] : -1, in.read()); } } } @SuppressWarnings("ResultOfMethodCallIgnored") @Test public void testBulkReadINdexOutOfBounds() throws IOException { final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", new byte[10]); FSDataInputStream in = handle.openInputStream(); try { in.read(new byte[10], -1, 5); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 10, 5); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 0, -2); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 5, 6); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } in = handle.openInputStream(); try { in.read(new byte[10], 5, Integer.MAX_VALUE); fail("should fail with an exception"); } catch (IndexOutOfBoundsException e) { } } @Test }
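The fixed test wraps the stream in try-with-resources, which guarantees close() runs even when an assertion inside the block throws. A small self-contained demonstration of that guarantee (the names are illustrative):

```java
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

class TryWithResourcesDemo {
    public static void main(String[] args) throws IOException {
        AtomicBoolean closed = new AtomicBoolean(false);
        Closeable resource = () -> closed.set(true);
        try (Closeable r = resource) {
            // Simulate a failing assertion in the middle of the test body.
            throw new AssertionError("boom");
        } catch (AssertionError expected) {
            // close() already ran before the error propagated here.
        }
        System.out.println("closed = " + closed.get()); // prints: closed = true
    }
}
```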
No idea, please look at the most up-to-date close() functionality to make sure it is fine.
public void close() throws IOException { if (session == null) { return; } try (Closer closer = Closer.create()) { session.close(); } finally { session = null; } }
}
public void close() throws IOException { if (session == null) { return; } try (Closer closer = Closer.create()) { if (results != null) { closer.register(results); results = null; } session.close(); } finally { session = null; } }
class BigtableReaderImpl implements Reader { private BigtableSession session; private final BigtableSource source; private ResultScanner<Row> results; private Row currentRow; @VisibleForTesting BigtableReaderImpl(BigtableSession session, BigtableSource source) { this.session = session; this.source = source; } @Override public boolean start() throws IOException { RowSet.Builder rowSetBuilder = RowSet.newBuilder(); for (ByteKeyRange sourceRange : source.getRanges()) { rowSetBuilder = rowSetBuilder.addRowRanges( RowRange.newBuilder() .setStartKeyClosed(ByteString.copyFrom(sourceRange.getStartKey().getValue())) .setEndKeyOpen(ByteString.copyFrom(sourceRange.getEndKey().getValue()))); } RowSet rowSet = rowSetBuilder.build(); String tableNameSr = session.getOptions().getInstanceName().toTableNameStr(source.getTableId().get()); ServiceCallMetric serviceCallMetric = populateReaderCallMetric(session, source.getTableId().get()); ReadRowsRequest.Builder requestB = ReadRowsRequest.newBuilder().setRows(rowSet).setTableName(tableNameSr); if (source.getRowFilter() != null) { requestB.setFilter(source.getRowFilter()); } try { results = session.getDataClient().readRows(requestB.build()); serviceCallMetric.call("ok"); } catch (StatusRuntimeException e) { serviceCallMetric.call(e.getStatus().getCode().value()); throw e; } return advance(); } @Override public boolean advance() throws IOException { currentRow = results.next(); return currentRow != null; } @Override public Row getCurrentRow() throws NoSuchElementException { if (currentRow == null) { throw new NoSuchElementException(); } return currentRow; } }
class BigtableReaderImpl implements Reader { private BigtableSession session; private final BigtableSource source; private ResultScanner<Row> results; private Row currentRow; @VisibleForTesting BigtableReaderImpl(BigtableSession session, BigtableSource source) { this.session = session; this.source = source; } @Override public boolean start() throws IOException { RowSet.Builder rowSetBuilder = RowSet.newBuilder(); for (ByteKeyRange sourceRange : source.getRanges()) { rowSetBuilder = rowSetBuilder.addRowRanges( RowRange.newBuilder() .setStartKeyClosed(ByteString.copyFrom(sourceRange.getStartKey().getValue())) .setEndKeyOpen(ByteString.copyFrom(sourceRange.getEndKey().getValue()))); } RowSet rowSet = rowSetBuilder.build(); String tableNameSr = session.getOptions().getInstanceName().toTableNameStr(source.getTableId().get()); ServiceCallMetric serviceCallMetric = createCallMetric(session, source.getTableId().get()); ReadRowsRequest.Builder requestB = ReadRowsRequest.newBuilder().setRows(rowSet).setTableName(tableNameSr); if (source.getRowFilter() != null) { requestB.setFilter(source.getRowFilter()); } try { results = session.getDataClient().readRows(requestB.build()); serviceCallMetric.call("ok"); } catch (StatusRuntimeException e) { serviceCallMetric.call(e.getStatus().getCode().toString()); throw e; } return advance(); } @Override public boolean advance() throws IOException { currentRow = results.next(); return currentRow != null; } @Override public Row getCurrentRow() throws NoSuchElementException { if (currentRow == null) { throw new NoSuchElementException(); } return currentRow; } }
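The fixed close() in this row leans on Guava's `Closer`: the result scanner is register()-ed so that when the try-with-resources block closes the Closer, the scanner is closed too, even if `session.close()` throws. A self-contained sketch of that pattern (`ReaderTeardown` is illustrative, not part of the connector):

```java
import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.IOException;

// Sketch of the Closer-based teardown used by the fixed close() above:
// secondary resources are registered so the Closer closes them (in LIFO
// order, chaining any suppressed exceptions) even if closing the primary
// resource throws.
final class ReaderTeardown {
    static void close(Closeable results, Closeable session) throws IOException {
        try (Closer closer = Closer.create()) {
            if (results != null) {
                closer.register(results); // closed automatically when the Closer closes
            }
            session.close();              // may throw; the scanner still gets closed
        }
    }
}
```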
should we also do this optimisation for other strategies to compare performance in a cleaner way?
private void maybeScheduleRegion(final SchedulingPipelinedRegion region) { if (!areRegionInputsAllConsumable(region)) { return; } checkState(areRegionVerticesAllInCreatedState(region), "BUG: trying to schedule a region which is not in CREATED state"); final List<ExecutionVertexDeploymentOption> vertexDeploymentOptions = SchedulingStrategyUtils.createExecutionVertexDeploymentOptions( regionVerticesSorted.get(region), id -> deploymentOption); schedulerOperations.allocateSlotsAndDeploy(vertexDeploymentOptions); }
SchedulingStrategyUtils.createExecutionVertexDeploymentOptions(
private void maybeScheduleRegion(final SchedulingPipelinedRegion region) { if (!areRegionInputsAllConsumable(region)) { return; } checkState(areRegionVerticesAllInCreatedState(region), "BUG: trying to schedule a region which is not in CREATED state"); final List<ExecutionVertexDeploymentOption> vertexDeploymentOptions = SchedulingStrategyUtils.createExecutionVertexDeploymentOptions( regionVerticesSorted.get(region), id -> deploymentOption); schedulerOperations.allocateSlotsAndDeploy(vertexDeploymentOptions); }
class PipelinedRegionSchedulingStrategy implements SchedulingStrategy { private final SchedulerOperations schedulerOperations; private final SchedulingTopology schedulingTopology; private final DeploymentOption deploymentOption = new DeploymentOption(false); /** Result partitions are correlated if they have the same result id. */ private final Map<IntermediateDataSetID, Set<SchedulingResultPartition>> correlatedResultPartitions = new HashMap<>(); private final Map<IntermediateResultPartitionID, Set<SchedulingPipelinedRegion>> partitionConsumerRegions = new HashMap<>(); private final Map<SchedulingPipelinedRegion, List<ExecutionVertexID>> regionVerticesSorted = new IdentityHashMap<>(); public PipelinedRegionSchedulingStrategy( final SchedulerOperations schedulerOperations, final SchedulingTopology schedulingTopology) { this.schedulerOperations = checkNotNull(schedulerOperations); this.schedulingTopology = checkNotNull(schedulingTopology); init(); } private void init() { for (SchedulingPipelinedRegion region : schedulingTopology.getAllPipelinedRegions()) { for (SchedulingResultPartition partition : region.getConsumedResults()) { checkState(partition.getResultType() == ResultPartitionType.BLOCKING); partitionConsumerRegions.computeIfAbsent(partition.getId(), pid -> new HashSet<>()).add(region); correlatedResultPartitions.computeIfAbsent(partition.getResultId(), rid -> new HashSet<>()).add(partition); } } for (SchedulingExecutionVertex vertex : schedulingTopology.getVertices()) { final SchedulingPipelinedRegion region = schedulingTopology.getPipelinedRegionOfVertex(vertex.getId()); regionVerticesSorted.computeIfAbsent(region, r -> new ArrayList<>()).add(vertex.getId()); } } @Override public void startScheduling() { final Set<SchedulingPipelinedRegion> sourceRegions = IterableUtils .toStream(schedulingTopology.getAllPipelinedRegions()) .filter(region -> !region.getConsumedResults().iterator().hasNext()) .collect(Collectors.toSet()); maybeScheduleRegions(sourceRegions); } @Override public void restartTasks(final Set<ExecutionVertexID> verticesToRestart) { final Set<SchedulingPipelinedRegion> regionsToRestart = verticesToRestart.stream() .map(schedulingTopology::getPipelinedRegionOfVertex) .collect(Collectors.toSet()); maybeScheduleRegions(regionsToRestart); } @Override public void onExecutionStateChange(final ExecutionVertexID executionVertexId, final ExecutionState executionState) { if (executionState == ExecutionState.FINISHED) { final Set<SchedulingResultPartition> finishedPartitions = IterableUtils .toStream(schedulingTopology.getVertex(executionVertexId).getProducedResults()) .filter(partition -> partitionConsumerRegions.containsKey(partition.getId())) .filter(partition -> partition.getState() == ResultPartitionState.CONSUMABLE) .flatMap(partition -> correlatedResultPartitions.get(partition.getResultId()).stream()) .collect(Collectors.toSet()); final Set<SchedulingPipelinedRegion> consumerRegions = finishedPartitions.stream() .flatMap(partition -> partitionConsumerRegions.get(partition.getId()).stream()) .collect(Collectors.toSet()); maybeScheduleRegions(consumerRegions); } } @Override public void onPartitionConsumable(final IntermediateResultPartitionID resultPartitionId) { } private void maybeScheduleRegions(final Set<SchedulingPipelinedRegion> regions) { final List<SchedulingPipelinedRegion> regionsSorted = SchedulingStrategyUtils.sortPipelinedRegionsInTopologicalOrder(schedulingTopology, regions); for (SchedulingPipelinedRegion region : regionsSorted) { maybeScheduleRegion(region); } } 
private boolean areRegionInputsAllConsumable(final SchedulingPipelinedRegion region) { for (SchedulingResultPartition partition : region.getConsumedResults()) { if (partition.getState() != ResultPartitionState.CONSUMABLE) { return false; } } return true; } private boolean areRegionVerticesAllInCreatedState(final SchedulingPipelinedRegion region) { for (SchedulingExecutionVertex vertex : region.getVertices()) { if (vertex.getState() != ExecutionState.CREATED) { return false; } } return true; } /** * The factory for creating {@link PipelinedRegionSchedulingStrategy}. */ public static class Factory implements SchedulingStrategyFactory { @Override public SchedulingStrategy createInstance( final SchedulerOperations schedulerOperations, final SchedulingTopology schedulingTopology) { return new PipelinedRegionSchedulingStrategy(schedulerOperations, schedulingTopology); } } }
class PipelinedRegionSchedulingStrategy implements SchedulingStrategy { private final SchedulerOperations schedulerOperations; private final SchedulingTopology schedulingTopology; private final DeploymentOption deploymentOption = new DeploymentOption(false); /** Result partitions are correlated if they have the same result id. */ private final Map<IntermediateDataSetID, Set<SchedulingResultPartition>> correlatedResultPartitions = new HashMap<>(); private final Map<IntermediateResultPartitionID, Set<SchedulingPipelinedRegion>> partitionConsumerRegions = new HashMap<>(); private final Map<SchedulingPipelinedRegion, List<ExecutionVertexID>> regionVerticesSorted = new IdentityHashMap<>(); public PipelinedRegionSchedulingStrategy( final SchedulerOperations schedulerOperations, final SchedulingTopology schedulingTopology) { this.schedulerOperations = checkNotNull(schedulerOperations); this.schedulingTopology = checkNotNull(schedulingTopology); init(); } private void init() { for (SchedulingPipelinedRegion region : schedulingTopology.getAllPipelinedRegions()) { for (SchedulingResultPartition partition : region.getConsumedResults()) { checkState(partition.getResultType() == ResultPartitionType.BLOCKING); partitionConsumerRegions.computeIfAbsent(partition.getId(), pid -> new HashSet<>()).add(region); correlatedResultPartitions.computeIfAbsent(partition.getResultId(), rid -> new HashSet<>()).add(partition); } } for (SchedulingExecutionVertex vertex : schedulingTopology.getVertices()) { final SchedulingPipelinedRegion region = schedulingTopology.getPipelinedRegionOfVertex(vertex.getId()); regionVerticesSorted.computeIfAbsent(region, r -> new ArrayList<>()).add(vertex.getId()); } } @Override public void startScheduling() { final Set<SchedulingPipelinedRegion> sourceRegions = IterableUtils .toStream(schedulingTopology.getAllPipelinedRegions()) .filter(region -> !region.getConsumedResults().iterator().hasNext()) .collect(Collectors.toSet()); maybeScheduleRegions(sourceRegions); } @Override public void restartTasks(final Set<ExecutionVertexID> verticesToRestart) { final Set<SchedulingPipelinedRegion> regionsToRestart = verticesToRestart.stream() .map(schedulingTopology::getPipelinedRegionOfVertex) .collect(Collectors.toSet()); maybeScheduleRegions(regionsToRestart); } @Override public void onExecutionStateChange(final ExecutionVertexID executionVertexId, final ExecutionState executionState) { if (executionState == ExecutionState.FINISHED) { final Set<SchedulingResultPartition> finishedPartitions = IterableUtils .toStream(schedulingTopology.getVertex(executionVertexId).getProducedResults()) .filter(partition -> partitionConsumerRegions.containsKey(partition.getId())) .filter(partition -> partition.getState() == ResultPartitionState.CONSUMABLE) .flatMap(partition -> correlatedResultPartitions.get(partition.getResultId()).stream()) .collect(Collectors.toSet()); final Set<SchedulingPipelinedRegion> consumerRegions = finishedPartitions.stream() .flatMap(partition -> partitionConsumerRegions.get(partition.getId()).stream()) .collect(Collectors.toSet()); maybeScheduleRegions(consumerRegions); } } @Override public void onPartitionConsumable(final IntermediateResultPartitionID resultPartitionId) { } private void maybeScheduleRegions(final Set<SchedulingPipelinedRegion> regions) { final List<SchedulingPipelinedRegion> regionsSorted = SchedulingStrategyUtils.sortPipelinedRegionsInTopologicalOrder(schedulingTopology, regions); for (SchedulingPipelinedRegion region : regionsSorted) { maybeScheduleRegion(region); } } 
private boolean areRegionInputsAllConsumable(final SchedulingPipelinedRegion region) { for (SchedulingResultPartition partition : region.getConsumedResults()) { if (partition.getState() != ResultPartitionState.CONSUMABLE) { return false; } } return true; } private boolean areRegionVerticesAllInCreatedState(final SchedulingPipelinedRegion region) { for (SchedulingExecutionVertex vertex : region.getVertices()) { if (vertex.getState() != ExecutionState.CREATED) { return false; } } return true; } /** * The factory for creating {@link PipelinedRegionSchedulingStrategy}. */ public static class Factory implements SchedulingStrategyFactory { @Override public SchedulingStrategy createInstance( final SchedulerOperations schedulerOperations, final SchedulingTopology schedulingTopology) { return new PipelinedRegionSchedulingStrategy(schedulerOperations, schedulingTopology); } } }
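The optimisation the comment refers to is the bulk construction of deployment options for a whole region (the `createExecutionVertexDeploymentOptions` call in `maybeScheduleRegion`), followed by a single `allocateSlotsAndDeploy`. Stripped of the Flink types, the idea is a one-pass map over the region's topologically sorted vertices; a generic sketch, with all names as stand-ins:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Generic sketch of the bulk-deployment idea: build one option per vertex in
// topological order and hand the whole batch to the scheduler in a single
// call, instead of allocating and deploying vertex by vertex.
final class BulkDeploymentSketch {
    static <V, O> List<O> createOptions(List<V> sortedVertices, Function<V, O> optionFactory) {
        List<O> options = new ArrayList<>(sortedVertices.size());
        for (V vertexId : sortedVertices) {
            options.add(optionFactory.apply(vertexId)); // region order is preserved
        }
        return options;
    }
}
```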
Ah, I see what you mean. Yes, we can certainly do that.
public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) { if (serviceBindings.isEmpty()) { return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(); for (ServiceBindingConverter converter : serviceBindingConverters) { Optional<ServiceBindingConfigSource> optional = converter.convert(serviceBindings); if (optional.isPresent()) { result.add(optional.get()); } } return result; }
if (optional.isPresent()) {
public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) { if (serviceBindings.isEmpty()) { return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(); for (ServiceBindingConverter converter : serviceBindingConverters) { Optional<ServiceBindingConfigSource> optional = converter.convert(serviceBindings); if (optional.isPresent()) { result.add(optional.get()); } } for (ServiceBinding serviceBinding : serviceBindings) { Map<String, String> serviceBindingProperties = serviceBinding.getProperties(); Map<String, String> rawConfigSourceProperties = new HashMap<>(); for (Map.Entry<String, String> entry : serviceBindingProperties.entrySet()) { rawConfigSourceProperties.put("quarkus." + serviceBinding.getName() + "." + entry.getKey(), entry.getValue()); } result.add(new ServiceBindingConfigSource("service-binding-" + serviceBinding.getName() + "-raw", rawConfigSourceProperties)); } return result; }
class KubernetesServiceBindingConfigSourceProvider implements ConfigSourceProvider { private final List<ServiceBinding> serviceBindings; private final List<ServiceBindingConverter> serviceBindingConverters; public KubernetesServiceBindingConfigSourceProvider(String bindingRoot) { this(bindingRoot, determineConverters()); } KubernetesServiceBindingConfigSourceProvider(String bindingRoot, List<ServiceBindingConverter> serviceBindingConverters) { this.serviceBindingConverters = serviceBindingConverters; Path p = Paths.get(bindingRoot); if (!Files.exists(p)) { serviceBindings = Collections.emptyList(); return; } if (!Files.isDirectory(p)) { throw new IllegalArgumentException("Service Binding root '" + p + "' is not a directory"); } File[] files = p.toFile().listFiles(); if (files == null) { serviceBindings = Collections.emptyList(); } else { serviceBindings = new ArrayList<>(files.length); for (File f : files) { serviceBindings.add(new ServiceBinding(f.toPath())); } serviceBindings.sort(new Comparator<ServiceBinding>() { @Override public int compare(ServiceBinding o1, ServiceBinding o2) { if (!o1.getName().equals(o2.getName())) { return o1.getName().compareTo(o2.getName()); } return o1.getProvider().compareTo(o2.getProvider()); } }); } } private static List<ServiceBindingConverter> determineConverters() { List<ServiceBindingConverter> result = new ArrayList<>(); ServiceLoader<ServiceBindingConverter> loader = ServiceLoader.load(ServiceBindingConverter.class, Thread.currentThread().getContextClassLoader()); for (ServiceBindingConverter c : loader) { result.add(c); } return result; } @Override }
class KubernetesServiceBindingConfigSourceProvider implements ConfigSourceProvider { private final List<ServiceBinding> serviceBindings; private final List<ServiceBindingConverter> serviceBindingConverters; public KubernetesServiceBindingConfigSourceProvider(String bindingRoot) { this(bindingRoot, determineConverters()); } KubernetesServiceBindingConfigSourceProvider(String bindingRoot, List<ServiceBindingConverter> serviceBindingConverters) { this.serviceBindingConverters = serviceBindingConverters; Path p = Paths.get(bindingRoot); if (!Files.exists(p)) { serviceBindings = Collections.emptyList(); return; } if (!Files.isDirectory(p)) { throw new IllegalArgumentException("Service Binding root '" + p + "' is not a directory"); } File[] files = p.toFile().listFiles(); if (files == null) { serviceBindings = Collections.emptyList(); } else { serviceBindings = new ArrayList<>(files.length); for (File f : files) { serviceBindings.add(new ServiceBinding(f.toPath())); } serviceBindings.sort(new Comparator<ServiceBinding>() { @Override public int compare(ServiceBinding o1, ServiceBinding o2) { if (!o1.getName().equals(o2.getName())) { return o1.getName().compareTo(o2.getName()); } return o1.getProvider().compareTo(o2.getProvider()); } }); } } private static List<ServiceBindingConverter> determineConverters() { List<ServiceBindingConverter> result = new ArrayList<>(); ServiceLoader<ServiceBindingConverter> loader = ServiceLoader.load(ServiceBindingConverter.class, Thread.currentThread().getContextClassLoader()); for (ServiceBindingConverter c : loader) { result.add(c); } return result; } @Override }
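The change in this row additionally exposes every binding property verbatim under a `quarkus.<binding-name>.<key>` prefix, alongside whatever the converters produced. The key mangling is simple; a sketch (names illustrative): a binding named `my-postgres` with a `username` file ends up as `quarkus.my-postgres.username`.

```java
import java.util.HashMap;
import java.util.Map;

// Sketch of the raw-property prefixing done in the updated
// getConfigSources(): each binding property is re-exposed under
// "quarkus.<binding-name>.<key>".
final class RawBindingKeys {
    static Map<String, String> prefixed(String bindingName, Map<String, String> properties) {
        Map<String, String> raw = new HashMap<>();
        for (Map.Entry<String, String> e : properties.entrySet()) {
            raw.put("quarkus." + bindingName + "." + e.getKey(), e.getValue());
        }
        return raw;
    }
}
```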
thoughts on moving the 429 and 503 checks to `RetryStrategy::calculateRetryDelay(HttpResponse, int)` as well? this way we ensure `RetryStrategy::calculateRetryDelay(HttpResponse, int)` always gets called
private Duration determineDelayDuration(HttpResponse response, int tryCount) { int code = response.getStatusCode(); if (code != 429 && code != 503) { return retryStrategy.calculateRetryDelay(tryCount); } return retryStrategy.calculateRetryDelay(response, tryCount); }
&& code != 503) {
private Duration determineDelayDuration(HttpResponse response, int tryCount) { int code = response.getStatusCode(); if (code != 429 && code != 503) { return retryStrategy.calculateRetryDelay(tryCount); } String retryHeaderValue = null; if (!isNullOrEmpty(this.retryAfterHeader)) { retryHeaderValue = response.getHeaderValue(this.retryAfterHeader); } if (isNullOrEmpty(retryHeaderValue)) { return this.retryStrategy.calculateRetryDelay(tryCount); } return Duration.of(Integer.parseInt(retryHeaderValue), this.retryAfterTimeUnit); }
class RetryPolicy implements HttpPipelinePolicy { private final ClientLogger logger = new ClientLogger(RetryPolicy.class); private final RetryStrategy retryStrategy; /** * Creates {@link RetryPolicy} with default {@link ExponentialBackoff} as {@link RetryStrategy} and use * 'retry-after-ms' in {@link HttpResponse} header for calculating retry delay. */ public RetryPolicy() { this(new ExponentialBackoff()); } /** * Creates a {@link RetryPolicy} with the provided {@link RetryStrategy}. * * @param retryStrategy The {@link RetryStrategy} used for retries. */ public RetryPolicy(RetryStrategy retryStrategy) { this.retryStrategy = Objects.requireNonNull(retryStrategy, "'retryStrategy' cannot be null"); } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return attemptAsync(context, next, context.getHttpRequest(), 0); } private Mono<HttpResponse> attemptAsync(final HttpPipelineCallContext context, final HttpPipelineNextPolicy next, final HttpRequest originalHttpRequest, final int tryCount) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process() .flatMap(httpResponse -> { if (shouldRetry(httpResponse, tryCount)) { final Duration delayDuration = determineDelayDuration(httpResponse, tryCount); logger.verbose("[Retrying] Try count: {}, Delay duration in seconds: {}", tryCount, delayDuration.getSeconds()); return attemptAsync(context, next, originalHttpRequest, tryCount + 1) .delaySubscription(delayDuration); } else { return Mono.just(httpResponse); } }) .onErrorResume(err -> { int maxRetries = retryStrategy.getMaxRetries(); if (tryCount < maxRetries) { logger.verbose("[Error Resume] Try count: {}, Error: {}", tryCount, err); return attemptAsync(context, next, originalHttpRequest, tryCount + 1) .delaySubscription(retryStrategy.calculateRetryDelay(tryCount)); } else { return Mono.error(new RuntimeException( String.format("Max retries %d times exceeded. Error Details: %s", maxRetries, err.getMessage()), err)); } }); } private boolean shouldRetry(HttpResponse response, int tryCount) { return tryCount < retryStrategy.getMaxRetries() && retryStrategy.shouldRetry(response); } /** * Determines the delay duration that should be waited before retrying. * @param response HTTP response * @return If the HTTP response has a retry-after-ms header that will be returned, * otherwise the duration used during the construction of the policy. */ }
class RetryPolicy implements HttpPipelinePolicy { private final ClientLogger logger = new ClientLogger(RetryPolicy.class); private final RetryStrategy retryStrategy; private final String retryAfterHeader; private final ChronoUnit retryAfterTimeUnit; /** * Creates {@link RetryPolicy} with default {@link ExponentialBackoff} as {@link RetryStrategy} and ignore the * delay provided in response header. */ public RetryPolicy() { this(new ExponentialBackoff(), null, null); } /** * Creates {@link RetryPolicy} with default {@link ExponentialBackoff} as {@link RetryStrategy} and use * provided {@code retryAfterHeader} in {@link HttpResponse} headers for calculating retry delay. * * @param retryAfterHeader The HTTP header, such as 'Retry-After' or 'x-ms-retry-after-ms', to lookup for the * retry delay. If the value is {@code null}, {@link RetryPolicy} will use the retry strategy to compute the delay * and ignore the delay provided in response header. * @param retryAfterTimeUnit The time unit to use when applying the retry delay. {@code null} is valid if, and only * if, {@code retryAfterHeader} is {@code null}. * @throws NullPointerException When {@code retryAfterTimeUnit} is {@code null} and {@code retryAfterHeader} is * not {@code null}. */ public RetryPolicy(String retryAfterHeader, ChronoUnit retryAfterTimeUnit) { this(new ExponentialBackoff(), retryAfterHeader, retryAfterTimeUnit); } /** * Creates {@link RetryPolicy} with the provided {@link RetryStrategy} and default {@link ExponentialBackoff} * as {@link RetryStrategy}. It will use provided {@code retryAfterHeader} in {@link HttpResponse} headers for * calculating retry delay. * * @param retryStrategy The {@link RetryStrategy} used for retries. * @param retryAfterHeader The HTTP header, such as 'Retry-After' or 'x-ms-retry-after-ms', to lookup for the * retry delay. If the value is {@code null}, {@link RetryPolicy} will use the retry strategy to compute the delay * and ignore the delay provided in response header. * @param retryAfterTimeUnit The time unit to use when applying the retry delay. {@code null} is valid if, and only * if, {@code retryAfterHeader} is {@code null}. * * @throws NullPointerException When {@code retryStrategy} is {@code null}. Also when {@code retryAfterTimeUnit} * is {@code null} and {@code retryAfterHeader} is not {@code null}. */ public RetryPolicy(RetryStrategy retryStrategy, String retryAfterHeader, ChronoUnit retryAfterTimeUnit) { this.retryStrategy = Objects.requireNonNull(retryStrategy, "'retryStrategy' cannot be null."); this.retryAfterHeader = retryAfterHeader; this.retryAfterTimeUnit = retryAfterTimeUnit; if (!isNullOrEmpty(retryAfterHeader)) { Objects.requireNonNull(retryAfterTimeUnit, "'retryAfterTimeUnit' cannot be null."); } } /** * Creates a {@link RetryPolicy} with the provided {@link RetryStrategy} and ignore the delay provided in * response header. * * @param retryStrategy The {@link RetryStrategy} used for retries. * * @throws NullPointerException When {@code retryStrategy} is {@code null}. 
*/ public RetryPolicy(RetryStrategy retryStrategy) { this(retryStrategy, null, null); } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return attemptAsync(context, next, context.getHttpRequest(), 0); } private Mono<HttpResponse> attemptAsync(final HttpPipelineCallContext context, final HttpPipelineNextPolicy next, final HttpRequest originalHttpRequest, final int tryCount) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process() .flatMap(httpResponse -> { if (shouldRetry(httpResponse, tryCount)) { final Duration delayDuration = determineDelayDuration(httpResponse, tryCount); logger.verbose("[Retrying] Try count: {}, Delay duration in seconds: {}", tryCount, delayDuration.getSeconds()); return attemptAsync(context, next, originalHttpRequest, tryCount + 1) .delaySubscription(delayDuration); } else { return Mono.just(httpResponse); } }) .onErrorResume(err -> { int maxRetries = retryStrategy.getMaxRetries(); if (tryCount < maxRetries) { logger.verbose("[Error Resume] Try count: {}, Error: {}", tryCount, err); return attemptAsync(context, next, originalHttpRequest, tryCount + 1) .delaySubscription(retryStrategy.calculateRetryDelay(tryCount)); } else { return Mono.error(new RuntimeException( String.format("Max retries %d times exceeded. Error Details: %s", maxRetries, err.getMessage()), err)); } }); } private boolean shouldRetry(HttpResponse response, int tryCount) { return tryCount < retryStrategy.getMaxRetries() && retryStrategy.shouldRetry(response); } /** * Determines the delay duration that should be waited before retrying. * @param response HTTP response * @return If the HTTP response has a retry-after-ms header that will be returned, * otherwise the duration used during the construction of the policy. */ }
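The reviewer's suggestion — always route through `RetryStrategy::calculateRetryDelay(HttpResponse, int)` — could be realised with a default method on the strategy interface that owns the 429/503 and `Retry-After` handling and falls back to the attempt-based overload. A trimmed, hypothetical sketch (the real azure-core interface differs; `RetryStrategySketch` and `HttpResponseSketch` are stand-ins):

```java
import java.time.Duration;

// Hypothetical sketch of folding the 429/503 handling into the strategy via
// a default method, so RetryPolicy can call the HttpResponse overload
// unconditionally.
interface RetryStrategySketch {
    Duration calculateRetryDelay(int retryAttempts);

    default Duration calculateRetryDelay(HttpResponseSketch response, int retryAttempts) {
        int code = response.statusCode();
        if (code == 429 || code == 503) {
            String retryAfter = response.headerValue("Retry-After");
            if (retryAfter != null && !retryAfter.isEmpty()) {
                // seconds-only parse for brevity; the real header may also be an HTTP-date
                return Duration.ofSeconds(Long.parseLong(retryAfter));
            }
        }
        return calculateRetryDelay(retryAttempts); // otherwise plain backoff
    }
}

// Minimal response abstraction for the sketch.
interface HttpResponseSketch {
    int statusCode();

    String headerValue(String name);
}
```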
I think there will be a read/write conflict if you do not take the read lock here.
public void removeOldTaskInfo() { long currentTimeMs = System.currentTimeMillis(); List<Task> currentTask = showTasks(null); Iterator<Task> iterator = currentTask.iterator(); List<Long> taskIdToDelete = Lists.newArrayList(); while (iterator.hasNext()) { Task task = iterator.next(); Long expireTime = task.getExpireTime(); if (expireTime > 0 && currentTimeMs > expireTime) { taskIdToDelete.add(task.getId()); } } dropTasks(taskIdToDelete, true); }
Task task = iterator.next();
public void removeOldTaskInfo() { long currentTimeMs = System.currentTimeMillis(); List<Long> taskIdToDelete = Lists.newArrayList(); if (!tryTaskLock()) { return; } try { List<Task> currentTask = showTasks(null); for (Task task : currentTask) { Long expireTime = task.getExpireTime(); if (expireTime > 0 && currentTimeMs > expireTime) { taskIdToDelete.add(task.getId()); } } } finally { taskUnlock(); } dropTasks(taskIdToDelete, true); }
class TaskManager { private static final Logger LOG = LogManager.getLogger(TaskManager.class); public static final long TASK_EXISTS = -1L; public static final long DUPLICATE_CREATE_TASK = -2L; public static final long GET_TASK_LOCK_FAILED = -3L; private final Map<Long, Task> manualTaskMap; private final Map<String, Task> nameToTaskMap; private final TaskRunManager taskRunManager; private final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1); private final QueryableReentrantLock lock; private AtomicBoolean isStart = new AtomicBoolean(false); public TaskManager() { manualTaskMap = Maps.newConcurrentMap(); nameToTaskMap = Maps.newConcurrentMap(); taskRunManager = new TaskRunManager(); lock = new QueryableReentrantLock(true); } public void start() { if (isStart.compareAndSet(false, true)) { clearUnfinishedTaskRun(); dispatchScheduler.scheduleAtFixedRate(() -> { if (!tryLock()) { return; } try { taskRunManager.checkRunningTaskRun(); taskRunManager.scheduledPendingTaskRun(); } catch (Exception ex) { LOG.warn("failed to dispatch job.", ex); } finally { unlock(); } }, 0, 1, TimeUnit.SECONDS); } } private void clearUnfinishedTaskRun() { if (!tryLock()) { return; } try { Iterator<Long> pendingIter = taskRunManager.getPendingTaskRunMap().keySet().iterator(); while (pendingIter.hasNext()) { Queue<TaskRun> taskRuns = taskRunManager.getPendingTaskRunMap().get(pendingIter.next()); for (TaskRun taskRun : taskRuns) { taskRun.getStatus().setErrorMessage("Fe restart abort the task"); taskRun.getStatus().setErrorCode(-1); taskRun.getStatus().setState(Constants.TaskRunState.FAILED); taskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus()); } pendingIter.remove(); } Iterator<Long> runningIter = taskRunManager.getRunningTaskRunMap().keySet().iterator(); while (runningIter.hasNext()) { TaskRun taskRun = taskRunManager.getRunningTaskRunMap().get(runningIter.next()); taskRun.getStatus().setErrorMessage("Fe restart abort the task"); taskRun.getStatus().setErrorCode(-1); taskRun.getStatus().setState(Constants.TaskRunState.FAILED); runningIter.remove(); taskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus()); } } finally { unlock(); } } public long createTask(Task task, boolean isReplay) { if (!tryLock()) { return GET_TASK_LOCK_FAILED; } try { if (nameToTaskMap.containsKey(task.getName())) { return TASK_EXISTS; } nameToTaskMap.put(task.getName(), task); if (manualTaskMap.containsKey(task.getId())) { return DUPLICATE_CREATE_TASK; } manualTaskMap.put(task.getId(), task); if (!isReplay) { GlobalStateMgr.getCurrentState().getEditLog().logCreateTask(task); } return task.getId(); } finally { unlock(); } } public SubmitResult executeTask(String taskName) { Task task = nameToTaskMap.get(taskName); if (task == null) { return new SubmitResult(null, SubmitResult.SubmitStatus.FAILED); } return taskRunManager.submitTaskRun(TaskRunBuilder.newBuilder(task).build()); } public void dropTasks(List<Long> taskIdList, boolean isReplay) { if (!tryLock()) { return; } try { for (long taskId : taskIdList) { Task task = manualTaskMap.get(taskId); if (task == null) { LOG.warn("drop taskId {} failed because task is null", taskId); continue; } nameToTaskMap.remove(task.getName()); manualTaskMap.remove(task.getId()); } if (!isReplay) { GlobalStateMgr.getCurrentState().getEditLog().logDropTasks(taskIdList); } } finally { unlock(); } LOG.info("drop tasks:{}", taskIdList); } public List<Task> showTasks(String dbName) { List<Task> taskList = Lists.newArrayList(); if (dbName == null) { 
taskList.addAll(manualTaskMap.values()); } else { taskList.addAll(manualTaskMap.values().stream() .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } return taskList; } private boolean tryLock() { try { if (!lock.tryLock(1, TimeUnit.SECONDS)) { Thread owner = lock.getOwner(); if (owner != null) { LOG.warn("task lock is held by: {}", Util.dumpThread(owner, 50)); } else { LOG.warn("task lock owner is null"); } return false; } return true; } catch (InterruptedException e) { LOG.warn("got exception while getting task lock", e); } return lock.isHeldByCurrentThread(); } private void unlock() { this.lock.unlock(); } public void replayCreateTask(Task task) { if (task.getExpireTime() > 0 && System.currentTimeMillis() > task.getExpireTime()) { return; } createTask(task, true); } public void replayDropTasks(List<Long> taskIdList) { dropTasks(taskIdList, true); } public TaskRunManager getTaskRunManager() { return taskRunManager; } public ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException { Task task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get()); long createResult = createTask(task, false); String taskName = task.getName(); SubmitResult submitResult; if (createResult < 0) { if (createResult == TASK_EXISTS) { throw new DdlException("Task " + taskName + " already exist."); } else { LOG.warn("Failed to create Task: " + taskName + ", ErrorCode: " + createResult); submitResult = new SubmitResult(null, SubmitResult.SubmitStatus.REJECTED); } } else { submitResult = executeTask(taskName); if (submitResult.getStatus() != SubmitResult.SubmitStatus.SUBMITTED) { dropTasks(ImmutableList.of(task.getId()), false); } } ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder(); builder.addColumn(new Column("TaskName", ScalarType.createVarchar(40))); builder.addColumn(new Column("Status", ScalarType.createVarchar(10))); List<String> item = ImmutableList.of(taskName, submitResult.getStatus().toString()); List<List<String>> result = ImmutableList.of(item); return new ShowResultSet(builder.build(), result); } public long loadTasks(DataInputStream dis, long checksum) throws IOException { int taskCount = 0; try { String s = Text.readString(dis); SerializeData data = GsonUtils.GSON.fromJson(s, SerializeData.class); if (data != null) { if (data.tasks != null) { for (Task task : data.tasks) { replayCreateTask(task); } taskCount = data.tasks.size(); } if (data.runStatus != null) { for (TaskRunStatus runStatus : data.runStatus) { replayCreateTaskRun(runStatus); } } } checksum ^= taskCount; LOG.info("finished replaying TaskManager from image"); } catch (EOFException e) { LOG.info("no TaskManager to replay."); } return checksum; } public long saveTasks(DataOutputStream dos, long checksum) throws IOException { SerializeData data = new SerializeData(); data.tasks = new ArrayList<>(nameToTaskMap.values()); checksum ^= data.tasks.size(); data.runStatus = showTaskRunStatus(null); String s = GsonUtils.GSON.toJson(data); Text.writeString(dos, s); return checksum; } public List<TaskRunStatus> showTaskRunStatus(String dbName) { List<TaskRunStatus> taskRunList = Lists.newArrayList(); if (dbName == null) { for (Queue<TaskRun> pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) { taskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus).collect(Collectors.toList())); } taskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus) .collect(Collectors.toList())); 
taskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory()); } else { for (Queue<TaskRun> pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) { taskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus) .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } taskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus) .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); taskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory().stream() .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } return taskRunList; } public void replayCreateTaskRun(TaskRunStatus status) { if (status.getState() == Constants.TaskRunState.SUCCESS || status.getState() == Constants.TaskRunState.FAILED) { if (System.currentTimeMillis() > status.getExpireTime()) { return; } } switch (status.getState()) { case PENDING: String taskName = status.getTaskName(); Task task = nameToTaskMap.get(taskName); if (task == null) { LOG.warn("fail to obtain task name {} because task is null", taskName); return; } TaskRun taskRun = TaskRunBuilder.newBuilder(task).build(); taskRun.initStatus(status.getQueryId(), status.getCreateTime()); Queue<TaskRun> taskRuns = taskRunManager.getPendingTaskRunMap().computeIfAbsent(taskRun.getTaskId(), u -> Queues.newConcurrentLinkedQueue()); taskRuns.offer(taskRun); break; case RUNNING: status.setState(Constants.TaskRunState.FAILED); taskRunManager.getTaskRunHistory().addHistory(status); break; case FAILED: case SUCCESS: taskRunManager.getTaskRunHistory().addHistory(status); break; } } public void replayUpdateTaskRun(TaskRunStatusChange statusChange) { Constants.TaskRunState toStatus = statusChange.getToStatus(); Long taskId = statusChange.getTaskId(); Queue<TaskRun> taskRunQueue = taskRunManager.getPendingTaskRunMap().get(taskId); if (taskRunQueue != null) { if (taskRunQueue.size() == 0) { taskRunManager.getPendingTaskRunMap().remove(taskId); return; } TaskRun pendingTaskRun = taskRunQueue.poll(); TaskRunStatus status = pendingTaskRun.getStatus(); if (status.getQueryId().equals(statusChange.getQueryId())) { if (toStatus == Constants.TaskRunState.FAILED) { status.setErrorMessage(statusChange.getErrorMessage()); status.setErrorCode(statusChange.getErrorCode()); } status.setState(toStatus); status.setFinishTime(statusChange.getFinishTime()); taskRunManager.getTaskRunHistory().addHistory(status); } } } public void replayDropTaskRuns(List<String> queryIdList) { Map<String, String> index = Maps.newHashMapWithExpectedSize(queryIdList.size()); for (String queryId : queryIdList) { index.put(queryId, null); } taskRunManager.getTaskRunHistory().getAllHistory().removeIf(runStatus -> index.containsKey(runStatus.getQueryId())); } public void removeOldTaskRunHistory() { long currentTimeMs = System.currentTimeMillis(); Deque<TaskRunStatus> taskRunHistory = taskRunManager.getTaskRunHistory().getAllHistory(); List<String> historyToDelete = Lists.newArrayList(); if (!tryLock()) { return; } try { Iterator<TaskRunStatus> iterator = taskRunHistory.iterator(); while (iterator.hasNext()) { TaskRunStatus taskRunStatus = iterator.next(); long expireTime = taskRunStatus.getExpireTime(); if (currentTimeMs > expireTime) { historyToDelete.add(taskRunStatus.getQueryId()); iterator.remove(); } } } finally { unlock(); } LOG.info("remove run history:{}", historyToDelete); } private static class SerializeData { @SerializedName("tasks") public List<Task> tasks; @SerializedName("runStatus") public 
List<TaskRunStatus> runStatus; } }
class TaskManager { private static final Logger LOG = LogManager.getLogger(TaskManager.class); public static final long TASK_EXISTS = -1L; public static final long DUPLICATE_CREATE_TASK = -2L; public static final long GET_TASK_LOCK_FAILED = -3L; private final Map<Long, Task> manualTaskMap; private final Map<String, Task> nameToTaskMap; private final TaskRunManager taskRunManager; private final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1); private final QueryableReentrantLock taskLock; private final QueryableReentrantLock taskRunLock; private AtomicBoolean isStart = new AtomicBoolean(false); public TaskManager() { manualTaskMap = Maps.newConcurrentMap(); nameToTaskMap = Maps.newConcurrentMap(); taskRunManager = new TaskRunManager(); taskLock = new QueryableReentrantLock(true); taskRunLock = new QueryableReentrantLock(true); } public void start() { if (isStart.compareAndSet(false, true)) { clearUnfinishedTaskRun(); dispatchScheduler.scheduleAtFixedRate(() -> { if (!tryTaskRunLock()) { return; } try { taskRunManager.checkRunningTaskRun(); taskRunManager.scheduledPendingTaskRun(); } catch (Exception ex) { LOG.warn("failed to dispatch job.", ex); } finally { taskRunUnlock(); } }, 0, 1, TimeUnit.SECONDS); } } private void clearUnfinishedTaskRun() { if (!tryTaskRunLock()) { return; } try { Iterator<Long> pendingIter = taskRunManager.getPendingTaskRunMap().keySet().iterator(); while (pendingIter.hasNext()) { Queue<TaskRun> taskRuns = taskRunManager.getPendingTaskRunMap().get(pendingIter.next()); for (TaskRun taskRun : taskRuns) { taskRun.getStatus().setErrorMessage("Fe restart abort the task"); taskRun.getStatus().setErrorCode(-1); taskRun.getStatus().setState(Constants.TaskRunState.FAILED); taskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus()); } pendingIter.remove(); } Iterator<Long> runningIter = taskRunManager.getRunningTaskRunMap().keySet().iterator(); while (runningIter.hasNext()) { TaskRun taskRun = taskRunManager.getRunningTaskRunMap().get(runningIter.next()); taskRun.getStatus().setErrorMessage("Fe restart abort the task"); taskRun.getStatus().setErrorCode(-1); taskRun.getStatus().setState(Constants.TaskRunState.FAILED); runningIter.remove(); taskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus()); } } finally { taskRunUnlock(); } } public long createTask(Task task, boolean isReplay) { if (!tryTaskLock()) { return GET_TASK_LOCK_FAILED; } try { if (nameToTaskMap.containsKey(task.getName())) { return TASK_EXISTS; } nameToTaskMap.put(task.getName(), task); if (manualTaskMap.containsKey(task.getId())) { return DUPLICATE_CREATE_TASK; } manualTaskMap.put(task.getId(), task); if (!isReplay) { GlobalStateMgr.getCurrentState().getEditLog().logCreateTask(task); } return task.getId(); } finally { taskUnlock(); } } public SubmitResult executeTask(String taskName) { Task task = nameToTaskMap.get(taskName); if (task == null) { return new SubmitResult(null, SubmitResult.SubmitStatus.FAILED); } return taskRunManager.submitTaskRun(TaskRunBuilder.newBuilder(task).build()); } public void dropTasks(List<Long> taskIdList, boolean isReplay) { if (!tryTaskLock()) { return; } try { for (long taskId : taskIdList) { Task task = manualTaskMap.get(taskId); if (task == null) { LOG.warn("drop taskId {} failed because task is null", taskId); continue; } nameToTaskMap.remove(task.getName()); manualTaskMap.remove(task.getId()); } if (!isReplay) { GlobalStateMgr.getCurrentState().getEditLog().logDropTasks(taskIdList); } } finally { taskUnlock(); } LOG.info("drop 
tasks:{}", taskIdList); } public List<Task> showTasks(String dbName) { List<Task> taskList = Lists.newArrayList(); if (dbName == null) { taskList.addAll(manualTaskMap.values()); } else { taskList.addAll(manualTaskMap.values().stream() .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } return taskList; } private boolean tryTaskLock() { try { if (!taskLock.tryLock(5, TimeUnit.SECONDS)) { Thread owner = taskLock.getOwner(); if (owner != null) { LOG.warn("task lock is held by: {}", Util.dumpThread(owner, 50)); } else { LOG.warn("task lock owner is null"); } return false; } return true; } catch (InterruptedException e) { LOG.warn("got exception while getting task lock", e); } return false; } public void taskUnlock() { this.taskLock.unlock(); } private boolean tryTaskRunLock() { try { if (!taskRunLock.tryLock(5, TimeUnit.SECONDS)) { Thread owner = taskRunLock.getOwner(); if (owner != null) { LOG.warn("task run lock is held by: {}", Util.dumpThread(owner, 50)); } else { LOG.warn("task run lock owner is null"); } return false; } return true; } catch (InterruptedException e) { LOG.warn("got exception while getting task run lock", e); } return false; } public void taskRunUnlock() { this.taskRunLock.unlock(); } public void replayCreateTask(Task task) { if (task.getExpireTime() > 0 && System.currentTimeMillis() > task.getExpireTime()) { return; } createTask(task, true); } public void replayDropTasks(List<Long> taskIdList) { dropTasks(taskIdList, true); } public TaskRunManager getTaskRunManager() { return taskRunManager; } public ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException { Task task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get()); long createResult = createTask(task, false); String taskName = task.getName(); SubmitResult submitResult; if (createResult < 0) { if (createResult == TASK_EXISTS) { throw new DdlException("Task " + taskName + " already exist."); } else { LOG.warn("Failed to create Task: " + taskName + ", ErrorCode: " + createResult); submitResult = new SubmitResult(null, SubmitResult.SubmitStatus.REJECTED); } } else { submitResult = executeTask(taskName); if (submitResult.getStatus() != SubmitResult.SubmitStatus.SUBMITTED) { dropTasks(ImmutableList.of(task.getId()), false); } } ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder(); builder.addColumn(new Column("TaskName", ScalarType.createVarchar(40))); builder.addColumn(new Column("Status", ScalarType.createVarchar(10))); List<String> item = ImmutableList.of(taskName, submitResult.getStatus().toString()); List<List<String>> result = ImmutableList.of(item); return new ShowResultSet(builder.build(), result); } public long loadTasks(DataInputStream dis, long checksum) throws IOException { int taskCount = 0; try { String s = Text.readString(dis); SerializeData data = GsonUtils.GSON.fromJson(s, SerializeData.class); if (data != null) { if (data.tasks != null) { for (Task task : data.tasks) { replayCreateTask(task); } taskCount = data.tasks.size(); } if (data.runStatus != null) { for (TaskRunStatus runStatus : data.runStatus) { replayCreateTaskRun(runStatus); } } } checksum ^= taskCount; LOG.info("finished replaying TaskManager from image"); } catch (EOFException e) { LOG.info("no TaskManager to replay."); } return checksum; } public long saveTasks(DataOutputStream dos, long checksum) throws IOException { SerializeData data = new SerializeData(); data.tasks = new ArrayList<>(nameToTaskMap.values()); checksum ^= data.tasks.size(); data.runStatus = 
showTaskRunStatus(null); String s = GsonUtils.GSON.toJson(data); Text.writeString(dos, s); return checksum; } public List<TaskRunStatus> showTaskRunStatus(String dbName) { List<TaskRunStatus> taskRunList = Lists.newArrayList(); if (dbName == null) { for (Queue<TaskRun> pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) { taskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus).collect(Collectors.toList())); } taskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus) .collect(Collectors.toList())); taskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory()); } else { for (Queue<TaskRun> pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) { taskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus) .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } taskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus) .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); taskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory().stream() .filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList())); } return taskRunList; } public void replayCreateTaskRun(TaskRunStatus status) { if (status.getState() == Constants.TaskRunState.SUCCESS || status.getState() == Constants.TaskRunState.FAILED) { if (System.currentTimeMillis() > status.getExpireTime()) { return; } } switch (status.getState()) { case PENDING: String taskName = status.getTaskName(); Task task = nameToTaskMap.get(taskName); if (task == null) { LOG.warn("fail to obtain task name {} because task is null", taskName); return; } TaskRun taskRun = TaskRunBuilder.newBuilder(task).build(); taskRun.initStatus(status.getQueryId(), status.getCreateTime()); Queue<TaskRun> taskRuns = taskRunManager.getPendingTaskRunMap().computeIfAbsent(taskRun.getTaskId(), u -> Queues.newConcurrentLinkedQueue()); taskRuns.offer(taskRun); break; case RUNNING: status.setState(Constants.TaskRunState.FAILED); taskRunManager.getTaskRunHistory().addHistory(status); break; case FAILED: case SUCCESS: taskRunManager.getTaskRunHistory().addHistory(status); break; } } public void replayUpdateTaskRun(TaskRunStatusChange statusChange) { Constants.TaskRunState toStatus = statusChange.getToStatus(); Long taskId = statusChange.getTaskId(); Queue<TaskRun> taskRunQueue = taskRunManager.getPendingTaskRunMap().get(taskId); if (taskRunQueue != null) { if (taskRunQueue.size() == 0) { taskRunManager.getPendingTaskRunMap().remove(taskId); return; } TaskRun pendingTaskRun = taskRunQueue.poll(); TaskRunStatus status = pendingTaskRun.getStatus(); if (status.getQueryId().equals(statusChange.getQueryId())) { if (toStatus == Constants.TaskRunState.FAILED) { status.setErrorMessage(statusChange.getErrorMessage()); status.setErrorCode(statusChange.getErrorCode()); } status.setState(toStatus); status.setFinishTime(statusChange.getFinishTime()); taskRunManager.getTaskRunHistory().addHistory(status); } } } public void replayDropTaskRuns(List<String> queryIdList) { Map<String, String> index = Maps.newHashMapWithExpectedSize(queryIdList.size()); for (String queryId : queryIdList) { index.put(queryId, null); } taskRunManager.getTaskRunHistory().getAllHistory().removeIf(runStatus -> index.containsKey(runStatus.getQueryId())); } public void removeOldTaskRunHistory() { long currentTimeMs = System.currentTimeMillis(); List<String> historyToDelete = Lists.newArrayList(); if (!tryTaskRunLock()) { return; } try { 
Deque<TaskRunStatus> taskRunHistory = taskRunManager.getTaskRunHistory().getAllHistory(); Iterator<TaskRunStatus> iterator = taskRunHistory.iterator(); while (iterator.hasNext()) { TaskRunStatus taskRunStatus = iterator.next(); long expireTime = taskRunStatus.getExpireTime(); if (currentTimeMs > expireTime) { historyToDelete.add(taskRunStatus.getQueryId()); iterator.remove(); } } } finally { taskRunUnlock(); } LOG.info("remove run history:{}", historyToDelete); } private static class SerializeData { @SerializedName("tasks") public List<Task> tasks; @SerializedName("runStatus") public List<TaskRunStatus> runStatus; } }
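The fix in this row splits the single lock into a task lock and a task-run lock and makes `removeOldTaskInfo()` snapshot the expired ids under the task lock before calling `dropTasks()`. The locking helper itself is a small pattern worth isolating; a minimal sketch:

```java
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

// Minimal sketch of the bounded tryLock helper used above: wait a few
// seconds at most, bail out instead of blocking forever on a stuck owner,
// and return false (not isHeldByCurrentThread()) when interrupted.
final class BoundedLock {
    private final ReentrantLock lock = new ReentrantLock(true); // fair, like QueryableReentrantLock

    boolean tryLock() {
        try {
            return lock.tryLock(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt flag
            return false;
        }
    }

    void unlock() {
        lock.unlock();
    }
}
```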
Just to be sure - there is no way we can be on the I/O thread here, right?
public Iterable<ConfigSource> getConfigSources(ClassLoader cl) { Map<String, ValueType> keys = config.keysAsMap(); if (keys.isEmpty()) { log.debug("No keys were configured for config source lookup"); return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(keys.size()); List<Uni<?>> allUnis = new ArrayList<>(); for (Map.Entry<String, ValueType> entry : keys.entrySet()) { String fullKey = config.prefix.isPresent() ? config.prefix.get() + "/" + entry.getKey() : entry.getKey(); allUnis.add(consulConfigGateway.getValue(fullKey).chain(new Function<Response, Uni<?>>() { @Override public Uni<?> apply(Response response) { if (response != null) { result.add( responseConfigSourceUtil.toConfigSource(response, entry.getValue(), config.prefix)); } else { String message = "Key '" + fullKey + "' not found in Consul."; if (config.failOnMissingKey) { return Uni.createFrom().failure(new RuntimeException(message)); } else { log.info(message); return Uni.createFrom().nullItem(); } } return Uni.createFrom().nullItem(); } })); } try { Uni.combine().all().unis(allUnis).discardItems().await() .atMost(config.agent.connectionTimeout.plus(config.agent.readTimeout.multipliedBy(2))); } catch (CompletionException e) { throw new RuntimeException("An error occurred while attempting to fetch configuration from Consul.", e); } finally { consulConfigGateway.close(); } return result; }
}
public Iterable<ConfigSource> getConfigSources(ClassLoader cl) { Map<String, ValueType> keys = config.keysAsMap(); if (keys.isEmpty()) { log.debug("No keys were configured for config source lookup"); return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(keys.size()); List<Uni<?>> allUnis = new ArrayList<>(); for (Map.Entry<String, ValueType> entry : keys.entrySet()) { String fullKey = config.prefix.isPresent() ? config.prefix.get() + "/" + entry.getKey() : entry.getKey(); allUnis.add(consulConfigGateway.getValue(fullKey).invoke(new Consumer<Response>() { @Override public void accept(Response response) { if (response != null) { result.add( responseConfigSourceUtil.toConfigSource(response, entry.getValue(), config.prefix)); } else { String message = "Key '" + fullKey + "' not found in Consul."; if (config.failOnMissingKey) { throw new RuntimeException(message); } else { log.info(message); } } } })); } try { Uni.combine().all().unis(allUnis).discardItems().await() .atMost(config.agent.connectionTimeout.plus(config.agent.readTimeout.multipliedBy(2))); } catch (CompletionException e) { throw new RuntimeException("An error occurred while attempting to fetch configuration from Consul.", e); } finally { consulConfigGateway.close(); } return result; }
class ConsulConfigSourceProvider implements ConfigSourceProvider { private static final Logger log = Logger.getLogger(ConsulConfigSourceProvider.class); private final ConsulConfig config; private final ConsulConfigGateway consulConfigGateway; private final ResponseConfigSourceUtil responseConfigSourceUtil; public ConsulConfigSourceProvider(ConsulConfig config) { this(config, new VertxConsulConfigGateway(config), new ResponseConfigSourceUtil()); } ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway) { this(config, consulConfigGateway, new ResponseConfigSourceUtil()); } private ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway, ResponseConfigSourceUtil responseConfigSourceUtil) { this.config = config; this.consulConfigGateway = consulConfigGateway; this.responseConfigSourceUtil = responseConfigSourceUtil; } @Override }
class ConsulConfigSourceProvider implements ConfigSourceProvider { private static final Logger log = Logger.getLogger(ConsulConfigSourceProvider.class); private final ConsulConfig config; private final ConsulConfigGateway consulConfigGateway; private final ResponseConfigSourceUtil responseConfigSourceUtil; public ConsulConfigSourceProvider(ConsulConfig config) { this(config, new VertxConsulConfigGateway(config), new ResponseConfigSourceUtil()); } ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway) { this(config, consulConfigGateway, new ResponseConfigSourceUtil()); } private ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway, ResponseConfigSourceUtil responseConfigSourceUtil) { this.config = config; this.consulConfigGateway = consulConfigGateway; this.responseConfigSourceUtil = responseConfigSourceUtil; } @Override }
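Two things changed in this row: the Mutiny pipeline now uses `invoke` (a synchronous side effect on the item) instead of `chain` (which sequences another `Uni`), and the combined result is still resolved with a blocking `await().atMost(...)`. That blocking call is exactly what the reviewer's question probes: it must never run on the Vert.x event loop. A defensive guard, purely illustrative and not part of the original code:

```java
import io.vertx.core.Context;

// Illustrative guard for the blocking await().atMost(...) call: blocking on
// the Vert.x event loop would stall all I/O handled by that loop.
final class EventLoopGuard {
    static void assertNotOnEventLoop() {
        if (Context.isOnEventLoopThread()) {
            throw new IllegalStateException(
                    "blocking config retrieval must not run on the event loop");
        }
    }
}
```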
You should close it before setting client to null
private void close() { logger.trace("Connect status before close with '{}' is '{}'.", routing, isConnected); isConnected = false; if (null != client) { client = null; } if ((transport != null) && transport.isOpen()) { transport.close(); logger.info("Closed a connection to {}.", routing); } }
if (null != client) {
private void close() { logger.trace("Connect status before close with '{}' is '{}'.", routing, isConnected); isConnected = false; if ((transport != null) && transport.isOpen()) { transport.close(); logger.info("Closed a connection to {}.", routing); } if (null != client) { client = null; } }
class BackendClient { private static Logger logger = LoggerFactory.getLogger(BackendClient.class); private Routing routing; private TDorisExternalService.Client client; private TTransport transport; private boolean isConnected = false; private final int retries; private final int socketTimeout; private final int connectTimeout; public BackendClient(Routing routing, Settings settings) throws ConnectedFailedException { this.routing = routing; this.connectTimeout = settings.getIntegerProperty(ConfigurationOptions.DORIS_REQUEST_CONNECT_TIMEOUT_MS, ConfigurationOptions.DORIS_REQUEST_CONNECT_TIMEOUT_MS_DEFAULT); this.socketTimeout = settings.getIntegerProperty(ConfigurationOptions.DORIS_REQUEST_READ_TIMEOUT_MS, ConfigurationOptions.DORIS_REQUEST_READ_TIMEOUT_MS_DEFAULT); this.retries = settings.getIntegerProperty(ConfigurationOptions.DORIS_REQUEST_RETRIES, ConfigurationOptions.DORIS_REQUEST_RETRIES_DEFAULT); logger.trace("connect timeout set to '{}'. socket timeout set to '{}'. retries set to '{}'.", this.connectTimeout, this.socketTimeout, this.retries); open(); } private void open() throws ConnectedFailedException { logger.debug("Open client to Doris BE '{}'.", routing); TException ex = null; for (int attempt = 0; !isConnected && attempt < retries; ++attempt) { logger.debug("Attempt {} to connect {}.", attempt, routing); TBinaryProtocol.Factory factory = new TBinaryProtocol.Factory(); transport = new TSocket(routing.getHost(), routing.getPort(), socketTimeout, connectTimeout); TProtocol protocol = factory.getProtocol(transport); client = new TDorisExternalService.Client(protocol); try { logger.trace("Connect status before open transport to {} is '{}'.", routing, isConnected); if (!transport.isOpen()) { transport.open(); isConnected = true; } } catch (TTransportException e) { logger.warn(ErrorMessages.CONNECT_FAILED_MESSAGE, routing, e); ex = e; } if (isConnected) { logger.info("Success connect to {}.", routing); break; } } if (!isConnected) { logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } } /** * Open a scanner for reading Doris data. 
* @param openParams thrift struct to required by request * @return scan open result * @throws ConnectedFailedException throw if cannot connect to Doris BE */ public TScanOpenResult openScanner(TScanOpenParams openParams) throws ConnectedFailedException { logger.debug("OpenScanner to '{}', parameter is '{}'.", routing, openParams); if (!isConnected) { open(); } TException ex = null; for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to openScanner {}.", attempt, routing); try { TScanOpenResult result = client.open_scanner(openParams); if (result == null) { logger.warn("Open scanner result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of open scanner result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } return result; } catch (TException e) { logger.warn("Open scanner from {} failed.", routing, e); ex = e; } } logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } /** * get next row batch from Doris BE * @param nextBatchParams thrift struct to required by request * @return scan batch result * @throws ConnectedFailedException throw if cannot connect to Doris BE */ public TScanBatchResult getNext(TScanNextBatchParams nextBatchParams) throws DorisException { logger.debug("GetNext to '{}', parameter is '{}'.", routing, nextBatchParams); if (!isConnected) { open(); } TException ex = null; TScanBatchResult result = null; for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to getNext {}.", attempt, routing); try { result = client.get_next(nextBatchParams); if (result == null) { logger.warn("GetNext result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of get next result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } return result; } catch (TException e) { logger.warn("Get next from {} failed.", routing, e); ex = e; } } if (result != null && (TStatusCode.OK != (result.getStatus().getStatus_code()))) { logger.error(ErrorMessages.DORIS_INTERNAL_FAIL_MESSAGE, routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); throw new DorisInternalException(routing.toString(), result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); } logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } /** * close an scanner. 
* @param closeParams thrift struct to required by request */ public void closeScanner(TScanCloseParams closeParams) { logger.debug("CloseScanner to '{}', parameter is '{}'.", routing, closeParams); if (!isConnected) { try { open(); } catch (ConnectedFailedException e) { logger.warn("Cannot connect to Doris BE {} when close scanner.", routing); return; } } for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to closeScanner {}.", attempt, routing); try { TScanCloseResult result = client.close_scanner(closeParams); if (result == null) { logger.warn("CloseScanner result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of get next result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } break; } catch (TException e) { logger.warn("Close scanner from {} failed.", routing, e); } } logger.info("CloseScanner to Doris BE '{}' success.", routing); close(); } }
class BackendClient { private static Logger logger = LoggerFactory.getLogger(BackendClient.class); private Routing routing; private TDorisExternalService.Client client; private TTransport transport; private boolean isConnected = false; private final int retries; private final int socketTimeout; private final int connectTimeout; public BackendClient(Routing routing, DorisReadOptions readOptions) throws ConnectedFailedException { this.routing = routing; this.connectTimeout = readOptions.getRequestConnectTimeoutMs() == null ? ConfigurationOptions.DORIS_REQUEST_CONNECT_TIMEOUT_MS_DEFAULT : readOptions.getRequestConnectTimeoutMs(); this.socketTimeout = readOptions.getRequestReadTimeoutMs() == null ? ConfigurationOptions.DORIS_REQUEST_READ_TIMEOUT_MS_DEFAULT : readOptions.getRequestReadTimeoutMs(); this.retries = readOptions.getRequestRetries() == null ? ConfigurationOptions.DORIS_REQUEST_RETRIES_DEFAULT : readOptions.getRequestRetries(); logger.trace("connect timeout set to '{}'. socket timeout set to '{}'. retries set to '{}'.", this.connectTimeout, this.socketTimeout, this.retries); open(); } private void open() throws ConnectedFailedException { logger.debug("Open client to Doris BE '{}'.", routing); TException ex = null; for (int attempt = 0; !isConnected && attempt < retries; ++attempt) { logger.debug("Attempt {} to connect {}.", attempt, routing); TBinaryProtocol.Factory factory = new TBinaryProtocol.Factory(); transport = new TSocket(routing.getHost(), routing.getPort(), socketTimeout, connectTimeout); TProtocol protocol = factory.getProtocol(transport); client = new TDorisExternalService.Client(protocol); if (isConnected) { logger.info("Success connect to {}.", routing); return; } try { logger.trace("Connect status before open transport to {} is '{}'.", routing, isConnected); if (!transport.isOpen()) { transport.open(); isConnected = true; } } catch (TTransportException e) { logger.warn(ErrorMessages.CONNECT_FAILED_MESSAGE, routing, e); ex = e; } } if (!isConnected) { logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } } /** * Open a scanner for reading Doris data. 
* @param openParams thrift struct to required by request * @return scan open result * @throws ConnectedFailedException throw if cannot connect to Doris BE */ public TScanOpenResult openScanner(TScanOpenParams openParams) throws ConnectedFailedException { logger.debug("OpenScanner to '{}', parameter is '{}'.", routing, openParams); if (!isConnected) { open(); } TException ex = null; for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to openScanner {}.", attempt, routing); try { TScanOpenResult result = client.open_scanner(openParams); if (result == null) { logger.warn("Open scanner result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of open scanner result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } return result; } catch (TException e) { logger.warn("Open scanner from {} failed.", routing, e); ex = e; } } logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } /** * get next row batch from Doris BE * @param nextBatchParams thrift struct to required by request * @return scan batch result * @throws ConnectedFailedException throw if cannot connect to Doris BE */ public TScanBatchResult getNext(TScanNextBatchParams nextBatchParams) throws DorisException { logger.debug("GetNext to '{}', parameter is '{}'.", routing, nextBatchParams); if (!isConnected) { open(); } TException ex = null; TScanBatchResult result = null; for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to getNext {}.", attempt, routing); try { result = client.get_next(nextBatchParams); if (result == null) { logger.warn("GetNext result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of get next result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } return result; } catch (TException e) { logger.warn("Get next from {} failed.", routing, e); ex = e; } } if (result != null && (TStatusCode.OK != (result.getStatus().getStatus_code()))) { logger.error(ErrorMessages.DORIS_INTERNAL_FAIL_MESSAGE, routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); throw new DorisInternalException(routing.toString(), result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); } logger.error(ErrorMessages.CONNECT_FAILED_MESSAGE, routing); throw new ConnectedFailedException(routing.toString(), ex); } /** * close an scanner. 
* @param closeParams thrift struct to required by request */ public void closeScanner(TScanCloseParams closeParams) { logger.debug("CloseScanner to '{}', parameter is '{}'.", routing, closeParams); for (int attempt = 0; attempt < retries; ++attempt) { logger.debug("Attempt {} to closeScanner {}.", attempt, routing); try { TScanCloseResult result = client.close_scanner(closeParams); if (result == null) { logger.warn("CloseScanner result from {} is null.", routing); continue; } if (!TStatusCode.OK.equals(result.getStatus().getStatus_code())) { logger.warn("The status of get next result from {} is '{}', error message is: {}.", routing, result.getStatus().getStatus_code(), result.getStatus().getError_msgs()); continue; } break; } catch (TException e) { logger.warn("Close scanner from {} failed.", routing, e); } } logger.info("CloseScanner to Doris BE '{}' success.", routing); close(); } }
> It shouldn't be calling `toString` unless it was enabled already. Ah right. Sorry, I was thinking of the method which takes a String. Never mind then. > However you bring up a different point, ... JBoss Logger was actually enhanced specifically to allow dead-code removal of ranges which are disabled at compile time. I suppose adding the guard I suggested makes the whole block eligible for removal - including the string message.
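A hedged sketch of the guard being discussed, against a JBoss Logging `Logger` (the logger name and message are taken from the snippet below): wrapping the call means that when `isDebugEnabled()` is resolved to false at build time, the whole block, format string included, becomes a candidate for dead-code elimination.

```java
// Guarded form: if isDebugEnabled() is constant-folded to false, the entire
// block (including the message string) can be removed from the image.
if (log.isDebugEnabled()) {
    log.debugf("Applying micro profile configuration: %s", conf);
}
```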
BeanContainerListenerBuildItem build(InfinispanTemplate template, PropertiesBuildItem builderBuildItem) { Properties properties = builderBuildItem.getProperties(); InfinispanClientConfiguration conf = infinispanClient; final Optional<String> serverList = conf.serverList; log.debugf("Applying micro profile configuration: %s", conf); if (serverList.isPresent()) { properties.putIfAbsent(ConfigurationProperties.SERVER_LIST, serverList.get()); } int maxEntries = conf.nearCacheMaxEntries; if (maxEntries > 0 && !properties.containsKey(ConfigurationProperties.NEAR_CACHE_MODE)) { properties.put(ConfigurationProperties.NEAR_CACHE_MODE, NearCacheMode.INVALIDATED); properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES, maxEntries); } final Optional<String> namePattern = conf.nearCacheNamePattern; if (namePattern.isPresent()) { properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_NAME_PATTERN, namePattern); } return new BeanContainerListenerBuildItem(template.configureInfinispan(properties)); }
log.debugf("Applying micro profile configuration: %s", conf);
BeanContainerListenerBuildItem build(InfinispanTemplate template, PropertiesBuildItem builderBuildItem) { Properties properties = builderBuildItem.getProperties(); InfinispanClientConfiguration conf = infinispanClient; final Optional<String> serverList = conf.serverList; if (log.isDebugEnabled()) { log.debugf("Applying micro profile configuration: %s", conf); } if (serverList.isPresent()) { properties.putIfAbsent(ConfigurationProperties.SERVER_LIST, serverList.get()); } int maxEntries = conf.nearCacheMaxEntries; if (maxEntries > 0 && !properties.containsKey(ConfigurationProperties.NEAR_CACHE_MODE)) { properties.put(ConfigurationProperties.NEAR_CACHE_MODE, NearCacheMode.INVALIDATED); properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES, maxEntries); } return new BeanContainerListenerBuildItem(template.configureInfinispan(properties)); }
class InfinispanClientProcessor { private static final Log log = LogFactory.getLog(InfinispanClientProcessor.class); private static final String META_INF = "META-INF"; private static final String HOTROD_CLIENT_PROPERTIES = META_INF + File.separator + "/hotrod-client.properties"; private static final String PROTO_EXTENSION = ".proto"; @BuildStep PropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArchivesBuildItem, BuildProducer<ReflectiveClassBuildItem> reflectiveClass, BuildProducer<HotDeploymentConfigFileBuildItem> hotDeployment, BuildProducer<SystemPropertyBuildItem> systemProperties, BuildProducer<AdditionalBeanBuildItem> additionalBeans, ApplicationIndexBuildItem applicationIndexBuildItem) throws ClassNotFoundException, IOException { additionalBeans.produce(new AdditionalBeanBuildItem(InfinispanClientProducer.class)); systemProperties.produce(new SystemPropertyBuildItem("io.netty.noUnsafe", "true")); hotDeployment.produce(new HotDeploymentConfigFileBuildItem(HOTROD_CLIENT_PROPERTIES)); ClassLoader cl = Thread.currentThread().getContextClassLoader(); InputStream stream = cl.getResourceAsStream(HOTROD_CLIENT_PROPERTIES); Properties properties; if (stream == null) { properties = new Properties(); log.tracef("There was no hotrod-client.properties file found - using defaults"); } else { try { properties = loadFromStream(stream); log.debugf("Found HotRod properties of %s", properties); } finally { Util.close(stream); } InfinispanClientProducer.replaceProperties(properties); if (properties.containsKey(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES)) { reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "com.github.benmanes.caffeine.cache.SSMS")); reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "com.github.benmanes.caffeine.cache.PSMS")); } Object marshaller = properties.get(ConfigurationProperties.MARSHALLER); if (InfinispanClientProducer.isProtoBufAvailable(marshaller)) { ApplicationArchive applicationArchive = applicationArchivesBuildItem.getRootArchive(); Path metaPath = applicationArchive.getChildPath(META_INF); Iterator<Path> protoFiles = Files.list(metaPath) .filter(Files::isRegularFile) .filter(p -> p.toString().endsWith(PROTO_EXTENSION)) .iterator(); if (protoFiles.hasNext()) { } while (protoFiles.hasNext()) { Path path = protoFiles.next(); byte[] bytes = Files.readAllBytes(path); properties.put(InfinispanClientProducer.PROTOBUF_FILE_PREFIX + path.getFileName().toString(), new String(bytes)); } InfinispanClientProducer.handleQueryRequirements(properties); } } Index index = applicationIndexBuildItem.getIndex(); List<AnnotationInstance> listenerInstances = index.getAnnotations( DotName.createSimple("org.infinispan.client.hotrod.annotation.ClientListener")); for (AnnotationInstance instance : listenerInstances) { AnnotationTarget target = instance.target(); if (target.kind() == AnnotationTarget.Kind.CLASS) { reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, target.asClass().name().toString())); } } reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "io.netty.channel.socket.nio.NioSocketChannel")); reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, "org.infinispan.client.hotrod.event.ContinuousQueryImpl$ClientEntryListener")); reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, "org.infinispan.client.hotrod.near.NearCacheService$InvalidatedNearCacheListener")); reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, 
"org.infinispan.client.hotrod.impl.consistenthash.SegmentConsistentHash")); return new PropertiesBuildItem(properties); } private Properties loadFromStream(InputStream stream) { Properties properties = new Properties(); try { properties.load(stream); } catch (IOException e) { throw new HotRodClientException("Issues configuring from client hotrod-client.properties", e); } return properties; } /** * The Infinispan client configuration, if set. */ InfinispanClientConfiguration infinispanClient; @BuildStep @Record(ExecutionTime.STATIC_INIT) private static final Set<DotName> UNREMOVABLE_BEANS = Collections.unmodifiableSet( new HashSet<>(Arrays.asList( DotName.createSimple("org.infinispan.protostream.MessageMarshaller"), DotName.createSimple("org.infinispan.protostream.FileDescriptorSource") ))); @BuildStep UnremovableBeanBuildItem ensureBeanLookupAvailable() { return new UnremovableBeanBuildItem(beanInfo -> { Set<Type> types = beanInfo.getTypes(); for (Type t : types) { if (UNREMOVABLE_BEANS.contains(t.name())) { return true; } } return false; }); } }
class InfinispanClientProcessor { private static final Log log = LogFactory.getLog(InfinispanClientProcessor.class); private static final String META_INF = "META-INF"; private static final String HOTROD_CLIENT_PROPERTIES = META_INF + File.separator + "/hotrod-client.properties"; private static final String PROTO_EXTENSION = ".proto"; @BuildStep PropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArchivesBuildItem, BuildProducer<ReflectiveClassBuildItem> reflectiveClass, BuildProducer<HotDeploymentConfigFileBuildItem> hotDeployment, BuildProducer<SystemPropertyBuildItem> systemProperties, BuildProducer<AdditionalBeanBuildItem> additionalBeans, ApplicationIndexBuildItem applicationIndexBuildItem) throws ClassNotFoundException, IOException { additionalBeans.produce(new AdditionalBeanBuildItem(InfinispanClientProducer.class)); systemProperties.produce(new SystemPropertyBuildItem("io.netty.noUnsafe", "true")); hotDeployment.produce(new HotDeploymentConfigFileBuildItem(HOTROD_CLIENT_PROPERTIES)); ClassLoader cl = Thread.currentThread().getContextClassLoader(); InputStream stream = cl.getResourceAsStream(HOTROD_CLIENT_PROPERTIES); Properties properties; if (stream == null) { properties = new Properties(); if (log.isTraceEnabled()) { log.tracef("There was no hotrod-client.properties file found - using defaults"); } } else { try { properties = loadFromStream(stream); if (log.isDebugEnabled()) { log.debugf("Found HotRod properties of %s", properties); } } finally { Util.close(stream); } InfinispanClientProducer.replaceProperties(properties); if (properties.containsKey(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES)) { reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "com.github.benmanes.caffeine.cache.SSMS")); reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "com.github.benmanes.caffeine.cache.PSMS")); } Object marshaller = properties.get(ConfigurationProperties.MARSHALLER); if (marshaller instanceof ProtoStreamMarshaller) { ApplicationArchive applicationArchive = applicationArchivesBuildItem.getRootArchive(); Path metaPath = applicationArchive.getChildPath(META_INF); Iterator<Path> protoFiles = Files.list(metaPath) .filter(Files::isRegularFile) .filter(p -> p.toString().endsWith(PROTO_EXTENSION)) .iterator(); if (protoFiles.hasNext()) { } while (protoFiles.hasNext()) { Path path = protoFiles.next(); byte[] bytes = Files.readAllBytes(path); properties.put(InfinispanClientProducer.PROTOBUF_FILE_PREFIX + path.getFileName().toString(), new String(bytes)); } InfinispanClientProducer.handleProtoStreamRequirements(properties); } } Index index = applicationIndexBuildItem.getIndex(); List<AnnotationInstance> listenerInstances = index.getAnnotations( DotName.createSimple("org.infinispan.client.hotrod.annotation.ClientListener")); for (AnnotationInstance instance : listenerInstances) { AnnotationTarget target = instance.target(); if (target.kind() == AnnotationTarget.Kind.CLASS) { reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, target.asClass().name().toString())); } } reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, "io.netty.channel.socket.nio.NioSocketChannel")); reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, "org.infinispan.client.hotrod.event.ContinuousQueryImpl$ClientEntryListener")); reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, "org.infinispan.client.hotrod.near.NearCacheService$InvalidatedNearCacheListener")); reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, 
"org.infinispan.client.hotrod.impl.consistenthash.SegmentConsistentHash")); return new PropertiesBuildItem(properties); } private Properties loadFromStream(InputStream stream) { Properties properties = new Properties(); try { properties.load(stream); } catch (IOException e) { throw new HotRodClientException("Issues configuring from client hotrod-client.properties", e); } return properties; } /** * The Infinispan client configuration, if set. */ InfinispanClientConfiguration infinispanClient; @BuildStep @Record(ExecutionTime.STATIC_INIT) private static final Set<DotName> UNREMOVABLE_BEANS = Collections.unmodifiableSet( new HashSet<>(Arrays.asList( DotName.createSimple("org.infinispan.protostream.MessageMarshaller"), DotName.createSimple("org.infinispan.protostream.FileDescriptorSource") ))); @BuildStep UnremovableBeanBuildItem ensureBeanLookupAvailable() { return new UnremovableBeanBuildItem(beanInfo -> { Set<Type> types = beanInfo.getTypes(); for (Type t : types) { if (UNREMOVABLE_BEANS.contains(t.name())) { return true; } } return false; }); } }
`containsExactlyInAnyOrder` would be more precise here
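An illustrative AssertJ comparison (the list values are made up for the example): `contains` passes as long as the given elements are present, so unexpected extra entries slip through, whereas `containsExactlyInAnyOrder` also rejects extras while still ignoring order.

```java
// Assumes: import static org.assertj.core.api.Assertions.assertThat;
//          import java.util.List;
List<String> leaders = List.of("componentA", "componentB");

assertThat(leaders).contains("componentA");                                // passes even with extras present
assertThat(leaders).containsExactlyInAnyOrder("componentB", "componentA"); // passes, order ignored
assertThat(leaders).containsExactlyInAnyOrder("componentA");               // fails: "componentB" is unexpected
```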
public void testLeaderInformationChangeNotifiesListener() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); final String componentA = "componentA"; final LeaderInformation leaderInformationA = LeaderInformation.known(UUID.randomUUID(), "localhost"); final String componentB = "componentB"; final LeaderInformation leaderInformationB = LeaderInformation.known(UUID.randomUUID(), "localhost"); leaderElectionDriver.publishLeaderInformation( componentA, leaderInformationA); leaderElectionDriver.publishLeaderInformation( componentB, leaderInformationB); notifyLeaderElectionWatchOnModifiedConfigMap(); final LeaderElectionEvent.AllKnownLeaderInformationEvent allKnownLeaderInformationEvent = leaderElectionListener.await( LeaderElectionEvent .AllKnownLeaderInformationEvent.class); assertThat( allKnownLeaderInformationEvent .getLeaderInformationWithComponentIds()) .contains( LeaderInformationWithComponentId.create( componentA, leaderInformationA), LeaderInformationWithComponentId.create( componentB, leaderInformationB)); }); } }; }
.contains(
public void testLeaderInformationChangeNotifiesListener() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); final String componentA = "componentA"; final LeaderInformation leaderInformationA = LeaderInformation.known(UUID.randomUUID(), "localhost"); final String componentB = "componentB"; final LeaderInformation leaderInformationB = LeaderInformation.known(UUID.randomUUID(), "localhost"); leaderElectionDriver.publishLeaderInformation( componentA, leaderInformationA); leaderElectionDriver.publishLeaderInformation( componentB, leaderInformationB); notifyLeaderElectionWatchOnModifiedConfigMap(); final LeaderElectionEvent.AllKnownLeaderInformationEvent allKnownLeaderInformationEvent = leaderElectionListener.await( LeaderElectionEvent .AllKnownLeaderInformationEvent.class); assertThat( allKnownLeaderInformationEvent .getLeaderInformationWithComponentIds()) .containsExactlyInAnyOrder( LeaderInformationWithComponentId.create( componentA, leaderInformationA), LeaderInformationWithComponentId.create( componentB, leaderInformationB)); }); } }; }
class KubernetesMultipleComponentLeaderElectionDriverTest { private static final String CLUSTER_ID = "test-cluster"; private static final String LEADER_CONFIGMAP_NAME = "foobar"; private static final String LOCK_IDENTITY = "barfoo"; @RegisterExtension private final TestingFatalErrorHandlerExtension testingFatalErrorHandlerExtension = new TestingFatalErrorHandlerExtension(); @RegisterExtension private final TestExecutorExtension<ExecutorService> testExecutorExtension = new TestExecutorExtension<>(Executors::newSingleThreadScheduledExecutor); @Test public void testElectionDriverGainsLeadership() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); }); } }; } @Test public void testElectionDriverLosesLeadership() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); getLeaderCallback().notLeader(); leaderElectionListener.await(LeaderElectionEvent.NotLeaderEvent.class); }); } }; } @Test public void testPublishLeaderInformation() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); final LeaderInformation leaderInformation = LeaderInformation.known(UUID.randomUUID(), "localhost"); final String componentId = "foobar"; final DefaultLeaderRetrievalService leaderRetrievalService = new DefaultLeaderRetrievalService( new KubernetesMultipleComponentLeaderRetrievalDriverFactory( getFlinkKubeClient(), getConfigMapSharedWatcher(), testExecutorExtension.getExecutor(), LEADER_CONFIGMAP_NAME, componentId)); final TestingListener leaderRetrievalListener = new TestingListener(); leaderRetrievalService.start(leaderRetrievalListener); leaderElectionDriver.publishLeaderInformation( componentId, leaderInformation); notifyLeaderRetrievalWatchOnModifiedConfigMap(); leaderRetrievalListener.waitForNewLeader(10_000L); assertThat(leaderRetrievalListener.getLeader()) .isEqualTo(leaderInformation); }); } }; } @Test /** Test fixture for the {@link KubernetesMultipleComponentLeaderElectionDriverTest}. 
*/ protected class TestFixture { private final KubernetesTestFixture kubernetesTestFixture; final TestingLeaderElectionListener leaderElectionListener; final KubernetesMultipleComponentLeaderElectionDriver leaderElectionDriver; TestFixture() { kubernetesTestFixture = new KubernetesTestFixture(CLUSTER_ID, LEADER_CONFIGMAP_NAME, LOCK_IDENTITY); leaderElectionListener = new TestingLeaderElectionListener(); leaderElectionDriver = createLeaderElectionDriver(); } private KubernetesMultipleComponentLeaderElectionDriver createLeaderElectionDriver() { final KubernetesLeaderElectionConfiguration leaderElectionConfiguration = new KubernetesLeaderElectionConfiguration( LEADER_CONFIGMAP_NAME, LOCK_IDENTITY, kubernetesTestFixture.getConfiguration()); return new KubernetesMultipleComponentLeaderElectionDriver( leaderElectionConfiguration, kubernetesTestFixture.getFlinkKubeClient(), leaderElectionListener, kubernetesTestFixture.getConfigMapSharedWatcher(), testExecutorExtension.getExecutor(), testingFatalErrorHandlerExtension.getTestingFatalErrorHandler()); } void leaderCallbackGrantLeadership() throws Exception { kubernetesTestFixture.leaderCallbackGrantLeadership(); } KubernetesLeaderElector.LeaderCallbackHandler getLeaderCallback() throws Exception { return kubernetesTestFixture.getLeaderCallback(); } FlinkKubeClient getFlinkKubeClient() { return kubernetesTestFixture.getFlinkKubeClient(); } KubernetesConfigMapSharedWatcher getConfigMapSharedWatcher() { return kubernetesTestFixture.getConfigMapSharedWatcher(); } FlinkKubeClient.WatchCallbackHandler<KubernetesConfigMap> getLeaderRetrievalConfigMapCallback() throws Exception { return kubernetesTestFixture.getLeaderRetrievalConfigMapCallback(); } void notifyLeaderRetrievalWatchOnModifiedConfigMap() throws Exception { kubernetesTestFixture .getLeaderRetrievalConfigMapCallback() .onModified( Collections.singletonList(kubernetesTestFixture.getLeaderConfigMap())); } void notifyLeaderElectionWatchOnModifiedConfigMap() throws Exception { kubernetesTestFixture .getLeaderElectionConfigMapCallback() .onModified( Collections.singletonList(kubernetesTestFixture.getLeaderConfigMap())); } void runTest(RunnableWithException testMethod) throws Exception { try { testMethod.run(); } finally { leaderElectionDriver.close(); kubernetesTestFixture.close(); } } } }
class KubernetesMultipleComponentLeaderElectionDriverTest { private static final String CLUSTER_ID = "test-cluster"; private static final String LEADER_CONFIGMAP_NAME = "leader-configmap-name"; private static final String LOCK_IDENTITY = "lock-identity"; @RegisterExtension private final TestingFatalErrorHandlerExtension testingFatalErrorHandlerExtension = new TestingFatalErrorHandlerExtension(); @RegisterExtension private static final TestExecutorExtension<ExecutorService> testExecutorExtension = new TestExecutorExtension<>(Executors::newSingleThreadScheduledExecutor); @Test public void testElectionDriverGainsLeadership() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); }); } }; } @Test public void testElectionDriverLosesLeadership() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); getLeaderCallback().notLeader(); leaderElectionListener.await(LeaderElectionEvent.NotLeaderEvent.class); }); } }; } @Test public void testPublishLeaderInformation() throws Exception { new TestFixture() { { runTest( () -> { leaderCallbackGrantLeadership(); leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class); final LeaderInformation leaderInformation = LeaderInformation.known(UUID.randomUUID(), "localhost"); final String componentId = "componentId"; final DefaultLeaderRetrievalService leaderRetrievalService = new DefaultLeaderRetrievalService( new KubernetesMultipleComponentLeaderRetrievalDriverFactory( getFlinkKubeClient(), getConfigMapSharedWatcher(), testExecutorExtension.getExecutor(), LEADER_CONFIGMAP_NAME, componentId)); final TestingListener leaderRetrievalListener = new TestingListener(); leaderRetrievalService.start(leaderRetrievalListener); leaderElectionDriver.publishLeaderInformation( componentId, leaderInformation); notifyLeaderRetrievalWatchOnModifiedConfigMap(); leaderRetrievalListener.waitForNewLeader(10_000L); assertThat(leaderRetrievalListener.getLeader()) .isEqualTo(leaderInformation); }); } }; } @Test /** Test fixture for the {@link KubernetesMultipleComponentLeaderElectionDriverTest}. 
*/ protected class TestFixture { private final KubernetesTestFixture kubernetesTestFixture; final TestingLeaderElectionListener leaderElectionListener; final KubernetesMultipleComponentLeaderElectionDriver leaderElectionDriver; TestFixture() { kubernetesTestFixture = new KubernetesTestFixture(CLUSTER_ID, LEADER_CONFIGMAP_NAME, LOCK_IDENTITY); leaderElectionListener = new TestingLeaderElectionListener(); leaderElectionDriver = createLeaderElectionDriver(); } private KubernetesMultipleComponentLeaderElectionDriver createLeaderElectionDriver() { final KubernetesLeaderElectionConfiguration leaderElectionConfiguration = new KubernetesLeaderElectionConfiguration( LEADER_CONFIGMAP_NAME, LOCK_IDENTITY, kubernetesTestFixture.getConfiguration()); return new KubernetesMultipleComponentLeaderElectionDriver( leaderElectionConfiguration, kubernetesTestFixture.getFlinkKubeClient(), leaderElectionListener, kubernetesTestFixture.getConfigMapSharedWatcher(), testExecutorExtension.getExecutor(), testingFatalErrorHandlerExtension.getTestingFatalErrorHandler()); } void leaderCallbackGrantLeadership() throws Exception { kubernetesTestFixture.leaderCallbackGrantLeadership(); } KubernetesLeaderElector.LeaderCallbackHandler getLeaderCallback() throws Exception { return kubernetesTestFixture.getLeaderCallback(); } FlinkKubeClient getFlinkKubeClient() { return kubernetesTestFixture.getFlinkKubeClient(); } KubernetesConfigMapSharedWatcher getConfigMapSharedWatcher() { return kubernetesTestFixture.getConfigMapSharedWatcher(); } FlinkKubeClient.WatchCallbackHandler<KubernetesConfigMap> getLeaderRetrievalConfigMapCallback() throws Exception { return kubernetesTestFixture.getLeaderRetrievalConfigMapCallback(); } void notifyLeaderRetrievalWatchOnModifiedConfigMap() throws Exception { kubernetesTestFixture .getLeaderRetrievalConfigMapCallback() .onModified( Collections.singletonList(kubernetesTestFixture.getLeaderConfigMap())); } void notifyLeaderElectionWatchOnModifiedConfigMap() throws Exception { kubernetesTestFixture .getLeaderElectionConfigMapCallback() .onModified( Collections.singletonList(kubernetesTestFixture.getLeaderConfigMap())); } void runTest(RunnableWithException testMethod) throws Exception { try { testMethod.run(); } finally { leaderElectionDriver.close(); kubernetesTestFixture.close(); } } } }
Let's replace `System.out` with the `OUT` static variable.
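A sketch of the suggested change; only the name `OUT` comes from the comment, while the `PrintStream` field and its declaring location are assumptions:

```java
import java.io.PrintStream;

// One shared stream constant instead of scattered System.out references.
public static final PrintStream OUT = System.out;

// Call sites in the method below then become:
OUT.printf("%s%n", invalidArgument);
```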
private static void handleProfilerArguments(String[] args) { String invalidArgument = "Invalid CLI Argument"; if (args.length != 0) { for (int i = 0; i < args.length; i++) { switch (args[i]) { case "--file": balJarName = args[i + 1]; if (balJarName.startsWith("[") && balJarName.endsWith("]")) { balJarName = balJarName.substring(1, balJarName.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; case "--args": balJarArgs = args[i + 1]; if (balJarArgs != null && balJarArgs.startsWith("[") && balJarArgs.endsWith("]")) { balJarArgs = balJarArgs.substring(1, balJarArgs.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; case "--skip": skipFunctionString = args[i + 1]; if (skipFunctionString != null && skipFunctionString.matches("\\[.*\\]")) { skipFunctionString = skipFunctionString.substring(1, skipFunctionString.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; default: System.out.printf(invalidArgument + "\n"); break; } } } }
System.out.printf(invalidArgument + "\n");
private static void handleProfilerArguments(String[] args) { String invalidArgument = "Invalid CLI Argument"; if (args.length != 0) { for (int i = 0; i < args.length; i++) { switch (args[i]) { case "--file": balJarName = args[i + 1]; if (balJarName.startsWith("[") && balJarName.endsWith("]")) { balJarName = balJarName.substring(1, balJarName.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; case "--args": balJarArgs = args[i + 1]; if (balJarArgs != null && balJarArgs.startsWith("[") && balJarArgs.endsWith("]")) { balJarArgs = balJarArgs.substring(1, balJarArgs.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; case "--skip": skipFunctionString = args[i + 1]; if (skipFunctionString != null && skipFunctionString.matches("\\[.*\\]")) { skipFunctionString = skipFunctionString.substring(1, skipFunctionString.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; default: break; } } } }
class Main { public static final String ANSI_RESET = "\u001B[0m"; public static final String ANSI_GRAY = "\033[37m"; public static final String ANSI_CYAN = "\033[1;38;2;32;182;176m"; static long profilerStartTime; static int exitCode = 0; public static final String TEMPJARFILENAME = "temp.jar"; private static String balJarArgs = null; static String balJarName = null; static String skipFunctionString = null; private static int balFunctionCount = 0; static int moduleCount = 0; static final List<String> INSTRUMENTEDPATHS = new ArrayList<>(); static final List<String> INSTRUMENTEDFILES = new ArrayList<>(); static final List<String> UTILINITPATHS = new ArrayList<>(); static final List<String> UTILPATHS = new ArrayList<>(); public static void main(String[] args) throws CustomException { profilerStartTime = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); tempFileCleanupShutdownHook(); printHeader(); handleProfilerArguments(args); extractTheProfiler(); createTempJar(balJarName); initialize(balJarName); } private static void printHeader() { String header = "%n" + ANSI_GRAY + "================================================================================" + ANSI_RESET + "%n" + ANSI_CYAN + "Ballerina Profiler" + ANSI_RESET + ": Profiling..." + "%n" + ANSI_GRAY + "================================================================================" + ANSI_RESET + "%n" + "WARNING : Ballerina Profiler is an experimental feature."; System.out.printf(header + "%n"); } private static void extractTheProfiler() throws CustomException { System.out.printf(ANSI_CYAN + "[1/6] Initializing Profiler..." + ANSI_RESET + "%n"); try { new ProcessBuilder("jar", "xvf", "Profiler.jar", "io/ballerina/runtime/profiler/runtime") .start() .waitFor(); } catch (IOException | InterruptedException exception) { throw new CustomException(exception); } } public static void createTempJar(String balJarName) { try { System.out.printf(ANSI_CYAN + "[2/6] Copying Executable..." + ANSI_RESET + "%n"); Path sourcePath = Paths.get(balJarName); Path destinationPath = Paths.get(TEMPJARFILENAME); Files.copy(sourcePath, destinationPath); } catch (IOException e) { exitCode = 2; System.out.printf("Error occurred while copying the file: %s%n", e.getMessage()); } } private static void initialize(String balJarName) throws CustomException { System.out.printf(ANSI_CYAN + "[3/6] Performing Analysis..." + ANSI_RESET + "%n"); ArrayList<String> classNames = new ArrayList<>(); try { findAllClassNames(balJarName, classNames); findUtilityClasses(classNames); } catch (Exception e) { System.out.printf("(No such file or directory)" + "%n"); } System.out.printf(ANSI_CYAN + "[4/6] Instrumenting Functions..." 
+ ANSI_RESET + "%n"); try (JarFile jarFile = new JarFile(balJarName)) { String mainClassPackage = MethodWrapper.mainClassFinder( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); CustomClassLoader customClassLoader = new CustomClassLoader( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); Set<String> usedPaths = new HashSet<>(); for (String className : classNames) { if (mainClassPackage == null) { continue; } if (className.startsWith(mainClassPackage.split("/")[0]) || UTILPATHS.contains(className)) { try (InputStream inputStream = jarFile.getInputStream(jarFile.getJarEntry(className))) { byte[] code = MethodWrapper.modifyMethods(inputStream); customClassLoader.loadClass(code); usedPaths.add(className.replace(".class", "").replace("/", ".")); MethodWrapper.printCode(className, code); } } if (className.endsWith("/$_init.class")) { moduleCount++; } } System.out.printf(" ○ Instrumented Module Count: " + moduleCount + "%n"); try (PrintWriter printWriter = new PrintWriter("usedPathsList.txt", StandardCharsets.UTF_8)) { printWriter.println(String.join(", ", usedPaths)); } System.out.printf(" ○ Instrumented Function Count: " + balFunctionCount + "%n"); } catch (Throwable throwable) { throw new CustomException(throwable); } try { modifyTheJar(); } catch (Throwable throwable) { throw new CustomException(throwable); } } private static void modifyTheJar() throws InterruptedException, IOException { try { final File userDirectory = new File(System.getProperty("user.dir")); listAllFiles(userDirectory); List<String> changedDirectories = INSTRUMENTEDFILES.stream().distinct().collect(Collectors.toList()); loadDirectories(changedDirectories); } finally { for (String instrumentedFilePath : INSTRUMENTEDPATHS) { FileUtils.deleteDirectory(new File(instrumentedFilePath)); } FileUtils.deleteDirectory(new File("io/ballerina/runtime/profiler/runtime")); MethodWrapper.invokeMethods(); } } private static void loadDirectories(List<String> changedDirs) { try { ProcessBuilder processBuilder = new ProcessBuilder("jar", "uf", TEMPJARFILENAME); processBuilder.command().addAll(changedDirs); processBuilder.start().waitFor(); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public static void listAllFiles(final File userDirectory) { String absolutePath = Paths.get(TEMPJARFILENAME).toFile().getAbsolutePath(); absolutePath = absolutePath.replaceAll(TEMPJARFILENAME, ""); File[] files = userDirectory.listFiles(); if (files != null) { for (final File fileEntry : files) { if (fileEntry.isDirectory()) { listAllFiles(fileEntry); } else { String fileEntryString = String.valueOf(fileEntry); if (fileEntryString.endsWith(".class")) { fileEntryString = fileEntryString.replaceAll(absolutePath, ""); int index = fileEntryString.lastIndexOf('/'); fileEntryString = fileEntryString.substring(0, index); String[] fileEntryParts = fileEntryString.split("/"); INSTRUMENTEDPATHS.add(fileEntryParts[0]); INSTRUMENTEDFILES.add(fileEntryString); } } } } } private static void findAllClassNames(String jarPath, ArrayList<String> classNames) throws IOException { try (ZipInputStream zipInputStream = new ZipInputStream(new FileInputStream(jarPath))) { for (ZipEntry entry = zipInputStream.getNextEntry(); entry != null; entry = zipInputStream.getNextEntry()) { if (!entry.isDirectory() && entry.getName().endsWith(".class")) { classNames.add(String.valueOf(entry)); } } } } private static void findUtilityClasses(ArrayList<String> classNames) { for 
(String className : classNames) { if (className.endsWith("$_init.class")) { String path = className.substring(0, className.lastIndexOf('/') + 1); if (!UTILINITPATHS.contains(path)) { UTILINITPATHS.add(path); } } } for (String name : classNames) { for (String path : UTILINITPATHS) { if (name.startsWith(path)) { String subPath = name.substring(path.length()); if (subPath.indexOf('/') == -1) { UTILPATHS.add(name); } } } } } private static void deleteTempData() { String filePrefix = "jartmp"; File[] files = new File(System.getProperty("user.dir")).listFiles(); if (files != null) { for (File file : files) { if (file.getName().startsWith(filePrefix)) { FileUtils.deleteQuietly(file); } } } } private static void tempFileCleanupShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread(() -> { try { long profilerTotalTime = TimeUnit.MILLISECONDS.convert( System.nanoTime(), TimeUnit.NANOSECONDS) - profilerStartTime; File tempJarFile = new File(TEMPJARFILENAME); if (tempJarFile.exists()) { boolean deleted = tempJarFile.delete(); if (!deleted) { System.err.printf("Failed to delete temp jar file: " + TEMPJARFILENAME + "%n"); } } System.out.printf("%n" + ANSI_CYAN + "[6/6] Generating Output..." + ANSI_RESET + "%n"); Thread.sleep(100); initializeCPUParser(skipFunctionString); deleteFileIfExists("usedPathsList.txt"); deleteFileIfExists("CpuPre.json"); System.out.printf(" ○ Execution Time: " + profilerTotalTime / 1000 + " Seconds" + "%n"); deleteTempData(); initializeHTMLExport(); deleteFileIfExists("performance_report.json"); System.out.printf("----------------------------------------"); System.out.printf("----------------------------------------" + "%n"); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } finally { String jarPath; try { jarPath = Main.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (URISyntaxException e) { throw new RuntimeException(e); } File jarFile = new File(jarPath); if (jarFile.exists()) { boolean deleted = jarFile.delete(); if (!deleted) { System.err.printf("Failed to delete jar file: " + jarPath + "%n"); } } } })); } private static void deleteFileIfExists(String filePath) { File file = new File(filePath); if (file.exists()) { boolean deleted = file.delete(); if (!deleted) { System.err.printf("Failed to delete file: " + filePath + "%n"); } } } public static void incrementBalFunctionCount() { balFunctionCount++; } public static String getBalJarArgs() { return balJarArgs; } }
class Main { static long profilerStartTime; static int exitCode = 0; private static String balJarArgs = null; static String balJarName = null; static String skipFunctionString = null; private static int balFunctionCount = 0; static int moduleCount = 0; static final List<String> INSTRUMENTED_PATHS = new ArrayList<>(); static final List<String> INSTRUMENTED_FILES = new ArrayList<>(); static final List<String> UTIL_INIT_PATHS = new ArrayList<>(); static final List<String> UTIL_PATHS = new ArrayList<>(); public static void main(String[] args) throws CustomException { profilerStartTime = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); addShutdownHookAndCleanup(); printHeader(); handleProfilerArguments(args); extractProfiler(); createTempJar(balJarName); initializeProfiling(balJarName); } private static void printHeader() { String header = "%n" + Constants.ANSI_GRAY + "================================================================================" + Constants.ANSI_RESET + "%n" + Constants.ANSI_CYAN + "Ballerina Profiler" + Constants.ANSI_RESET + ": Profiling..." + "%n" + Constants.ANSI_GRAY + "================================================================================" + Constants.ANSI_RESET + "%n" + "WARNING : Ballerina Profiler is an experimental feature."; OUT.printf(header + "%n"); } private static void extractProfiler() throws CustomException { OUT.printf(Constants.ANSI_CYAN + "[1/6] Initializing Profiler..." + Constants.ANSI_RESET + "%n"); try { new ProcessBuilder("jar", "xvf", "Profiler.jar", "io/ballerina/runtime/profiler/runtime") .start() .waitFor(); } catch (IOException | InterruptedException exception) { throw new CustomException(exception); } } public static void createTempJar(String balJarName) { try { OUT.printf(Constants.ANSI_CYAN + "[2/6] Copying Executable..." + Constants.ANSI_RESET + "%n"); Path sourcePath = Paths.get(balJarName); Path destinationPath = Paths.get(Constants.TEMP_JAR_FILE_NAME); Files.copy(sourcePath, destinationPath); } catch (IOException e) { exitCode = 2; OUT.printf("Error occurred while copying the file: %s%n", e.getMessage()); } } private static void initializeProfiling(String balJarName) throws CustomException { OUT.printf(Constants.ANSI_CYAN + "[3/6] Performing Analysis..." + Constants.ANSI_RESET + "%n"); ArrayList<String> classNames = new ArrayList<>(); try { findAllClassNames(balJarName, classNames); findUtilityClasses(classNames); } catch (Exception e) { OUT.printf("(No such file or directory)" + "%n"); } OUT.printf(Constants.ANSI_CYAN + "[4/6] Instrumenting Functions..." 
+ Constants.ANSI_RESET + "%n"); try (JarFile jarFile = new JarFile(balJarName)) { String mainClassPackage = MethodWrapper.mainClassFinder( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); CustomClassLoader customClassLoader = new CustomClassLoader( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); Set<String> usedPaths = new HashSet<>(); for (String className : classNames) { if (mainClassPackage == null) { continue; } if (className.startsWith(mainClassPackage.split("/")[0]) || UTIL_PATHS.contains(className)) { try (InputStream inputStream = jarFile.getInputStream(jarFile.getJarEntry(className))) { byte[] code = MethodWrapper.modifyMethods(inputStream); customClassLoader.loadClass(code); usedPaths.add(className.replace(".class", "").replace("/", ".")); MethodWrapper.printCode(className, code); } } if (className.endsWith("/$_init.class")) { moduleCount++; } } OUT.printf(" ○ Instrumented Module Count: " + moduleCount + "%n"); try (PrintWriter printWriter = new PrintWriter("usedPathsList.txt", StandardCharsets.UTF_8)) { printWriter.println(String.join(", ", usedPaths)); } OUT.printf(" ○ Instrumented Function Count: " + balFunctionCount + "%n"); } catch (Throwable throwable) { throw new CustomException(throwable); } try { modifyJar(); } catch (Throwable throwable) { throw new CustomException(throwable); } } private static void modifyJar() throws InterruptedException, IOException { try { final File userDirectory = new File(System.getProperty("user.dir")); listAllFiles(userDirectory); List<String> changedDirectories = INSTRUMENTED_FILES.stream().distinct().collect(Collectors.toList()); loadDirectories(changedDirectories); } finally { for (String instrumentedFilePath : INSTRUMENTED_PATHS) { FileUtils.deleteDirectory(new File(instrumentedFilePath)); } FileUtils.deleteDirectory(new File("io/ballerina/runtime/profiler/runtime")); MethodWrapper.invokeMethods(); } } private static void loadDirectories(List<String> changedDirs) { try { ProcessBuilder processBuilder = new ProcessBuilder("jar", "uf", Constants.TEMP_JAR_FILE_NAME); processBuilder.command().addAll(changedDirs); processBuilder.start().waitFor(); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public static void listAllFiles(final File userDirectory) { String absolutePath = Paths.get(Constants.TEMP_JAR_FILE_NAME).toFile() .getAbsolutePath().replaceAll(Constants.TEMP_JAR_FILE_NAME, ""); File[] files = userDirectory.listFiles(); if (files != null) { for (final File fileEntry : files) { if (fileEntry.isDirectory()) { listAllFiles(fileEntry); } else { String fileEntryString = String.valueOf(fileEntry); if (fileEntryString.endsWith(".class")) { fileEntryString = fileEntryString.replaceAll(absolutePath, ""); int index = fileEntryString.lastIndexOf('/'); fileEntryString = fileEntryString.substring(0, index); String[] fileEntryParts = fileEntryString.split("/"); INSTRUMENTED_PATHS.add(fileEntryParts[0]); INSTRUMENTED_FILES.add(fileEntryString); } } } } } private static void findAllClassNames(String jarPath, ArrayList<String> classNames) throws IOException { try (ZipInputStream zipInputStream = new ZipInputStream(new FileInputStream(jarPath))) { for (ZipEntry entry = zipInputStream.getNextEntry(); entry != null; entry = zipInputStream.getNextEntry()) { if (!entry.isDirectory() && entry.getName().endsWith(".class")) { classNames.add(String.valueOf(entry)); } } } } private static void findUtilityClasses(ArrayList<String> classNames) { 
for (String className : classNames) { if (className.endsWith("$_init.class")) { String path = className.substring(0, className.lastIndexOf('/') + 1); if (!UTIL_INIT_PATHS.contains(path)) { UTIL_INIT_PATHS.add(path); } } } for (String name : classNames) { for (String path : UTIL_INIT_PATHS) { if (name.startsWith(path)) { String subPath = name.substring(path.length()); if (subPath.indexOf('/') == -1) { UTIL_PATHS.add(name); } } } } } private static void deleteTempData() { String filePrefix = "jartmp"; File[] files = new File(System.getProperty("user.dir")).listFiles(); if (files != null) { for (File file : files) { if (file.getName().startsWith(filePrefix)) { FileUtils.deleteQuietly(file); } } } } private static void addShutdownHookAndCleanup() { Runtime.getRuntime().addShutdownHook(new Thread(() -> { try { long profilerTotalTime = TimeUnit.MILLISECONDS.convert( System.nanoTime(), TimeUnit.NANOSECONDS) - profilerStartTime; File tempJarFile = new File(Constants.TEMP_JAR_FILE_NAME); if (tempJarFile.exists()) { boolean deleted = tempJarFile.delete(); if (!deleted) { System.err.printf("Failed to delete temp jar file: " + Constants.TEMP_JAR_FILE_NAME + "%n"); } } OUT.printf("%n" + Constants.ANSI_CYAN + "[6/6] Generating Output..." + Constants.ANSI_RESET + "%n"); Thread.sleep(100); initializeCPUParser(skipFunctionString); deleteFileIfExists("usedPathsList.txt"); deleteFileIfExists("CpuPre.json"); OUT.printf(" ○ Execution Time: " + profilerTotalTime / 1000 + " Seconds" + "%n"); deleteTempData(); initializeHTMLExport(); deleteFileIfExists("performance_report.json"); OUT.printf("----------------------------------------"); OUT.printf("----------------------------------------" + "%n"); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } finally { String jarPath; try { jarPath = Main.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (URISyntaxException e) { throw new RuntimeException(e); } File jarFile = new File(jarPath); if (jarFile.exists()) { boolean deleted = jarFile.delete(); if (!deleted) { System.err.printf("Failed to delete jar file: " + jarPath + "%n"); } } } })); } private static void deleteFileIfExists(String filePath) { File file = new File(filePath); if (file.exists()) { boolean deleted = file.delete(); if (!deleted) { System.err.printf("Failed to delete file: " + filePath + "%n"); } } } public static void incrementBalFunctionCount() { balFunctionCount++; } public static String getBalJarArgs() { return balJarArgs; } }
This resolves a bean that generally won't have been created until runtime, when you actually have vert.x. Currently, if you fail in static init then this method is still called, but vert.x is not ready yet (as it is a runtime bean), so you get an extra unrelated exception in your logs (and a possibly unclean shutdown).
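One possible mitigation, sketched here only to illustrate the failure mode (this is an assumption, not the project's actual fix): resolve the runtime Vert.x bean defensively and skip undeployment when it is not available, so a static-init failure does not produce the secondary exception.

```java
// Sketch: bail out quietly if the runtime Vert.x bean cannot be resolved yet
// (e.g. the application failed during static init and vert.x never started).
io.vertx.mutiny.core.Vertx mutiny;
try {
    mutiny = beanManager.createInstance()
            .select(io.vertx.mutiny.core.Vertx.class).get();
} catch (Exception e) {
    LOGGER.debugf("Vert.x not available, skipping verticle undeployment: %s", e.toString());
    return;
}
```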
void undeployVerticles(@Observes @BeforeDestroyed(ApplicationScoped.class) Object event, BeanManager beanManager) { Set<Bean<?>> beans = beanManager.getBeans(AbstractVerticle.class, Any.Literal.INSTANCE); Context applicationContext = beanManager.getContext(ApplicationScoped.class); for (Bean<?> bean : beans) { if (ApplicationScoped.class.equals(bean.getScope())) { Object instance = applicationContext.get(bean); if (instance != null) { try { AbstractVerticle verticle = (AbstractVerticle) instance; io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance() .select(io.vertx.mutiny.core.Vertx.class).get(); mutiny.undeploy(verticle.deploymentID()).await().indefinitely(); LOGGER.debugf("Undeployed verticle: %s", instance.getClass()); } catch (Exception e) { LOGGER.debugf("Unable to undeploy verticle %s: %s", instance.getClass(), e.toString()); } } } } }
io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance()
void undeployVerticles(@Observes @BeforeDestroyed(ApplicationScoped.class) Object event, BeanManager beanManager) { Set<Bean<?>> beans = beanManager.getBeans(AbstractVerticle.class, Any.Literal.INSTANCE); Context applicationContext = beanManager.getContext(ApplicationScoped.class); for (Bean<?> bean : beans) { if (ApplicationScoped.class.equals(bean.getScope())) { Object instance = applicationContext.get(bean); if (instance != null) { try { AbstractVerticle verticle = (AbstractVerticle) instance; io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance() .select(io.vertx.mutiny.core.Vertx.class).get(); mutiny.undeploy(verticle.deploymentID()).await().indefinitely(); LOGGER.debugf("Undeployed verticle: %s", instance.getClass()); } catch (Exception e) { LOGGER.debugf("Unable to undeploy verticle %s: %s", instance.getClass(), e.toString()); } } } } }
class VertxProducer { private static final Logger LOGGER = Logger.getLogger(VertxProducer.class); @Singleton @Produces public EventBus eventbus(Vertx vertx) { return vertx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.Vertx mutiny(Vertx vertx) { return io.vertx.mutiny.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.Vertx axle(Vertx vertx) { LOGGER.warn( "`io.vertx.axle.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.axle.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.Vertx rx(Vertx vertx) { LOGGER.warn( "`io.vertx.reactivex.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.reactivex.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.eventbus.EventBus axleEventBus(io.vertx.axle.core.Vertx axle) { LOGGER.warn( "`io.vertx.axle.core.eventbus.EventBus` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return axle.eventBus(); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.eventbus.EventBus rxEventBus(io.vertx.reactivex.core.Vertx rx) { LOGGER.warn( "`io.vertx.reactivex.core.eventbus.EventBus` is deprecated and will be removed in a future version - it " + "is recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return rx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.eventbus.EventBus mutinyEventBus(io.vertx.mutiny.core.Vertx mutiny) { return mutiny.eventBus(); } /** * Undeploy verticles backed by contextual instances of {@link ApplicationScoped} beans before the application context is * destroyed. Otherwise Vertx may attempt to stop the verticles after the CDI container is shut down. * * @param event * @param beanManager */ }
class VertxProducer { private static final Logger LOGGER = Logger.getLogger(VertxProducer.class); @Singleton @Produces public EventBus eventbus(Vertx vertx) { return vertx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.Vertx mutiny(Vertx vertx) { return io.vertx.mutiny.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.Vertx axle(Vertx vertx) { LOGGER.warn( "`io.vertx.axle.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.axle.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.Vertx rx(Vertx vertx) { LOGGER.warn( "`io.vertx.reactivex.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.reactivex.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.eventbus.EventBus axleEventBus(io.vertx.axle.core.Vertx axle) { LOGGER.warn( "`io.vertx.axle.core.eventbus.EventBus` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return axle.eventBus(); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.eventbus.EventBus rxEventBus(io.vertx.reactivex.core.Vertx rx) { LOGGER.warn( "`io.vertx.reactivex.core.eventbus.EventBus` is deprecated and will be removed in a future version - it " + "is recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return rx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.eventbus.EventBus mutinyEventBus(io.vertx.mutiny.core.Vertx mutiny) { return mutiny.eventBus(); } /** * Undeploy verticles backed by contextual instances of {@link ApplicationScoped} beans before the application context is * destroyed. Otherwise Vertx may attempt to stop the verticles after the CDI container is shut down. * * @param event * @param beanManager */ }
Is this a typo for `inventory.customers`?
public void testDebeziumSchemaTransformPostgresRead() throws InterruptedException { long writeSize = 500L; long testTime = writeSize * 200L; POSTGRES_SQL_CONTAINER.start(); PipelineOptions options = PipelineOptionsFactory.create(); Pipeline writePipeline = Pipeline.create(options); writePipeline .apply(GenerateSequence.from(0).to(writeSize).withRate(10, Duration.standardSeconds(1))) .apply( MapElements.into(TypeDescriptors.rows()) .via( num -> Row.withSchema(TABLE_SCHEMA) .withFieldValue( "id", num <= 1000 ? Long.valueOf(num).intValue() : Long.valueOf(num).intValue() + 4) .withFieldValue("first_name", Long.toString(num)) .withFieldValue("last_name", Long.toString(writeSize - num)) .withFieldValue("email", Long.toString(num) + "@beamail.com") .build())) .setRowSchema(TABLE_SCHEMA) .apply( JdbcIO.<Row>write() .withTable("inventory.inventory.customers") .withDataSourceConfiguration( JdbcIO.DataSourceConfiguration.create(getPostgresDatasource()))); Pipeline readPipeline = Pipeline.create(options); PCollection<Row> result = PCollectionRowTuple.empty(readPipeline) .apply( new DebeziumReadSchemaTransformProvider( true, Long.valueOf(writeSize).intValue() + 4, testTime) .from( DebeziumReadSchemaTransformProvider.DebeziumReadSchemaTransformConfiguration .builder() .setDatabase("POSTGRES") .setPassword("dbz") .setUsername("debezium") .setHost("localhost") .setTable("inventory.customers") .setPort(POSTGRES_SQL_CONTAINER.getMappedPort(5432)) .build()) .buildTransform()) .get("output"); PAssert.that(result) .satisfies( rows -> { assertThat( Lists.newArrayList(rows).size(), equalTo(Long.valueOf(writeSize + 4).intValue())); return null; }); Thread writeThread = new Thread(() -> writePipeline.run().waitUntilFinish()); writeThread.start(); readPipeline.run().waitUntilFinish(); writeThread.join(); }
.withTable("inventory.inventory.customers")
public void testDebeziumSchemaTransformPostgresRead() throws InterruptedException { long writeSize = 500L; long testTime = writeSize * 200L; POSTGRES_SQL_CONTAINER.start(); PipelineOptions options = PipelineOptionsFactory.create(); Pipeline writePipeline = Pipeline.create(options); writePipeline .apply(GenerateSequence.from(0).to(writeSize).withRate(10, Duration.standardSeconds(1))) .apply( MapElements.into(TypeDescriptors.rows()) .via( num -> Row.withSchema(TABLE_SCHEMA) .withFieldValue( "id", num <= 1000 ? Long.valueOf(num).intValue() : Long.valueOf(num).intValue() + 4) .withFieldValue("first_name", Long.toString(num)) .withFieldValue("last_name", Long.toString(writeSize - num)) .withFieldValue("email", Long.toString(num) + "@beamail.com") .build())) .setRowSchema(TABLE_SCHEMA) .apply( JdbcIO.<Row>write() .withTable("inventory.inventory.customers") .withDataSourceConfiguration( JdbcIO.DataSourceConfiguration.create(getPostgresDatasource()))); Pipeline readPipeline = Pipeline.create(options); PCollection<Row> result = PCollectionRowTuple.empty(readPipeline) .apply( new DebeziumReadSchemaTransformProvider( true, Long.valueOf(writeSize).intValue() + 4, testTime) .from( DebeziumReadSchemaTransformProvider.DebeziumReadSchemaTransformConfiguration .builder() .setDatabase("POSTGRES") .setPassword("dbz") .setUsername("debezium") .setHost("localhost") .setTable("inventory.customers") .setPort(POSTGRES_SQL_CONTAINER.getMappedPort(5432)) .build()) .buildTransform()) .get("output"); PAssert.that(result) .satisfies( rows -> { assertThat( Lists.newArrayList(rows).size(), equalTo(Long.valueOf(writeSize + 4).intValue())); return null; }); Thread writeThread = new Thread(() -> writePipeline.run().waitUntilFinish()); writeThread.start(); readPipeline.run().waitUntilFinish(); writeThread.join(); }
class DebeziumIOPostgresSqlConnectorIT { /** * Debezium - PostgresSqlContainer * * <p>Creates a docker container using the image used by the debezium tutorial. */ @ClassRule public static final PostgreSQLContainer<?> POSTGRES_SQL_CONTAINER = new PostgreSQLContainer<>( DockerImageName.parse("debezium/example-postgres:latest") .asCompatibleSubstituteFor("postgres")) .withPassword("dbz") .withUsername("debezium") .withExposedPorts(5432) .withDatabaseName("inventory"); static final Schema TABLE_SCHEMA = Schema.builder() .addInt32Field("id") .addStringField("first_name") .addStringField("last_name") .addStringField("email") .build(); static DataSource getPostgresDatasource() { PGSimpleDataSource dataSource = new PGSimpleDataSource(); dataSource.setDatabaseName("inventory"); dataSource.setServerName(POSTGRES_SQL_CONTAINER.getContainerIpAddress()); dataSource.setPortNumber(POSTGRES_SQL_CONTAINER.getMappedPort(5432)); dataSource.setUser("debezium"); dataSource.setPassword("dbz"); return dataSource; } /** * Debezium - PostgresSql connector Test. * * <p>Tests that connector can actually connect to the database */ @Test public void testDebeziumIOPostgresSql() { POSTGRES_SQL_CONTAINER.start(); String host = POSTGRES_SQL_CONTAINER.getContainerIpAddress(); String port = POSTGRES_SQL_CONTAINER.getMappedPort(5432).toString(); PipelineOptions options = PipelineOptionsFactory.create(); Pipeline p = Pipeline.create(options); PCollection<String> results = p.apply( DebeziumIO.<String>read() .withConnectorConfiguration( DebeziumIO.ConnectorConfiguration.create() .withUsername("debezium") .withPassword("dbz") .withConnectorClass(PostgresConnector.class) .withHostName(host) .withPort(port) .withConnectionProperty("database.dbname", "inventory") .withConnectionProperty("database.server.name", "dbserver1") .withConnectionProperty("database.include.list", "inventory") .withConnectionProperty("include.schema.changes", "false")) .withFormatFunction(new SourceRecordJson.SourceRecordJsonMapper()) .withMaxNumberOfRecords(30) .withCoder(StringUtf8Coder.of())); String expected = "{\"metadata\":{\"connector\":\"postgresql\",\"version\":\"1.3.1.Final\",\"name\":\"dbserver1\"," + "\"database\":\"inventory\",\"schema\":\"inventory\",\"table\":\"customers\"},\"before\":null," + "\"after\":{\"fields\":{\"last_name\":\"Thomas\",\"id\":1001,\"first_name\":\"Sally\"," + "\"email\":\"[email protected]\"}}}"; PAssert.that(results) .satisfies( (Iterable<String> res) -> { assertThat(res, hasItem(expected)); return null; }); p.run().waitUntilFinish(); POSTGRES_SQL_CONTAINER.stop(); } }
class DebeziumIOPostgresSqlConnectorIT { /** * Debezium - PostgresSqlContainer * * <p>Creates a docker container using the image used by the debezium tutorial. */ @ClassRule public static final PostgreSQLContainer<?> POSTGRES_SQL_CONTAINER = new PostgreSQLContainer<>( DockerImageName.parse("debezium/example-postgres:latest") .asCompatibleSubstituteFor("postgres")) .withPassword("dbz") .withUsername("debezium") .withExposedPorts(5432) .withDatabaseName("inventory"); static final Schema TABLE_SCHEMA = Schema.builder() .addInt32Field("id") .addStringField("first_name") .addStringField("last_name") .addStringField("email") .build(); static DataSource getPostgresDatasource() { PGSimpleDataSource dataSource = new PGSimpleDataSource(); dataSource.setDatabaseName("inventory"); dataSource.setServerName(POSTGRES_SQL_CONTAINER.getContainerIpAddress()); dataSource.setPortNumber(POSTGRES_SQL_CONTAINER.getMappedPort(5432)); dataSource.setUser("debezium"); dataSource.setPassword("dbz"); return dataSource; } /** * Debezium - PostgresSql connector Test. * * <p>Tests that connector can actually connect to the database */ @Test public void testDebeziumIOPostgresSql() { POSTGRES_SQL_CONTAINER.start(); String host = POSTGRES_SQL_CONTAINER.getContainerIpAddress(); String port = POSTGRES_SQL_CONTAINER.getMappedPort(5432).toString(); PipelineOptions options = PipelineOptionsFactory.create(); Pipeline p = Pipeline.create(options); PCollection<String> results = p.apply( DebeziumIO.<String>read() .withConnectorConfiguration( DebeziumIO.ConnectorConfiguration.create() .withUsername("debezium") .withPassword("dbz") .withConnectorClass(PostgresConnector.class) .withHostName(host) .withPort(port) .withConnectionProperty("database.dbname", "inventory") .withConnectionProperty("database.server.name", "dbserver1") .withConnectionProperty("database.include.list", "inventory") .withConnectionProperty("include.schema.changes", "false")) .withFormatFunction(new SourceRecordJson.SourceRecordJsonMapper()) .withMaxNumberOfRecords(30) .withCoder(StringUtf8Coder.of())); String expected = "{\"metadata\":{\"connector\":\"postgresql\",\"version\":\"1.3.1.Final\",\"name\":\"dbserver1\"," + "\"database\":\"inventory\",\"schema\":\"inventory\",\"table\":\"customers\"},\"before\":null," + "\"after\":{\"fields\":{\"last_name\":\"Thomas\",\"id\":1001,\"first_name\":\"Sally\"," + "\"email\":\"[email protected]\"}}}"; PAssert.that(results) .satisfies( (Iterable<String> res) -> { assertThat(res, hasItem(expected)); return null; }); p.run().waitUntilFinish(); POSTGRES_SQL_CONTAINER.stop(); } }
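A note on the naming question in this record: PostgreSQL addresses tables as schema.table, and accepts a three-part database.schema.table name only when the first part matches the current database. Since both the database and the schema here are named "inventory", "inventory.inventory.customers" and "inventory.customers" should resolve to the same table. A minimal plain-JDBC sketch of the schema-qualified form; the connection values are copied from the test above, everything else is assumed:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class QualifiedNameSketch {
    public static void main(String[] args) throws Exception {
        // The URL already selects the "inventory" database, so schema.table is enough.
        try (Connection conn = DriverManager.getConnection(
                        "jdbc:postgresql://localhost:5432/inventory", "debezium", "dbz");
                PreparedStatement ps = conn.prepareStatement(
                        "INSERT INTO inventory.customers (id, first_name, last_name, email) "
                                + "VALUES (?, ?, ?, ?)")) {
            ps.setInt(1, 1);
            ps.setString(2, "first");
            ps.setString(3, "last");
            ps.setString(4, "someone@example.com");
            ps.executeUpdate();
        }
    }
}
```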
I want it to fail hard if it's not `X509Certificate[]`.
public List<X509Certificate> getClientCertificateChain() { return Optional.ofNullable(parent.context().get(ServletRequest.JDISC_REQUEST_X509CERT)) .map(X509Certificate[].class::cast) .map(Arrays::asList) .orElse(Collections.emptyList()); }
.map(X509Certificate[].class::cast)
public List<X509Certificate> getClientCertificateChain() { return Optional.ofNullable(parent.context().get(ServletRequest.JDISC_REQUEST_X509CERT)) .map(X509Certificate[].class::cast) .map(Arrays::asList) .orElse(Collections.emptyList()); }
class JdiscFilterRequest extends DiscFilterRequest { private final HttpRequest parent; public JdiscFilterRequest(HttpRequest parent) { super(parent); this.parent = parent; } public HttpRequest getParentRequest() { return parent; } public void setUri(URI uri) { parent.setUri(uri); } @Override public String getMethod() { return parent.getMethod().name(); } @Override public String getParameter(String name) { if(parent.parameters().containsKey(name)) { return parent.parameters().get(name).get(0); } else { return null; } } @Override public Enumeration<String> getParameterNames() { return Collections.enumeration(parent.parameters().keySet()); } @Override public void addHeader(String name, String value) { parent.headers().add(name, value); } @Override public String getHeader(String name) { List<String> values = parent.headers().get(name); if (values == null || values.isEmpty()) { return null; } return values.get(values.size() - 1); } public Enumeration<String> getHeaderNames() { return Collections.enumeration(parent.headers().keySet()); } public List<String> getHeaderNamesAsList() { return new ArrayList<String>(parent.headers().keySet()); } @Override public Enumeration<String> getHeaders(String name) { return Collections.enumeration(getHeadersAsList(name)); } public List<String> getHeadersAsList(String name) { List<String> values = parent.headers().get(name); if(values == null) { return Collections.<String>emptyList(); } return parent.headers().get(name); } @Override public void removeHeaders(String name) { parent.headers().remove(name); } @Override public void setHeaders(String name, String value) { parent.headers().put(name, value); } @Override public void setHeaders(String name, List<String> values) { parent.headers().put(name, values); } @Override public Principal getUserPrincipal() { return parent.getUserPrincipal(); } @Override public void setUserPrincipal(Principal principal) { this.parent.setUserPrincipal(principal); } @Override public void clearCookies() { parent.headers().remove(HttpHeaders.Names.COOKIE); } }
class JdiscFilterRequest extends DiscFilterRequest { private final HttpRequest parent; public JdiscFilterRequest(HttpRequest parent) { super(parent); this.parent = parent; } public HttpRequest getParentRequest() { return parent; } public void setUri(URI uri) { parent.setUri(uri); } @Override public String getMethod() { return parent.getMethod().name(); } @Override public String getParameter(String name) { if(parent.parameters().containsKey(name)) { return parent.parameters().get(name).get(0); } else { return null; } } @Override public Enumeration<String> getParameterNames() { return Collections.enumeration(parent.parameters().keySet()); } @Override public void addHeader(String name, String value) { parent.headers().add(name, value); } @Override public String getHeader(String name) { List<String> values = parent.headers().get(name); if (values == null || values.isEmpty()) { return null; } return values.get(values.size() - 1); } public Enumeration<String> getHeaderNames() { return Collections.enumeration(parent.headers().keySet()); } public List<String> getHeaderNamesAsList() { return new ArrayList<String>(parent.headers().keySet()); } @Override public Enumeration<String> getHeaders(String name) { return Collections.enumeration(getHeadersAsList(name)); } public List<String> getHeadersAsList(String name) { List<String> values = parent.headers().get(name); if(values == null) { return Collections.<String>emptyList(); } return parent.headers().get(name); } @Override public void removeHeaders(String name) { parent.headers().remove(name); } @Override public void setHeaders(String name, String value) { parent.headers().put(name, value); } @Override public void setHeaders(String name, List<String> values) { parent.headers().put(name, values); } @Override public Principal getUserPrincipal() { return parent.getUserPrincipal(); } @Override public void setUserPrincipal(Principal principal) { this.parent.setUserPrincipal(principal); } @Override public void clearCookies() { parent.headers().remove(HttpHeaders.Names.COOKIE); } }
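To illustrate why the `X509Certificate[].class::cast` step above already satisfies the comment: `Class.cast` throws a `ClassCastException` whenever the value has a different runtime type. A small self-contained sketch; the simulated attribute value is made up:

```java
import java.security.cert.X509Certificate;

public class CastSketch {
    public static void main(String[] args) {
        Object wrongType = new Object[0]; // simulate a badly typed context attribute
        try {
            X509Certificate[] chain = X509Certificate[].class.cast(wrongType);
            System.out.println("unexpected: cast succeeded with " + chain.length + " certs");
        } catch (ClassCastException expected) {
            // This is the hard failure the review comment asks for.
            System.out.println("failed hard as intended: " + expected);
        }
    }
}
```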
Make this variable static and final?
public static String generatePartitionName(PartitionKeyDesc desc) { String partitionName = "p_"; Pattern pattern = Pattern.compile("[^a-zA-Z0-9,]"); Matcher matcher = pattern.matcher(desc.toSql()); partitionName += matcher.replaceAll("").replaceAll("\\,", "_"); if (partitionName.length() > 50) { partitionName = partitionName.substring(0, 30) + Math.abs(Objects.hash(partitionName)) + "_" + System.currentTimeMillis(); } return partitionName; }
Pattern pattern = Pattern.compile("[^a-zA-Z0-9,]");
public static String generatePartitionName(PartitionKeyDesc desc) { Matcher matcher = PARTITION_NAME_PATTERN.matcher(desc.toSql()); String partitionName = PARTITION_NAME_PREFIX + matcher.replaceAll("").replaceAll("\\,", "_"); if (partitionName.length() > 50) { partitionName = partitionName.substring(0, 30) + Math.abs(Objects.hash(partitionName)) + "_" + System.currentTimeMillis(); } return partitionName; }
class MTMVPartitionUtil { private static final Logger LOG = LogManager.getLogger(MTMVPartitionUtil.class); /** * Determine whether the partition is sync with retated partition and other baseTables * * @param mtmv * @param partitionId * @param tables * @param excludedTriggerTables * @return * @throws AnalysisException */ public static boolean isMTMVPartitionSync(MTMV mtmv, Long partitionId, Set<BaseTableInfo> tables, Set<String> excludedTriggerTables) throws AnalysisException { boolean isSyncWithPartition = true; if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE) { MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); excludedTriggerTables.add(relatedTable.getName()); PartitionItem item = mtmv.getPartitionInfo().getItemOrAnalysisException(partitionId); Map<Long, PartitionItem> relatedPartitionItems = relatedTable.getPartitionItems(); long relatedPartitionId = getExistPartitionId(item, relatedPartitionItems); if (relatedPartitionId == -1L) { LOG.warn("can not found related partition: " + partitionId); return false; } isSyncWithPartition = isSyncWithPartition(mtmv, partitionId, relatedTable, relatedPartitionId); } return isSyncWithPartition && isSyncWithAllBaseTables(mtmv, partitionId, tables, excludedTriggerTables); } /** * Align the partitions of mtmv and related tables, delete more and add less * * @param mtmv * @param relatedTable * @throws DdlException * @throws AnalysisException */ public static void alignMvPartition(MTMV mtmv, MTMVRelatedTableIf relatedTable) throws DdlException, AnalysisException { Map<Long, PartitionItem> relatedTableItems = Maps.newHashMap(relatedTable.getPartitionItems()); Map<Long, PartitionItem> mtmvItems = Maps.newHashMap(mtmv.getPartitionItems()); for (Entry<Long, PartitionItem> entry : mtmvItems.entrySet()) { long partitionId = getExistPartitionId(entry.getValue(), relatedTableItems); if (partitionId == -1L) { dropPartition(mtmv, entry.getKey()); } } for (Entry<Long, PartitionItem> entry : relatedTableItems.entrySet()) { long partitionId = getExistPartitionId(entry.getValue(), mtmvItems); if (partitionId == -1L) { addPartition(mtmv, entry.getValue()); } } } /** * getPartitionDescsByRelatedTable when create MTMV * * @param relatedTable * @param tableProperties * @return * @throws AnalysisException */ public static List<AllPartitionDesc> getPartitionDescsByRelatedTable(MTMVRelatedTableIf relatedTable, Map<String, String> tableProperties) throws AnalysisException { HashMap<String, String> partitionProperties = Maps.newHashMap(); List<AllPartitionDesc> res = Lists.newArrayList(); Map<Long, PartitionItem> relatedTableItems = relatedTable.getPartitionItems(); for (Entry<Long, PartitionItem> entry : relatedTableItems.entrySet()) { PartitionKeyDesc oldPartitionKeyDesc = entry.getValue().toPartitionKeyDesc(); SinglePartitionDesc singlePartitionDesc = new SinglePartitionDesc(true, generatePartitionName(oldPartitionKeyDesc), oldPartitionKeyDesc, partitionProperties); singlePartitionDesc.analyze(1, tableProperties); res.add(singlePartitionDesc); } return res; } public static List<String> getPartitionNamesByIds(MTMV mtmv, Collection<Long> ids) throws AnalysisException { List<String> res = Lists.newArrayList(); for (Long partitionId : ids) { res.add(mtmv.getPartitionOrAnalysisException(partitionId).getName()); } return res; } public static List<Long> getPartitionsIdsByNames(MTMV mtmv, List<String> partitions) throws AnalysisException { List<Long> res = Lists.newArrayList(); for (String partitionName : 
partitions) { Partition partition = mtmv.getPartitionOrAnalysisException(partitionName); res.add(partition.getId()); } return res; } /** * check if table is sync with all baseTables * * @param mtmv * @return */ public static boolean isMTMVSync(MTMV mtmv) { MTMVRelation mtmvRelation = mtmv.getRelation(); if (mtmvRelation == null) { return false; } try { return isMTMVSync(mtmv, mtmvRelation.getBaseTables(), Sets.newHashSet()); } catch (AnalysisException e) { LOG.warn("isMTMVSync failed: ", e); return false; } } /** * Determine whether the mtmv is sync with tables * * @param mtmv * @param tables * @param excludeTables * @return * @throws AnalysisException */ public static boolean isMTMVSync(MTMV mtmv, Set<BaseTableInfo> tables, Set<String> excludeTables) throws AnalysisException { Collection<Partition> partitions = mtmv.getPartitions(); for (Partition partition : partitions) { if (!isMTMVPartitionSync(mtmv, partition.getId(), tables, excludeTables)) { return false; } } return true; } /** * get not sync tables * * @param mtmv * @param partitionId * @return * @throws AnalysisException */ public static List<String> getPartitionUnSyncTables(MTMV mtmv, Long partitionId) throws AnalysisException { List<String> res = Lists.newArrayList(); for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { TableIf table = MTMVUtil.getTable(baseTableInfo); if (!(table instanceof MTMVRelatedTableIf)) { continue; } MTMVRelatedTableIf mtmvRelatedTableIf = (MTMVRelatedTableIf) table; if (!mtmvRelatedTableIf.needAutoRefresh()) { continue; } if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE && mtmv .getMvPartitionInfo().getRelatedTableInfo().equals(baseTableInfo)) { PartitionItem item = mtmv.getPartitionInfo().getItemOrAnalysisException(partitionId); Map<Long, PartitionItem> relatedPartitionItems = mtmvRelatedTableIf.getPartitionItems(); long relatedPartitionId = getExistPartitionId(item, relatedPartitionItems); if (relatedPartitionId == -1L) { throw new AnalysisException("can not found related partition"); } boolean isSyncWithPartition = isSyncWithPartition(mtmv, partitionId, mtmvRelatedTableIf, relatedPartitionId); if (!isSyncWithPartition) { res.add(mtmvRelatedTableIf.getName()); } } else { if (!isSyncWithBaseTable(mtmv, partitionId, baseTableInfo)) { res.add(table.getName()); } } } return res; } /** * Get the partitions that need to be refreshed * * @param mtmv * @param baseTables * @return */ public static List<Long> getMTMVNeedRefreshPartitions(MTMV mtmv, Set<BaseTableInfo> baseTables) { Collection<Partition> allPartitions = mtmv.getPartitions(); List<Long> res = Lists.newArrayList(); for (Partition partition : allPartitions) { try { if (!isMTMVPartitionSync(mtmv, partition.getId(), baseTables, mtmv.getExcludedTriggerTables())) { res.add(partition.getId()); } } catch (AnalysisException e) { res.add(partition.getId()); LOG.warn("check isMTMVPartitionSync failed", e); } } return res; } /** * compare last update time of mtmvPartition and tablePartition * * @param mtmv * @param mtmvPartitionId * @param relatedTable * @param relatedPartitionId * @return * @throws AnalysisException */ public static boolean isSyncWithPartition(MTMV mtmv, Long mtmvPartitionId, MTMVRelatedTableIf relatedTable, Long relatedPartitionId) throws AnalysisException { if (!relatedTable.needAutoRefresh()) { return true; } MTMVSnapshotIf relatedPartitionCurrentSnapshot = relatedTable .getPartitionSnapshot(relatedPartitionId); String relatedPartitionName = 
relatedTable.getPartitionName(relatedPartitionId); String mtmvPartitionName = mtmv.getPartitionName(mtmvPartitionId); return mtmv.getRefreshSnapshot() .equalsWithRelatedPartition(mtmvPartitionName, relatedPartitionName, relatedPartitionCurrentSnapshot); } /** * like p_00000101_20170201 * * @param desc * @return */ /** * drop partition of mtmv * * @param mtmv * @param partitionId */ private static void dropPartition(MTMV mtmv, Long partitionId) throws AnalysisException, DdlException { if (!mtmv.writeLockIfExist()) { return; } try { Partition partition = mtmv.getPartitionOrAnalysisException(partitionId); DropPartitionClause dropPartitionClause = new DropPartitionClause(false, partition.getName(), false, false); Env.getCurrentEnv().dropPartition((Database) mtmv.getDatabase(), mtmv, dropPartitionClause); } finally { mtmv.writeUnlock(); } } /** * add partition for mtmv like relatedPartitionId of relatedTable * * @param mtmv * @param partitionItem * @throws DdlException */ private static void addPartition(MTMV mtmv, PartitionItem partitionItem) throws DdlException { PartitionKeyDesc oldPartitionKeyDesc = partitionItem.toPartitionKeyDesc(); Map<String, String> partitionProperties = Maps.newHashMap(); SinglePartitionDesc singlePartitionDesc = new SinglePartitionDesc(true, generatePartitionName(oldPartitionKeyDesc), oldPartitionKeyDesc, partitionProperties); AddPartitionClause addPartitionClause = new AddPartitionClause(singlePartitionDesc, mtmv.getDefaultDistributionInfo().toDistributionDesc(), partitionProperties, false); Env.getCurrentEnv().addPartition((Database) mtmv.getDatabase(), mtmv.getName(), addPartitionClause); } /** * compare PartitionItem and return equals partitionId * if not found, return -1L * * @param target * @param sources * @return */ private static long getExistPartitionId(PartitionItem target, Map<Long, PartitionItem> sources) { for (Entry<Long, PartitionItem> entry : sources.entrySet()) { if (target.equals(entry.getValue())) { return entry.getKey(); } } return -1L; } /** * Determine is sync, ignoring excludedTriggerTables and non OlapTanle * * @param mtmvPartitionId * @param tables * @param excludedTriggerTables * @return */ private static boolean isSyncWithAllBaseTables(MTMV mtmv, long mtmvPartitionId, Set<BaseTableInfo> tables, Set<String> excludedTriggerTables) throws AnalysisException { for (BaseTableInfo baseTableInfo : tables) { TableIf table = null; try { table = MTMVUtil.getTable(baseTableInfo); } catch (AnalysisException e) { LOG.warn("get table failed, {}", baseTableInfo, e); return false; } if (excludedTriggerTables.contains(table.getName())) { continue; } boolean syncWithBaseTable = isSyncWithBaseTable(mtmv, mtmvPartitionId, baseTableInfo); if (!syncWithBaseTable) { return false; } } return true; } private static boolean isSyncWithBaseTable(MTMV mtmv, long mtmvPartitionId, BaseTableInfo baseTableInfo) throws AnalysisException { TableIf table = null; try { table = MTMVUtil.getTable(baseTableInfo); } catch (AnalysisException e) { LOG.warn("get table failed, {}", baseTableInfo, e); return false; } if (!(table instanceof MTMVRelatedTableIf)) { return true; } MTMVRelatedTableIf baseTable = (MTMVRelatedTableIf) table; if (!baseTable.needAutoRefresh()) { return true; } MTMVSnapshotIf baseTableCurrentSnapshot = baseTable.getTableSnapshot(); String mtmvPartitionName = mtmv.getPartitionName(mtmvPartitionId); return mtmv.getRefreshSnapshot() .equalsWithBaseTable(mtmvPartitionName, baseTable.getId(), baseTableCurrentSnapshot); } public static Map<String, 
MTMVRefreshPartitionSnapshot> generatePartitionSnapshots(MTMV mtmv, Set<BaseTableInfo> baseTables, Set<Long> partitionIds) throws AnalysisException { Map<String, MTMVRefreshPartitionSnapshot> res = Maps.newHashMap(); for (Long partitionId : partitionIds) { res.put(mtmv.getPartition(partitionId).getName(), generatePartitionSnapshot(mtmv, baseTables, partitionId)); } return res; } private static MTMVRefreshPartitionSnapshot generatePartitionSnapshot(MTMV mtmv, Set<BaseTableInfo> baseTables, Long partitionId) throws AnalysisException { MTMVRefreshPartitionSnapshot refreshPartitionSnapshot = new MTMVRefreshPartitionSnapshot(); if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE) { MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); List<Long> relatedPartitionIds = getMTMVPartitionRelatedPartitions( mtmv.getPartitionItems().get(partitionId), relatedTable); for (Long relatedPartitionId : relatedPartitionIds) { MTMVSnapshotIf partitionSnapshot = relatedTable .getPartitionSnapshot(relatedPartitionId); refreshPartitionSnapshot.getPartitions() .put(relatedTable.getPartitionName(relatedPartitionId), partitionSnapshot); } } for (BaseTableInfo baseTableInfo : baseTables) { if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE && mtmv .getMvPartitionInfo().getRelatedTableInfo().equals(baseTableInfo)) { continue; } TableIf table = MTMVUtil.getTable(baseTableInfo); if (!(table instanceof MTMVRelatedTableIf)) { continue; } refreshPartitionSnapshot.getTables().put(table.getId(), ((MTMVRelatedTableIf) table).getTableSnapshot()); } return refreshPartitionSnapshot; } private static List<Long> getMTMVPartitionRelatedPartitions(PartitionItem mtmvPartitionItem, MTMVRelatedTableIf relatedTable) { List<Long> res = Lists.newArrayList(); Map<Long, PartitionItem> relatedPartitionItems = relatedTable.getPartitionItems(); for (Entry<Long, PartitionItem> entry : relatedPartitionItems.entrySet()) { if (mtmvPartitionItem.equals(entry.getValue())) { res.add(entry.getKey()); return res; } } return res; } }
class MTMVPartitionUtil { private static final Logger LOG = LogManager.getLogger(MTMVPartitionUtil.class); private static final Pattern PARTITION_NAME_PATTERN = Pattern.compile("[^a-zA-Z0-9,]"); private static final String PARTITION_NAME_PREFIX = "p_"; /** * Determine whether the partition is sync with retated partition and other baseTables * * @param mtmv * @param partitionId * @param tables * @param excludedTriggerTables * @return * @throws AnalysisException */ public static boolean isMTMVPartitionSync(MTMV mtmv, Long partitionId, Set<BaseTableInfo> tables, Set<String> excludedTriggerTables) throws AnalysisException { boolean isSyncWithPartition = true; if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE) { MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); excludedTriggerTables.add(relatedTable.getName()); PartitionItem item = mtmv.getPartitionInfo().getItemOrAnalysisException(partitionId); Map<Long, PartitionItem> relatedPartitionItems = relatedTable.getPartitionItems(); long relatedPartitionId = getExistPartitionId(item, relatedPartitionItems); if (relatedPartitionId == -1L) { LOG.warn("can not found related partition: " + partitionId); return false; } isSyncWithPartition = isSyncWithPartition(mtmv, partitionId, relatedTable, relatedPartitionId); } return isSyncWithPartition && isSyncWithAllBaseTables(mtmv, partitionId, tables, excludedTriggerTables); } /** * Align the partitions of mtmv and related tables, delete more and add less * * @param mtmv * @param relatedTable * @throws DdlException * @throws AnalysisException */ public static void alignMvPartition(MTMV mtmv, MTMVRelatedTableIf relatedTable) throws DdlException, AnalysisException { Map<Long, PartitionItem> relatedTableItems = Maps.newHashMap(relatedTable.getPartitionItems()); Map<Long, PartitionItem> mtmvItems = Maps.newHashMap(mtmv.getPartitionItems()); for (Entry<Long, PartitionItem> entry : mtmvItems.entrySet()) { long partitionId = getExistPartitionId(entry.getValue(), relatedTableItems); if (partitionId == -1L) { dropPartition(mtmv, entry.getKey()); } } for (Entry<Long, PartitionItem> entry : relatedTableItems.entrySet()) { long partitionId = getExistPartitionId(entry.getValue(), mtmvItems); if (partitionId == -1L) { addPartition(mtmv, entry.getValue()); } } } /** * getPartitionDescsByRelatedTable when create MTMV * * @param relatedTable * @param tableProperties * @return * @throws AnalysisException */ public static List<AllPartitionDesc> getPartitionDescsByRelatedTable(MTMVRelatedTableIf relatedTable, Map<String, String> tableProperties) throws AnalysisException { HashMap<String, String> partitionProperties = Maps.newHashMap(); List<AllPartitionDesc> res = Lists.newArrayList(); Map<Long, PartitionItem> relatedTableItems = relatedTable.getPartitionItems(); for (Entry<Long, PartitionItem> entry : relatedTableItems.entrySet()) { PartitionKeyDesc oldPartitionKeyDesc = entry.getValue().toPartitionKeyDesc(); SinglePartitionDesc singlePartitionDesc = new SinglePartitionDesc(true, generatePartitionName(oldPartitionKeyDesc), oldPartitionKeyDesc, partitionProperties); singlePartitionDesc.analyze(1, tableProperties); res.add(singlePartitionDesc); } return res; } public static List<String> getPartitionNamesByIds(MTMV mtmv, Collection<Long> ids) throws AnalysisException { List<String> res = Lists.newArrayList(); for (Long partitionId : ids) { res.add(mtmv.getPartitionOrAnalysisException(partitionId).getName()); } return res; } public static List<Long> 
getPartitionsIdsByNames(MTMV mtmv, List<String> partitions) throws AnalysisException { List<Long> res = Lists.newArrayList(); for (String partitionName : partitions) { Partition partition = mtmv.getPartitionOrAnalysisException(partitionName); res.add(partition.getId()); } return res; } /** * check if table is sync with all baseTables * * @param mtmv * @return */ public static boolean isMTMVSync(MTMV mtmv) { MTMVRelation mtmvRelation = mtmv.getRelation(); if (mtmvRelation == null) { return false; } try { return isMTMVSync(mtmv, mtmvRelation.getBaseTables(), Sets.newHashSet()); } catch (AnalysisException e) { LOG.warn("isMTMVSync failed: ", e); return false; } } /** * Determine whether the mtmv is sync with tables * * @param mtmv * @param tables * @param excludeTables * @return * @throws AnalysisException */ public static boolean isMTMVSync(MTMV mtmv, Set<BaseTableInfo> tables, Set<String> excludeTables) throws AnalysisException { Collection<Partition> partitions = mtmv.getPartitions(); for (Partition partition : partitions) { if (!isMTMVPartitionSync(mtmv, partition.getId(), tables, excludeTables)) { return false; } } return true; } /** * get not sync tables * * @param mtmv * @param partitionId * @return * @throws AnalysisException */ public static List<String> getPartitionUnSyncTables(MTMV mtmv, Long partitionId) throws AnalysisException { List<String> res = Lists.newArrayList(); for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { TableIf table = MTMVUtil.getTable(baseTableInfo); if (!(table instanceof MTMVRelatedTableIf)) { continue; } MTMVRelatedTableIf mtmvRelatedTableIf = (MTMVRelatedTableIf) table; if (!mtmvRelatedTableIf.needAutoRefresh()) { continue; } if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE && mtmv .getMvPartitionInfo().getRelatedTableInfo().equals(baseTableInfo)) { PartitionItem item = mtmv.getPartitionInfo().getItemOrAnalysisException(partitionId); Map<Long, PartitionItem> relatedPartitionItems = mtmvRelatedTableIf.getPartitionItems(); long relatedPartitionId = getExistPartitionId(item, relatedPartitionItems); if (relatedPartitionId == -1L) { throw new AnalysisException("can not found related partition"); } boolean isSyncWithPartition = isSyncWithPartition(mtmv, partitionId, mtmvRelatedTableIf, relatedPartitionId); if (!isSyncWithPartition) { res.add(mtmvRelatedTableIf.getName()); } } else { if (!isSyncWithBaseTable(mtmv, partitionId, baseTableInfo)) { res.add(table.getName()); } } } return res; } /** * Get the partitions that need to be refreshed * * @param mtmv * @param baseTables * @return */ public static List<Long> getMTMVNeedRefreshPartitions(MTMV mtmv, Set<BaseTableInfo> baseTables) { Collection<Partition> allPartitions = mtmv.getPartitions(); List<Long> res = Lists.newArrayList(); for (Partition partition : allPartitions) { try { if (!isMTMVPartitionSync(mtmv, partition.getId(), baseTables, mtmv.getExcludedTriggerTables())) { res.add(partition.getId()); } } catch (AnalysisException e) { res.add(partition.getId()); LOG.warn("check isMTMVPartitionSync failed", e); } } return res; } /** * compare last update time of mtmvPartition and tablePartition * * @param mtmv * @param mtmvPartitionId * @param relatedTable * @param relatedPartitionId * @return * @throws AnalysisException */ public static boolean isSyncWithPartition(MTMV mtmv, Long mtmvPartitionId, MTMVRelatedTableIf relatedTable, Long relatedPartitionId) throws AnalysisException { if (!relatedTable.needAutoRefresh()) { return true; } MTMVSnapshotIf 
relatedPartitionCurrentSnapshot = relatedTable .getPartitionSnapshot(relatedPartitionId); String relatedPartitionName = relatedTable.getPartitionName(relatedPartitionId); String mtmvPartitionName = mtmv.getPartitionName(mtmvPartitionId); return mtmv.getRefreshSnapshot() .equalsWithRelatedPartition(mtmvPartitionName, relatedPartitionName, relatedPartitionCurrentSnapshot); } /** * like p_00000101_20170201 * * @param desc * @return */ /** * drop partition of mtmv * * @param mtmv * @param partitionId */ private static void dropPartition(MTMV mtmv, Long partitionId) throws AnalysisException, DdlException { if (!mtmv.writeLockIfExist()) { return; } try { Partition partition = mtmv.getPartitionOrAnalysisException(partitionId); DropPartitionClause dropPartitionClause = new DropPartitionClause(false, partition.getName(), false, false); Env.getCurrentEnv().dropPartition((Database) mtmv.getDatabase(), mtmv, dropPartitionClause); } finally { mtmv.writeUnlock(); } } /** * add partition for mtmv like relatedPartitionId of relatedTable * * @param mtmv * @param partitionItem * @throws DdlException */ private static void addPartition(MTMV mtmv, PartitionItem partitionItem) throws DdlException { PartitionKeyDesc oldPartitionKeyDesc = partitionItem.toPartitionKeyDesc(); Map<String, String> partitionProperties = Maps.newHashMap(); SinglePartitionDesc singlePartitionDesc = new SinglePartitionDesc(true, generatePartitionName(oldPartitionKeyDesc), oldPartitionKeyDesc, partitionProperties); AddPartitionClause addPartitionClause = new AddPartitionClause(singlePartitionDesc, mtmv.getDefaultDistributionInfo().toDistributionDesc(), partitionProperties, false); Env.getCurrentEnv().addPartition((Database) mtmv.getDatabase(), mtmv.getName(), addPartitionClause); } /** * compare PartitionItem and return equals partitionId * if not found, return -1L * * @param target * @param sources * @return */ private static long getExistPartitionId(PartitionItem target, Map<Long, PartitionItem> sources) { for (Entry<Long, PartitionItem> entry : sources.entrySet()) { if (target.equals(entry.getValue())) { return entry.getKey(); } } return -1L; } /** * Determine is sync, ignoring excludedTriggerTables and non OlapTanle * * @param mtmvPartitionId * @param tables * @param excludedTriggerTables * @return */ private static boolean isSyncWithAllBaseTables(MTMV mtmv, long mtmvPartitionId, Set<BaseTableInfo> tables, Set<String> excludedTriggerTables) throws AnalysisException { for (BaseTableInfo baseTableInfo : tables) { TableIf table = null; try { table = MTMVUtil.getTable(baseTableInfo); } catch (AnalysisException e) { LOG.warn("get table failed, {}", baseTableInfo, e); return false; } if (excludedTriggerTables.contains(table.getName())) { continue; } boolean syncWithBaseTable = isSyncWithBaseTable(mtmv, mtmvPartitionId, baseTableInfo); if (!syncWithBaseTable) { return false; } } return true; } private static boolean isSyncWithBaseTable(MTMV mtmv, long mtmvPartitionId, BaseTableInfo baseTableInfo) throws AnalysisException { TableIf table = null; try { table = MTMVUtil.getTable(baseTableInfo); } catch (AnalysisException e) { LOG.warn("get table failed, {}", baseTableInfo, e); return false; } if (!(table instanceof MTMVRelatedTableIf)) { return true; } MTMVRelatedTableIf baseTable = (MTMVRelatedTableIf) table; if (!baseTable.needAutoRefresh()) { return true; } MTMVSnapshotIf baseTableCurrentSnapshot = baseTable.getTableSnapshot(); String mtmvPartitionName = mtmv.getPartitionName(mtmvPartitionId); return mtmv.getRefreshSnapshot() 
.equalsWithBaseTable(mtmvPartitionName, baseTable.getId(), baseTableCurrentSnapshot); } public static Map<String, MTMVRefreshPartitionSnapshot> generatePartitionSnapshots(MTMV mtmv, Set<BaseTableInfo> baseTables, Set<Long> partitionIds) throws AnalysisException { Map<String, MTMVRefreshPartitionSnapshot> res = Maps.newHashMap(); for (Long partitionId : partitionIds) { res.put(mtmv.getPartition(partitionId).getName(), generatePartitionSnapshot(mtmv, baseTables, partitionId)); } return res; } private static MTMVRefreshPartitionSnapshot generatePartitionSnapshot(MTMV mtmv, Set<BaseTableInfo> baseTables, Long partitionId) throws AnalysisException { MTMVRefreshPartitionSnapshot refreshPartitionSnapshot = new MTMVRefreshPartitionSnapshot(); if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE) { MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); List<Long> relatedPartitionIds = getMTMVPartitionRelatedPartitions( mtmv.getPartitionItems().get(partitionId), relatedTable); for (Long relatedPartitionId : relatedPartitionIds) { MTMVSnapshotIf partitionSnapshot = relatedTable .getPartitionSnapshot(relatedPartitionId); refreshPartitionSnapshot.getPartitions() .put(relatedTable.getPartitionName(relatedPartitionId), partitionSnapshot); } } for (BaseTableInfo baseTableInfo : baseTables) { if (mtmv.getMvPartitionInfo().getPartitionType() == MTMVPartitionType.FOLLOW_BASE_TABLE && mtmv .getMvPartitionInfo().getRelatedTableInfo().equals(baseTableInfo)) { continue; } TableIf table = MTMVUtil.getTable(baseTableInfo); if (!(table instanceof MTMVRelatedTableIf)) { continue; } refreshPartitionSnapshot.getTables().put(table.getId(), ((MTMVRelatedTableIf) table).getTableSnapshot()); } return refreshPartitionSnapshot; } private static List<Long> getMTMVPartitionRelatedPartitions(PartitionItem mtmvPartitionItem, MTMVRelatedTableIf relatedTable) { List<Long> res = Lists.newArrayList(); Map<Long, PartitionItem> relatedPartitionItems = relatedTable.getPartitionItems(); for (Entry<Long, PartitionItem> entry : relatedPartitionItems.entrySet()) { if (mtmvPartitionItem.equals(entry.getValue())) { res.add(entry.getKey()); return res; } } return res; } }
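A compact sketch of the change this record makes: hoisting the regex into a `static final Pattern` compiles it once per class instead of once per `generatePartitionName` call, while producing the same names. The `sanitize` helper and sample input below are illustrative only:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PartitionNameSketch {
    private static final String PARTITION_NAME_PREFIX = "p_";
    // Compiled once; Pattern instances are immutable and safe to share across threads.
    private static final Pattern PARTITION_NAME_PATTERN = Pattern.compile("[^a-zA-Z0-9,]");

    static String sanitize(String sql) {
        Matcher matcher = PARTITION_NAME_PATTERN.matcher(sql);
        // Drop disallowed characters, then turn the remaining commas into underscores.
        return PARTITION_NAME_PREFIX + matcher.replaceAll("").replaceAll("\\,", "_");
    }

    public static void main(String[] args) {
        System.out.println(sanitize("(00000101, 20170201)")); // prints p_00000101_20170201
    }
}
```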
I didn't see the purpose of re-wrapping an IOException.
public boolean start() throws IOException { try { ConnectionHandler connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); String queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { if (source.spec.exchangeDeclare()) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); } if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; consumer = new QueueingConsumer(channel); channel.txSelect(); channel.setDefaultConsumer(consumer); channel.basicConsume(queueName, false, consumer); } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return advance(); }
channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey());
public boolean start() throws IOException { try { connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { if (source.spec.exchangeDeclare()) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); } if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; channel.txSelect(); } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return advance(); }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private QueueingConsumer consumer; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); try { connectionHandler = new ConnectionHandler(source.spec.uri()); } catch (Exception e) { throw new IOException(e); } } @Override public Instant getWatermark() { return checkpointMark.oldestTimestamp; } @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public boolean advance() throws IOException { try { QueueingConsumer.Delivery delivery = consumer.nextDelivery(1000); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProperties().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProperties().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private String queueName; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); } @Override public Instant getWatermark() { return checkpointMark.oldestTimestamp; } @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public boolean advance() throws IOException { try { Channel channel = connectionHandler.getChannel(); GetResponse delivery = channel.basicGet(queueName, false); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProps().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProps().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
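The pattern introduced in this record can be shown in isolation: catching `IOException` first and rethrowing it unchanged avoids the pointless re-wrap the comment flags, while other checked exceptions still get wrapped exactly once. `doIo` is a hypothetical stand-in for the channel calls:

```java
import java.io.IOException;

public class RethrowSketch {
    static void doIo() throws Exception { // hypothetical helper for illustration
        throw new IOException("boom");
    }

    static void start() throws IOException {
        try {
            doIo();
        } catch (IOException e) {
            throw e; // rethrow as-is: original type and stack trace are preserved
        } catch (Exception e) {
            throw new IOException(e); // wrap only genuinely foreign exceptions
        }
    }

    public static void main(String[] args) {
        try {
            start();
        } catch (IOException e) {
            System.out.println(e.getMessage()); // prints "boom", not a wrapped cause
        }
    }
}
```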
Better to check `!isAbsolute()` and append; otherwise set the absolute path.
private void setLogProperties() { final Path projectRoot = Paths.get(System.getProperty(USER_DIR)); if (projectRoot != null) { String logDir = getTransactionLogDirectory(); String logPath = projectRoot.toAbsolutePath().toString() + File.separatorChar + logDir; Path transactionLogDirectory = Paths.get(logPath); if (!Files.exists(transactionLogDirectory)) { try { Files.createDirectory(transactionLogDirectory); } catch (IOException e) { stderr.println("error: failed to create '" + logDir + "' transaction log directory"); } } System.setProperty(ATOMIKOS_LOG_BASE_PROPERTY, logPath); System.setProperty(ATOMIKOS_LOG_NAME_PROPERTY, "transaction_recovery"); System.setProperty(ATOMIKOS_REGISTERED_PROPERTY, "not-registered"); } }
String logPath = projectRoot.toAbsolutePath().toString() + File.separatorChar + logDir;
private void setLogProperties() { final Path projectRoot = Paths.get(RuntimeUtils.USER_DIR); if (projectRoot != null) { String logDir = getTransactionLogDirectory(); Path logDirPath = Paths.get(logDir); Path transactionLogDirectory; if (!logDirPath.isAbsolute()) { logDir = projectRoot.toAbsolutePath().toString() + File.separatorChar + logDir; transactionLogDirectory = Paths.get(logDir); } else { transactionLogDirectory = logDirPath; } if (!Files.exists(transactionLogDirectory)) { try { Files.createDirectory(transactionLogDirectory); } catch (IOException e) { stderr.println("error: failed to create transaction log directory in " + logDir); } } System.setProperty(ATOMIKOS_LOG_BASE_PROPERTY, logDir); System.setProperty(ATOMIKOS_LOG_NAME_PROPERTY, "transaction_recovery"); System.setProperty(ATOMIKOS_REGISTERED_PROPERTY, "not-registered"); } }
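As a side note to the fix above, `java.nio.file.Path.resolve` already implements the suggested branch: if the argument is absolute it is returned as-is, otherwise it is appended to the base path. A minimal sketch; the paths are examples only:

```java
import java.nio.file.Path;
import java.nio.file.Paths;

public class LogDirSketch {
    static Path transactionLogDirectory(Path projectRoot, String logDir) {
        // resolve() returns logDir unchanged when it is absolute,
        // and projectRoot/logDir when it is relative.
        return projectRoot.toAbsolutePath().resolve(logDir);
    }

    public static void main(String[] args) {
        Path root = Paths.get("/work/project");
        System.out.println(transactionLogDirectory(root, "transaction_log_dir"));
        System.out.println(transactionLogDirectory(root, "/var/log/tx"));
    }
}
```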
class TransactionResourceManager { private static TransactionResourceManager transactionResourceManager = null; private static UserTransactionManager userTransactionManager = null; private static final StrandMetadata COMMIT_METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, TRANSACTION_PACKAGE_NAME, TRANSACTION_PACKAGE_VERSION, "onCommit"); private static final StrandMetadata ROLLBACK_METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, TRANSACTION_PACKAGE_NAME, TRANSACTION_PACKAGE_VERSION, "onRollback"); private static final String ATOMIKOS_LOG_BASE_PROPERTY = "com.atomikos.icatch.log_base_dir"; private static final String ATOMIKOS_LOG_NAME_PROPERTY = "com.atomikos.icatch.log_base_name"; private static final String ATOMIKOS_REGISTERED_PROPERTY = "com.atomikos.icatch.registered"; private static final String CONFIG_TRANSACTION_MANAGER_ENABLED = "b7a.transaction.manager.enabled"; private static final String CONFIG_TRANSACTION_LOG_BASE = "b7a.transaction.log.base"; public static final String USER_DIR = "user.dir"; private static final ConfigRegistry CONFIG_REGISTRY = ConfigRegistry.getInstance(); private static final Logger log = LoggerFactory.getLogger(TransactionResourceManager.class); private Map<String, List<BallerinaTransactionContext>> resourceRegistry; private Map<String, Transaction> trxRegistry; private Map<String, Xid> xidRegistry; private Map<String, List<BFunctionPointer>> committedFuncRegistry; private Map<String, List<BFunctionPointer>> abortedFuncRegistry; private ConcurrentSkipListSet<String> failedResourceParticipantSet = new ConcurrentSkipListSet<>(); private ConcurrentSkipListSet<String> failedLocalParticipantSet = new ConcurrentSkipListSet<>(); private ConcurrentHashMap<String, ConcurrentSkipListSet<String>> localParticipants = new ConcurrentHashMap<>(); private boolean transactionManagerEnabled; private static final PrintStream stderr = System.err; public Map<BArray, Object> transactionInfoMap; private TransactionResourceManager() { resourceRegistry = new HashMap<>(); committedFuncRegistry = new HashMap<>(); abortedFuncRegistry = new HashMap<>(); transactionInfoMap = new HashMap<>(); transactionManagerEnabled = getTransactionManagerEnabled(); if (transactionManagerEnabled) { trxRegistry = new HashMap<>(); setLogProperties(); userTransactionManager = new UserTransactionManager(); } else { xidRegistry = new HashMap<>(); } } public static TransactionResourceManager getInstance() { if (transactionResourceManager == null) { synchronized (TransactionResourceManager.class) { if (transactionResourceManager == null) { transactionResourceManager = new TransactionResourceManager(); } } } return transactionResourceManager; } /** * This method sets values for atomikos transaction log path and name properties using the available configs. * */ /** * This method checks whether the atomikos transaction manager should be enabled or not. * * @return boolean whether the atomikos transaction manager should be enabled or not */ public boolean getTransactionManagerEnabled() { boolean transactionManagerEnabled = CONFIG_REGISTRY.getAsBoolean(CONFIG_TRANSACTION_MANAGER_ENABLED); return transactionManagerEnabled; } /** * This method gets the user specified config for log directory name. 
* * @return string log directory name */ private String getTransactionLogDirectory() { String transactionLogDirectory = CONFIG_REGISTRY.getAsString(CONFIG_TRANSACTION_LOG_BASE); if (transactionLogDirectory != null) { return transactionLogDirectory; } return "transaction_log_dir"; } /** * This method will register connection resources with a particular transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param txContext ballerina transaction context which includes the underlying connection info */ public void register(String transactionId, String transactionBlockId, BallerinaTransactionContext txContext) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); resourceRegistry.computeIfAbsent(combinedId, resourceList -> new ArrayList<>()).add(txContext); } /** * This method will register a committed function handler of a particular transaction. * * @param transactionBlockId the block id of the transaction * @param fpValue the function pointer for the committed function */ public void registerCommittedFunction(String transactionBlockId, BFunctionPointer fpValue) { if (fpValue != null) { committedFuncRegistry.computeIfAbsent(transactionBlockId, list -> new ArrayList<>()).add(fpValue); } } /** * This method will register an aborted function handler of a particular transaction. * * @param transactionBlockId the block id of the transaction * @param fpValue the function pointer for the aborted function */ public void registerAbortedFunction(String transactionBlockId, BFunctionPointer fpValue) { if (fpValue != null) { abortedFuncRegistry.computeIfAbsent(transactionBlockId, list -> new ArrayList<>()).add(fpValue); } } /** * Register a participation in a global transaction. * * @param gTransactionId global transaction id * @param transactionBlockId participant identifier * @since 0.990.0 */ public void registerParticipation(String gTransactionId, String transactionBlockId) { localParticipants.computeIfAbsent(gTransactionId, gid -> new ConcurrentSkipListSet<>()).add(transactionBlockId); TransactionLocalContext transactionLocalContext = Scheduler.getStrand().currentTrxContext; transactionLocalContext.beginTransactionBlock(transactionBlockId); } /** * This method acts as the callback which notify all the resources participated in the given transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @return the status of the prepare operation */ public boolean prepare(String transactionId, String transactionBlockId) { if (transactionManagerEnabled) { return true; } String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { Xid xid = xidRegistry.get(combinedId); for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { xaResource.prepare(xid); } } catch (XAException e) { log.error("error at transaction prepare phase in transaction " + transactionId + ":" + e.getMessage(), e); return false; } } } boolean status = true; if (failedResourceParticipantSet.contains(transactionId) || failedLocalParticipantSet.contains(transactionId)) { status = false; } log.info(String.format("Transaction prepare (participants): %s", status ? 
"success" : "failed")); return status; } /** * This method acts as the callback which commits all the resources participated in the given transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @return the status of the commit operation */ public boolean notifyCommit(String transactionId, String transactionBlockId) { Strand strand = Scheduler.getStrand(); endXATransaction(transactionId, transactionBlockId); String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); boolean commitSuccess = true; List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx != null) { trx.commit(); } } catch (SystemException | HeuristicMixedException | HeuristicRollbackException | RollbackException e) { log.error("error when committing transaction " + transactionId + ":" + e.getMessage(), e); commitSuccess = false; } } for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (transactionManagerEnabled && xaResource == null) { ctx.commit(); } else { if (xaResource != null) { Xid xid = xidRegistry.get(combinedId); xaResource.commit(xid, false); } else { ctx.commit(); } } } catch (XAException e) { log.error("error when committing transaction " + transactionId + ":" + e.getMessage(), e); commitSuccess = false; } finally { ctx.close(); } } } invokeCommittedFunction(strand, transactionId, transactionBlockId); removeContextsFromRegistry(combinedId, transactionId); failedResourceParticipantSet.remove(transactionId); failedLocalParticipantSet.remove(transactionId); localParticipants.remove(transactionId); return commitSuccess; } /** * This method acts as the callback which aborts all the resources participated in the given transaction. 
* * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param error the cause of abortion * @return the status of the abort operation */ public boolean notifyAbort(String transactionId, String transactionBlockId, Object error) { Strand strand = Scheduler.getStrand(); String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); boolean abortSuccess = true; List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx != null) { trx.rollback(); } } catch (SystemException e) { log.error("error when aborting transaction " + transactionId + ":" + e.getMessage(), e); abortSuccess = false; } } for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (transactionManagerEnabled && xaResource == null) { ctx.rollback(); } else { Xid xid = xidRegistry.get(combinedId); if (xaResource != null) { ctx.getXAResource().rollback(xid); } else { ctx.rollback(); } } } catch (XAException e) { log.error("error when aborting the transaction " + transactionId + ":" + e.getMessage(), e); abortSuccess = false; } finally { ctx.close(); } } } invokeAbortedFunction(strand, transactionId, transactionBlockId, error); removeContextsFromRegistry(combinedId, transactionId); failedResourceParticipantSet.remove(transactionId); failedLocalParticipantSet.remove(transactionId); localParticipants.remove(transactionId); return abortSuccess; } /** * This method starts a transaction for the given xa resource. If there is no transaction is started for the * given XID a new transaction is created. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param xaResource the XA resource which participates in the transaction */ public void beginXATransaction(String transactionId, String transactionBlockId, XAResource xaResource) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx == null) { userTransactionManager.begin(); trx = userTransactionManager.getTransaction(); trxRegistry.put(combinedId, trx); } } catch (SystemException | NotSupportedException e) { log.error("error in initiating transaction " + transactionId + ":" + e.getMessage(), e); } } else { Xid xid = xidRegistry.get(combinedId); if (xid == null) { xid = XIDGenerator.createXID(); xidRegistry.put(combinedId, xid); } try { xaResource.start(xid, TMNOFLAGS); } catch (XAException e) { log.error("error in starting XA transaction " + transactionId + ":" + e.getMessage(), e); } } } /** * Cleanup the Info record keeping state related to current transaction context and remove the current * context from the stack. */ public void cleanupTransactionContext() { Strand strand = Scheduler.getStrand(); TransactionLocalContext transactionLocalContext = strand.currentTrxContext; transactionLocalContext.removeTransactionInfo(); strand.removeCurrentTrxContext(); } /** * This method returns true if there is a failure of the current transaction, otherwise false. * @return true if there is a failure of the current transaction. */ public boolean getAndClearFailure() { return Scheduler.getStrand().currentTrxContext.getAndClearFailure() != null; } /** * This method is used to get the error which is set by calling setRollbackOnly(). 
* If it is not set, then returns null. * @return the error or null. */ public Object getRollBackOnlyError() { TransactionLocalContext transactionLocalContext = Scheduler.getStrand().currentTrxContext; return transactionLocalContext.getRollbackOnly(); } /** * This method checks if the current strand is in a transaction or not. * @return True if the current strand is in a transaction. */ public boolean isInTransaction() { return Scheduler.getStrand().isInTransaction(); } /** * This method rollbacks the given transaction. * @param transactionBlockId The transaction blockId * @param error The error which caused rolling back. */ public void rollbackTransaction(String transactionBlockId, Object error) { Scheduler.getStrand().currentTrxContext.rollbackTransaction(transactionBlockId, error); } /** * This method marks the current transaction context as non-transactional. */ public void setContextNonTransactional() { Scheduler.getStrand().currentTrxContext.setTransactional(false); } /** * This method set the given transaction context as the current transaction context in the stack. * @param trxCtx The input transaction context */ public void setCurrentTransactionContext(TransactionLocalContext trxCtx) { Scheduler.getStrand().setCurrentTransactionContext(trxCtx); } /** * This method returns the current transaction context. * @return The current Transaction Context */ public TransactionLocalContext getCurrentTransactionContext() { return Scheduler.getStrand().currentTrxContext; } /** * This method marks the end of a transaction for the given transaction id. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction */ void endXATransaction(String transactionId, String transactionBlockId) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); if (trx != null) { List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { trx.delistResource(xaResource, TMSUCCESS); } } catch (IllegalStateException | SystemException e) { log.error("error in ending the XA transaction " + transactionId + ":" + e.getMessage(), e); } } } } } else { Xid xid = xidRegistry.get(combinedId); List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (xid != null && txContextList != null) { for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { ctx.getXAResource().end(xid, TMSUCCESS); } } catch (XAException e) { log.error("error in ending XA transaction " + transactionId + ":" + e.getMessage(), e); } } } } } void rollbackTransaction(String transactionId, String transactionBlockId, Object error) { endXATransaction(transactionId, transactionBlockId); notifyAbort(transactionId, transactionBlockId, error); } private void removeContextsFromRegistry(String transactionCombinedId, String gTransactionId) { resourceRegistry.remove(transactionCombinedId); if (transactionManagerEnabled) { trxRegistry.remove(transactionCombinedId); } else { xidRegistry.remove(transactionCombinedId); } } private String generateCombinedTransactionId(String transactionId, String transactionBlockId) { return transactionId + ":" + transactionBlockId; } private void invokeCommittedFunction(Strand strand, String 
transactionId, String transactionBlockId) { List<BFunctionPointer> fpValueList = committedFuncRegistry.get(transactionId); if (fpValueList != null) { Object[] args = {strand, strand.currentTrxContext.getInfoRecord(), true}; for (int i = fpValueList.size(); i > 0; i--) { BFunctionPointer fp = fpValueList.get(i - 1); fp.getFunction().apply(args); } } } private void invokeAbortedFunction(Strand strand, String transactionId, String transactionBlockId, Object error) { List<BFunctionPointer> fpValueList = abortedFuncRegistry.get(transactionId); if (fpValueList != null) { Object[] args = {strand, strand.currentTrxContext.getInfoRecord(), true, error, true, false, true}; for (int i = fpValueList.size(); i > 0; i--) { BFunctionPointer fp = fpValueList.get(i - 1); fp.getFunction().apply(args); } } } public void notifyResourceFailure(String gTransactionId) { failedResourceParticipantSet.add(gTransactionId); log.info("Trx infected callable unit excepted id : " + gTransactionId); } public void notifyLocalParticipantFailure(String gTransactionId, String blockId) { ConcurrentSkipListSet<String> participantBlockIds = localParticipants.get(gTransactionId); if (participantBlockIds != null && participantBlockIds.contains(blockId)) { failedLocalParticipantSet.add(gTransactionId); } } }
class TransactionResourceManager { private static TransactionResourceManager transactionResourceManager = null; private static UserTransactionManager userTransactionManager = null; private static final StrandMetadata COMMIT_METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, TRANSACTION_PACKAGE_NAME, TRANSACTION_PACKAGE_VERSION, "onCommit"); private static final StrandMetadata ROLLBACK_METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, TRANSACTION_PACKAGE_NAME, TRANSACTION_PACKAGE_VERSION, "onRollback"); private static final String ATOMIKOS_LOG_BASE_PROPERTY = "com.atomikos.icatch.log_base_dir"; private static final String ATOMIKOS_LOG_NAME_PROPERTY = "com.atomikos.icatch.log_base_name"; private static final String ATOMIKOS_REGISTERED_PROPERTY = "com.atomikos.icatch.registered"; private static final String CONFIG_TRANSACTION_MANAGER_ENABLED = "b7a.transaction.manager.enabled"; private static final String CONFIG_TRANSACTION_LOG_BASE = "b7a.transaction.log.base"; private static final ConfigRegistry CONFIG_REGISTRY = ConfigRegistry.getInstance(); private static final Logger log = LoggerFactory.getLogger(TransactionResourceManager.class); private Map<String, List<BallerinaTransactionContext>> resourceRegistry; private Map<String, Transaction> trxRegistry; private Map<String, Xid> xidRegistry; private Map<String, List<BFunctionPointer>> committedFuncRegistry; private Map<String, List<BFunctionPointer>> abortedFuncRegistry; private ConcurrentSkipListSet<String> failedResourceParticipantSet = new ConcurrentSkipListSet<>(); private ConcurrentSkipListSet<String> failedLocalParticipantSet = new ConcurrentSkipListSet<>(); private ConcurrentHashMap<String, ConcurrentSkipListSet<String>> localParticipants = new ConcurrentHashMap<>(); private boolean transactionManagerEnabled; private static final PrintStream stderr = System.err; public Map<BArray, Object> transactionInfoMap; private TransactionResourceManager() { resourceRegistry = new HashMap<>(); committedFuncRegistry = new HashMap<>(); abortedFuncRegistry = new HashMap<>(); transactionInfoMap = new HashMap<>(); transactionManagerEnabled = getTransactionManagerEnabled(); if (transactionManagerEnabled) { trxRegistry = new HashMap<>(); setLogProperties(); userTransactionManager = new UserTransactionManager(); } else { xidRegistry = new HashMap<>(); } } public static TransactionResourceManager getInstance() { if (transactionResourceManager == null) { synchronized (TransactionResourceManager.class) { if (transactionResourceManager == null) { transactionResourceManager = new TransactionResourceManager(); } } } return transactionResourceManager; } /** * This method sets values for atomikos transaction log path and name properties using the available configs. * */ /** * This method checks whether the atomikos transaction manager should be enabled or not. * * @return boolean whether the atomikos transaction manager should be enabled or not */ public boolean getTransactionManagerEnabled() { boolean transactionManagerEnabled = CONFIG_REGISTRY.getAsBoolean(CONFIG_TRANSACTION_MANAGER_ENABLED); return transactionManagerEnabled; } /** * This method gets the user specified config for log directory name. 
* * @return string log directory name */ private String getTransactionLogDirectory() { String transactionLogDirectory = CONFIG_REGISTRY.getAsString(CONFIG_TRANSACTION_LOG_BASE); if (transactionLogDirectory != null) { return transactionLogDirectory; } return "transaction_log_dir"; } /** * This method will register connection resources with a particular transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param txContext ballerina transaction context which includes the underlying connection info */ public void register(String transactionId, String transactionBlockId, BallerinaTransactionContext txContext) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); resourceRegistry.computeIfAbsent(combinedId, resourceList -> new ArrayList<>()).add(txContext); } /** * This method will register a committed function handler of a particular transaction. * * @param transactionBlockId the block id of the transaction * @param fpValue the function pointer for the committed function */ public void registerCommittedFunction(String transactionBlockId, BFunctionPointer fpValue) { if (fpValue != null) { committedFuncRegistry.computeIfAbsent(transactionBlockId, list -> new ArrayList<>()).add(fpValue); } } /** * This method will register an aborted function handler of a particular transaction. * * @param transactionBlockId the block id of the transaction * @param fpValue the function pointer for the aborted function */ public void registerAbortedFunction(String transactionBlockId, BFunctionPointer fpValue) { if (fpValue != null) { abortedFuncRegistry.computeIfAbsent(transactionBlockId, list -> new ArrayList<>()).add(fpValue); } } /** * Register a participation in a global transaction. * * @param gTransactionId global transaction id * @param transactionBlockId participant identifier * @since 0.990.0 */ public void registerParticipation(String gTransactionId, String transactionBlockId) { localParticipants.computeIfAbsent(gTransactionId, gid -> new ConcurrentSkipListSet<>()).add(transactionBlockId); TransactionLocalContext transactionLocalContext = Scheduler.getStrand().currentTrxContext; transactionLocalContext.beginTransactionBlock(transactionBlockId); } /** * This method acts as the callback which notify all the resources participated in the given transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @return the status of the prepare operation */ public boolean prepare(String transactionId, String transactionBlockId) { endXATransaction(transactionId, transactionBlockId); if (transactionManagerEnabled) { return true; } String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { Xid xid = xidRegistry.get(combinedId); for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { xaResource.prepare(xid); } } catch (XAException e) { log.error("error at transaction prepare phase in transaction " + transactionId + ":" + e.getMessage(), e); return false; } } } boolean status = true; if (failedResourceParticipantSet.contains(transactionId) || failedLocalParticipantSet.contains(transactionId)) { status = false; } log.info(String.format("Transaction prepare (participants): %s", status ? 
"success" : "failed")); return status; } /** * This method acts as the callback which commits all the resources participated in the given transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @return the status of the commit operation */ public boolean notifyCommit(String transactionId, String transactionBlockId) { Strand strand = Scheduler.getStrand(); String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); boolean commitSuccess = true; List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx != null) { trx.commit(); } } catch (SystemException | HeuristicMixedException | HeuristicRollbackException | RollbackException e) { log.error("error when committing transaction " + transactionId + ":" + e.getMessage(), e); commitSuccess = false; } } for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (transactionManagerEnabled && xaResource == null) { ctx.commit(); } else { if (xaResource != null) { Xid xid = xidRegistry.get(combinedId); xaResource.commit(xid, false); } else { ctx.commit(); } } } catch (XAException e) { log.error("error when committing transaction " + transactionId + ":" + e.getMessage(), e); commitSuccess = false; } finally { ctx.close(); } } } invokeCommittedFunction(strand, transactionId, transactionBlockId); removeContextsFromRegistry(combinedId, transactionId); failedResourceParticipantSet.remove(transactionId); failedLocalParticipantSet.remove(transactionId); localParticipants.remove(transactionId); return commitSuccess; } /** * This method acts as the callback which aborts all the resources participated in the given transaction. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param error the cause of abortion * @return the status of the abort operation */ public boolean notifyAbort(String transactionId, String transactionBlockId, Object error) { Strand strand = Scheduler.getStrand(); String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); boolean abortSuccess = true; List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx != null) { trx.rollback(); } } catch (SystemException e) { log.error("error when aborting transaction " + transactionId + ":" + e.getMessage(), e); abortSuccess = false; } } for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (transactionManagerEnabled && xaResource == null) { ctx.rollback(); } else { Xid xid = xidRegistry.get(combinedId); if (xaResource != null) { ctx.getXAResource().rollback(xid); } else { ctx.rollback(); } } } catch (XAException e) { log.error("error when aborting the transaction " + transactionId + ":" + e.getMessage(), e); abortSuccess = false; } finally { ctx.close(); } } } invokeAbortedFunction(strand, transactionId, transactionBlockId, error); removeContextsFromRegistry(combinedId, transactionId); failedResourceParticipantSet.remove(transactionId); failedLocalParticipantSet.remove(transactionId); localParticipants.remove(transactionId); return abortSuccess; } /** * This method starts a transaction for the given xa resource. 
If there is no transaction is started for the * given XID a new transaction is created. * * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction * @param xaResource the XA resource which participates in the transaction */ public void beginXATransaction(String transactionId, String transactionBlockId, XAResource xaResource) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); try { if (trx == null) { userTransactionManager.begin(); trx = userTransactionManager.getTransaction(); trxRegistry.put(combinedId, trx); } } catch (SystemException | NotSupportedException e) { log.error("error in initiating transaction " + transactionId + ":" + e.getMessage(), e); } } else { Xid xid = xidRegistry.get(combinedId); if (xid == null) { xid = XIDGenerator.createXID(); xidRegistry.put(combinedId, xid); } try { xaResource.start(xid, TMNOFLAGS); } catch (XAException e) { log.error("error in starting XA transaction " + transactionId + ":" + e.getMessage(), e); } } } /** * Cleanup the Info record keeping state related to current transaction context and remove the current * context from the stack. */ public void cleanupTransactionContext() { Strand strand = Scheduler.getStrand(); TransactionLocalContext transactionLocalContext = strand.currentTrxContext; transactionLocalContext.removeTransactionInfo(); strand.removeCurrentTrxContext(); } /** * This method returns true if there is a failure of the current transaction, otherwise false. * @return true if there is a failure of the current transaction. */ public boolean getAndClearFailure() { return Scheduler.getStrand().currentTrxContext.getAndClearFailure() != null; } /** * This method is used to get the error which is set by calling setRollbackOnly(). * If it is not set, then returns null. * @return the error or null. */ public Object getRollBackOnlyError() { TransactionLocalContext transactionLocalContext = Scheduler.getStrand().currentTrxContext; return transactionLocalContext.getRollbackOnly(); } /** * This method checks if the current strand is in a transaction or not. * @return True if the current strand is in a transaction. */ public boolean isInTransaction() { return Scheduler.getStrand().isInTransaction(); } /** * This method rollbacks the given transaction. * @param transactionBlockId The transaction blockId * @param error The error which caused rolling back. */ public void rollbackTransaction(String transactionBlockId, Object error) { Scheduler.getStrand().currentTrxContext.rollbackTransaction(transactionBlockId, error); } /** * This method marks the current transaction context as non-transactional. */ public void setContextNonTransactional() { Scheduler.getStrand().currentTrxContext.setTransactional(false); } /** * This method set the given transaction context as the current transaction context in the stack. * @param trxCtx The input transaction context */ public void setCurrentTransactionContext(TransactionLocalContext trxCtx) { Scheduler.getStrand().setCurrentTransactionContext(trxCtx); } /** * This method returns the current transaction context. * @return The current Transaction Context */ public TransactionLocalContext getCurrentTransactionContext() { return Scheduler.getStrand().currentTrxContext; } /** * This method marks the end of a transaction for the given transaction id. 
* * @param transactionId the global transaction id * @param transactionBlockId the block id of the transaction */ void endXATransaction(String transactionId, String transactionBlockId) { String combinedId = generateCombinedTransactionId(transactionId, transactionBlockId); if (transactionManagerEnabled) { Transaction trx = trxRegistry.get(combinedId); if (trx != null) { List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (txContextList != null) { for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { trx.delistResource(xaResource, TMSUCCESS); } } catch (IllegalStateException | SystemException e) { log.error("error in ending the XA transaction " + transactionId + ":" + e.getMessage(), e); } } } } } else { Xid xid = xidRegistry.get(combinedId); List<BallerinaTransactionContext> txContextList = resourceRegistry.get(combinedId); if (xid != null && txContextList != null) { for (BallerinaTransactionContext ctx : txContextList) { try { XAResource xaResource = ctx.getXAResource(); if (xaResource != null) { ctx.getXAResource().end(xid, TMSUCCESS); } } catch (XAException e) { log.error("error in ending XA transaction " + transactionId + ":" + e.getMessage(), e); } } } } } void rollbackTransaction(String transactionId, String transactionBlockId, Object error) { notifyAbort(transactionId, transactionBlockId, error); } private void removeContextsFromRegistry(String transactionCombinedId, String gTransactionId) { resourceRegistry.remove(transactionCombinedId); if (transactionManagerEnabled) { trxRegistry.remove(transactionCombinedId); } else { xidRegistry.remove(transactionCombinedId); } } private String generateCombinedTransactionId(String transactionId, String transactionBlockId) { return transactionId + ":" + transactionBlockId; } private void invokeCommittedFunction(Strand strand, String transactionId, String transactionBlockId) { List<BFunctionPointer> fpValueList = committedFuncRegistry.get(transactionId); if (fpValueList != null) { Object[] args = {strand, strand.currentTrxContext.getInfoRecord(), true}; for (int i = fpValueList.size(); i > 0; i--) { BFunctionPointer fp = fpValueList.get(i - 1); fp.getFunction().apply(args); } } } private void invokeAbortedFunction(Strand strand, String transactionId, String transactionBlockId, Object error) { List<BFunctionPointer> fpValueList = abortedFuncRegistry.get(transactionId); if (fpValueList != null) { Object[] args = {strand, strand.currentTrxContext.getInfoRecord(), true, error, true, false, true}; for (int i = fpValueList.size(); i > 0; i--) { BFunctionPointer fp = fpValueList.get(i - 1); fp.getFunction().apply(args); } } } public void notifyResourceFailure(String gTransactionId) { failedResourceParticipantSet.add(gTransactionId); log.info("Trx infected callable unit excepted id : " + gTransactionId); } public void notifyLocalParticipantFailure(String gTransactionId, String blockId) { ConcurrentSkipListSet<String> participantBlockIds = localParticipants.get(gTransactionId); if (participantBlockIds != null && participantBlockIds.contains(blockId)) { failedLocalParticipantSet.add(gTransactionId); } } }
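The `prepare`/`notifyCommit` callbacks above implement the two phases of XA commit over `javax.transaction.xa.XAResource`. A minimal sketch of that flow for a single resource, assuming a hypothetical `resource` obtained from an XA-capable driver and a hypothetical `xid` from an XID generator (neither value comes from the code above):

```java
import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;

class TwoPhaseCommitSketch {
    // `resource` and `xid` are assumed inputs, not values from the class above.
    static void run(XAResource resource, Xid xid) throws XAException {
        resource.start(xid, XAResource.TMNOFLAGS); // associate work with this branch
        // ... perform transactional work on the underlying connection ...
        resource.end(xid, XAResource.TMSUCCESS);   // dissociate before preparing
        try {
            int vote = resource.prepare(xid);      // phase one
            if (vote == XAResource.XA_OK) {
                resource.commit(xid, false);       // phase two; false = not one-phase
            }
            // XA_RDONLY: the branch had no updates and is already complete.
        } catch (XAException e) {
            resource.rollback(xid);                // a veto in phase one aborts the branch
        }
    }
}
```

The class above follows the same shape: `endXATransaction` ends/delists the resources, `prepare` collects the votes, and `notifyCommit`/`notifyAbort` finish the branch.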
Remember it's a nested node structure, so you could still use the `hibernate.` prefix, as long as we manage to disambiguate the sub-elements. Specifically, `hibernate.query.executions` is taken, but you could use `hibernate.fulltextquery.executions`, or `hibernate.search.fulltextquery.executions` (see the naming sketch after this example). And yes, it's annoying, but I also give up... let's consider that this PR is about better integration with an existing library that already behaved like this. If either of you feels strongly that this is wrong, I propose you follow up with a patch that makes the prefix configurable; if we default to `hibernate-orm` there's a (very remote) possibility that you eventually manage to re-educate the rest of the universe, but before we get there, consider that you'll be annoying many users who will be forced to figure out what's wrong, find the configuration property, and reconfigure all their apps.
public void testMetrics() { assertEquals(0L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.cache.query.requests", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), new Tag("result", "miss"))); Arc.container().requestContext().activate(); try { DummyEntity entity = new DummyEntity(); entity.number = 12345L; em.persist(entity); em.flush(); em.createQuery("from DummyEntity e").getResultList(); assertEquals(1L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(1L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); } finally { Arc.container().requestContext().terminate(); } }
assertEquals(0L, getCounterValueOrNull("hibernate.query.executions",
public void testMetrics() { assertEquals(0L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.cache.query.requests", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), new Tag("result", "miss"))); Arc.container().requestContext().activate(); try { DummyEntity entity = new DummyEntity(); entity.number = 12345L; em.persist(entity); em.flush(); em.createQuery("from DummyEntity e").getResultList(); assertEquals(1L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(1L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); } finally { Arc.container().requestContext().terminate(); } }
class DummyEntity { @Id private Long number; public Long getNumber() { return number; } public void setNumber(Long number) { this.number = number; } }
class DummyEntity { @Id private Long number; public Long getNumber() { return number; } public void setNumber(Long number) { this.number = number; } }
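The naming question in the comment above is only about the counter name; with Micrometer, picking a disambiguated prefix is just a different string. A minimal sketch, where `hibernate.search.fulltextquery.executions` is the name proposed in the comment (not an established convention) and the tag mirrors the `entityManagerFactory` tag used in the test:

```java
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

public class FullTextQueryMetricsSketch {
    public static void main(String[] args) {
        MeterRegistry registry = new SimpleMeterRegistry();
        // `hibernate.query.executions` is taken by ORM's statistics, so the
        // full-text counter gets its own sub-element under the same prefix.
        Counter executions = registry.counter(
                "hibernate.search.fulltextquery.executions",
                "entityManagerFactory", "default");
        executions.increment();
        System.out.println(executions.count()); // 1.0
    }
}
```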
Shall we do this like `return A && B && C`? (See the sketch after the updated method.)
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (((((BLangSimpleVarRef) aInv.expr).symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async)) { return true; } else { return false; } }
if (aInv.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) { return aInv.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (((((BLangSimpleVarRef) aInv.expr).symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async); }
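The rewrite above is the usual `if (cond) { return true; } else { return false; }` to `return cond;` simplification the comment asks for. In general form, with hypothetical predicates `a()`, `b()`, `c()` standing in for the kind/endpoint/async checks:

```java
class BooleanReturnSketch {
    // Hypothetical predicates; only the shape of the refactor matters here.
    boolean a() { return true; }
    boolean b() { return true; }
    boolean c() { return false; }

    // Before: branches exist only to return boolean literals.
    boolean verbose() {
        if (a() && (b() && c())) {
            return true;
        } else {
            return false;
        }
    }

    // After: return the condition directly, i.e. `return A && B && C`.
    boolean simplified() {
        return a() && b() && c();
    }
}
```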
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLog dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private Unifier unifier; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangNode> queryFinalClauses; private boolean checkWithinQueryExpr = false; private BLangMissingNodesHelper missingNodesHelper; private boolean breakToParallelQueryEnv = false; /** * Expected types or inherited types. */ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = 
SemanticAnalyzer.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.queryFinalClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); this.unifier = new Unifier(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.getBType(); } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.setTypeCheckedType(resultType); expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) { if (!nonErrorLoggingCheck) { semanticAnalyzer.analyzeNode(node, env); } } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.getBType().tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); if (exprType.tag == TypeTags.UNION) { dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS, xmlNavigation.expr.getBType()); } BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN ? 
symTable.xmlType : symTable.xmlElementSeqType; types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = symTable.xmlElementSeqType; } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag); Object literalValue = literalExpr.value; if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, 
symTable.unsigned32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); BType intSubType = null; boolean intOrIntCompatibleTypeFound = false; for (BType memType : memberTypes) { if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) || memType.tag == TypeTags.BYTE) { intSubType = memType; } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { intOrIntCompatibleTypeFound = true; } } if (intOrIntCompatibleTypeFound) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } if (intSubType != null) { return setLiteralValueAndGetType(literalExpr, intSubType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag 
== TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { return unionMember; } } } else if (literalType.tag == TypeTags.DECIMAL) { return decimalLiteral(literalValue, literalExpr, expType); } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) { if (expType.tag == TypeTags.CHAR_STRING) { return symTable.charStringType; } if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); for (BType memType : memberTypes) { if (TypeTags.isStringTypeTag(memType.tag)) { return setLiteralValueAndGetType(literalExpr, memType); } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { return setLiteralValueAndGetType(literalExpr, symTable.charStringType); } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType, literalExpr)) { setLiteralValueForFiniteType(literalExpr, symTable.charStringType); return literalType; } } } boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else { if (this.expType.tag == TypeTags.FINITE) { boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else if (this.expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) this.expType; boolean foundMember = unionType.getMemberTypes() .stream() .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr)); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } } if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) { literalType = new BArrayType(symTable.byteType); } return literalType; } private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) { Set<BType> memberTypes = expType.getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == desiredType.tag || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, desiredType); } BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType); if (finiteType != 
symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } return symTable.noType; } private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType, int targetMemberTypeTag) { for (BLangExpression valueExpr : finiteType.getValueSpace()) { if (valueExpr.getBType().tag == targetMemberTypeTag && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) { return true; } } return false; } private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) { String literal = String.valueOf(literalValue); if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType); resultType = symTable.semanticError; return resultType; } if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType); if (unionMember != symTable.noType) { return unionMember; } } literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal); resultType = symTable.decimalType; return symTable.decimalType; } private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) { types.setImplicitCastExpr(literalExpr, type, this.expType); this.resultType = type; literalExpr.isFiniteContext = true; } private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) { List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(memFiniteType -> (BFiniteType) memFiniteType) .collect(Collectors.toList()); if (finiteTypeMembers.isEmpty()) { return symTable.semanticError; } int tag = matchType.tag; Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>(); for (BFiniteType finiteType : finiteTypeMembers) { Set<BLangExpression> set = new HashSet<>(); for (BLangExpression expression : finiteType.getValueSpace()) { if (expression.getBType().tag == tag) { set.add(expression); } } matchedValueSpace.addAll(set); } if (matchedValueSpace.isEmpty()) { return symTable.semanticError; } return new BFiniteType(null, matchedValueSpace); } private BType getIntLiteralType(Location location, BType expType, BType literalType, Object literalValue) { switch (expType.tag) { case TypeTags.INT: return symTable.intType; case TypeTags.BYTE: if (types.isByteLiteralValue((Long) literalValue)) { return symTable.byteType; } break; case TypeTags.SIGNED32_INT: if (types.isSigned32LiteralValue((Long) literalValue)) { return symTable.signed32IntType; } break; case TypeTags.SIGNED16_INT: if (types.isSigned16LiteralValue((Long) literalValue)) { return symTable.signed16IntType; } break; case TypeTags.SIGNED8_INT: if (types.isSigned8LiteralValue((Long) literalValue)) { return symTable.signed8IntType; } break; case TypeTags.UNSIGNED32_INT: if (types.isUnsigned32LiteralValue((Long) literalValue)) { return symTable.unsigned32IntType; } break; case TypeTags.UNSIGNED16_INT: if 
(types.isUnsigned16LiteralValue((Long) literalValue)) { return symTable.unsigned16IntType; } break; case TypeTags.UNSIGNED8_INT: if (types.isUnsigned8LiteralValue((Long) literalValue)) { return symTable.unsigned8IntType; } break; default: } dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType); resultType = symTable.semanticError; return resultType; } @Override public void visit(BLangListConstructorExpr listConstructor) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.setBType(inherentMemberType); } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.cloneNode(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) { validateMapConstraintTable(applicableExpType); return; } if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { dlog.resetErrorCount(); BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.cloneNode(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, 
BTableType tableType) { if (tableConstructorExpr.tableKeySpecifier != null) { if (!(validateTableKeyValue(getTableKeyNameList(tableConstructorExpr. tableKeySpecifier), tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return true; } tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); } return false; } private BType inferTableMemberType(List<BType> memTypes, BType expType) { if (memTypes.isEmpty()) { return ((BTableType) expType).constraint; } LinkedHashSet<BType> result = new LinkedHashSet<>(); result.add(memTypes.get(0)); BUnionType unionType = BUnionType.create(null, result); for (int i = 1; i < memTypes.size(); i++) { BType source = memTypes.get(i); if (!types.isAssignable(source, unionType)) { result.add(source); unionType = BUnionType.create(null, result); } } if (unionType.getMemberTypes().size() == 1) { return memTypes.get(0); } return unionType; } private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) { BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier; List<String> keySpecifierFieldNames = new ArrayList<>(); Set<BField> allFieldSet = new LinkedHashSet<>(); for (BType memType : memTypes) { allFieldSet.addAll(((BRecordType) memType).fields.values()); } Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet); for (BType memType : memTypes) { commonFieldSet.retainAll(((BRecordType) memType).fields.values()); } List<String> requiredFieldNames = new ArrayList<>(); if (keySpecifier != null) { for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) { requiredFieldNames.add(((BLangIdentifier) identifierNode).value); keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value); } } List<String> fieldNames = new ArrayList<>(); for (BField field : allFieldSet) { String fieldName = field.name.value; if (fieldNames.contains(fieldName)) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName); return symTable.semanticError; } fieldNames.add(fieldName); boolean isOptional = true; for (BField commonField : commonFieldSet) { if (commonField.name.value.equals(fieldName)) { isOptional = false; requiredFieldNames.add(commonField.name.value); } } if (isOptional) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL)); } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY)); } else if (requiredFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)); } } return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos); } private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL); for (BField field : allFieldSet) { recordSymbol.scope.define(field.name, field.symbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = allFieldSet.stream().collect(getFieldCollector()); recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); 
TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); recordType.sealed = true; recordType.restFieldType = symTable.noType; return recordType; } private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() { BinaryOperator<BField> mergeFunc = (u, v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new); } private boolean validateTableType(BTableType tableType) { BType constraint = tableType.constraint; if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) { dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint); resultType = symTable.semanticError; return false; } return true; } private boolean validateKeySpecifierInTableConstructor(BTableType tableType, List<BLangRecordLiteral> recordLiterals) { List<String> fieldNameList = tableType.fieldNameList; if (fieldNameList != null) { return validateTableKeyValue(fieldNameList, recordLiterals); } return true; } private boolean validateTableKeyValue(List<String> keySpecifierFieldNames, List<BLangRecordLiteral> recordLiterals) { for (String fieldName : keySpecifierFieldNames) { for (BLangRecordLiteral recordLiteral : recordLiterals) { BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName); if (recordKeyValueField != null && isConstExpression(recordKeyValueField.getValue())) { continue; } dlog.error(recordLiteral.pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName); resultType = symTable.semanticError; return false; } } return true; } private boolean isConstExpression(BLangExpression expression) { switch(expression.getKind()) { case LITERAL: case NUMERIC_LITERAL: case STRING_TEMPLATE_LITERAL: case XML_ELEMENT_LITERAL: case XML_TEXT_LITERAL: case LIST_CONSTRUCTOR_EXPR: case TABLE_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: case TYPE_CONVERSION_EXPR: case UNARY_EXPR: case BINARY_EXPR: case TYPE_TEST_EXPR: case TERNARY_EXPR: return true; case SIMPLE_VARIABLE_REF: return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; case GROUP_EXPR: return isConstExpression(((BLangGroupExpr) expression).expression); default: return false; } } private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral, String fieldName) { for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) { BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField; if (fieldName.equals(recordKeyValueField.key.toString())) { return recordKeyValueField; } } return null; } public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, Location pos) { for (String fieldName : fieldNameList) { BField field = types.getTableConstraintField(constraint, fieldName); if (field == null) { dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName); resultType = symTable.semanticError; return false; } if 
(!types.isAssignable(field.type, symTable.anydataType)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint); resultType = symTable.semanticError; return false; } } return true; } private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { BType constraintType = tableType.constraint; if (tableConstructorExpr.tableKeySpecifier != null) { List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); if (tableType.fieldNameList == null && !validateKeySpecifier(fieldNameList, constraintType.tag != TypeTags.INTERSECTION ? constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
fieldNameIdentifierList; int index = 0; for (IdentifierNode identifier : fieldNameIdentifierList) { BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value); if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT, fieldNameIdentifierList.toString(), memberTypes.toString()); resultType = symTable.semanticError; return false; } index++; } } return true; } public void validateMapConstraintTable(BType expType) { if (expType != null && (((BTableType) expType).fieldNameList != null || ((BTableType) expType).keyTypeConstraint != null) && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) { dlog.error(((BTableType) expType).keyPos, DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } resultType = expType; } private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) { List<String> fieldNamesList = new ArrayList<>(); for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) { fieldNamesList.add(((BLangIdentifier) identifier).value); } return fieldNamesList; } private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) { if (fieldNames == null) { return symTable.semanticError; } List<BType> memTypes = new ArrayList<>(); for (String fieldName : fieldNames) { BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName); if (tableConstraintField == null) { return symTable.semanticError; } BType fieldType = tableConstraintField.type; memTypes.add(fieldType); } if (memTypes.size() == 1) { return memTypes.get(0); } return new BTupleType(memTypes); } private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.nonErrorLoggingCheck = true; this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.cloneNode(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, 
DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag == TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.cloneNode(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.INFERRED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED; } else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? symTable.semanticError : arrayType; } private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) { List<BLangExpression> exprs = listConstructor.exprs; List<BType> memberTypes = tupleType.tupleTypes; BType restType = tupleType.restType; int listExprSize = exprs.size(); int memberTypeSize = memberTypes.size(); if (listExprSize < memberTypeSize) { for (int i = listExprSize; i < memberTypeSize; i++) { if (!types.hasFillerValue(memberTypes.get(i))) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } } } else if (listExprSize > memberTypeSize && restType == null) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } boolean errored = false; int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize; for (int i = 0; i < nonRestCountToCheck; i++) { if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) { errored = true; } } for (int i = nonRestCountToCheck; i < exprs.size(); i++) { if (exprIncompatible(restType, exprs.get(i)) && !errored) { errored = true; } } return errored ? 
symTable.semanticError : tupleType; } private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return types.checkType(listConstructor, inferredType, symTable.readonlyType); } for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(symTable.readonlyType, expr)) { return symTable.semanticError; } } return symTable.readonlyType; } private boolean exprIncompatible(BType eType, BLangExpression expr) { if (expr.typeChecked) { return expr.getBType() == symTable.semanticError; } BLangExpression exprToCheck = expr; if (this.nonErrorLoggingCheck) { expr.cloneAttempt++; exprToCheck = nodeCloner.cloneNode(expr); } return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError; } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) { return checkExprList(exprs, env, symTable.noType); } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) { List<BType> types = new ArrayList<>(); SymbolEnv prevEnv = this.env; BType preExpType = this.expType; this.env = env; this.expType = expType; for (BLangExpression e : exprs) { checkExpr(e, this.env, expType); types.add(resultType); } this.env = prevEnv; this.expType = preExpType; return types; } private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) { List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } BTupleType tupleType = new BTupleType(memTypes); if (expType.tag != TypeTags.READONLY) { return tupleType; } tupleType.flags |= Flags.READONLY; return tupleType; } public void visit(BLangRecordLiteral recordLiteral) { int expTypeTag = expType.tag; if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); } private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; } Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? 
((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; } PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType(); } else { readOnlyFieldType = ((BLangRecordVarNameField) field).getBType(); } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true; for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; long origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, 
names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); if (applicableMappingType.tag == TypeTags.MAP) { recordLiteral.expectedType = applicableMappingType; } return recordType; } private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType, mappingConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { if (!erroredExpType) { reportIncompatibleMappingConstructorError(mappingConstructor, bType); } validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor); } if (tag == TypeTags.INTERSECTION) { return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor); } BType possibleType = getMappingConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.MAP: return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType : symTable.semanticError; case TypeTags.RECORD: boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType); boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType, mappingConstructor.fields, mappingConstructor.pos); return isSpecifiedFieldsValid && hasAllRequiredFields ? 
possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; } private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; } private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); } private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return isFieldsValid; } private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields, Location pos) { HashSet<String> specFieldNames = getFieldNames(specifiedFields); boolean hasAllRequiredFields = true; for (BField field : type.fields.values()) { String fieldName = field.name.value; if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); if (hasAllRequiredFields) { hasAllRequiredFields = false; } } } return hasAllRequiredFields; } private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) { HashSet<String> fieldNames = new HashSet<>(); for (RecordLiteralNode.RecordField specifiedField : specifiedFields) { if (specifiedField.isKeyValueField()) { String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField); if (name == null) { continue; } fieldNames.add(name); } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField)); } else { fieldNames.addAll(getSpreadOpFieldRequiredFieldNames( (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField)); } } return fieldNames; } private String getKeyValueFieldName(BLangRecordKeyValueField field) { BLangRecordKey key = field.key; if (key.computedKey) { return null; } BLangExpression keyExpr = key.expr; if 
(keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return ((BLangSimpleVarRef) keyExpr).variableName.value; } else if (keyExpr.getKind() == NodeKind.LITERAL) { return (String) ((BLangLiteral) keyExpr).value; } return null; } private String getVarNameFieldName(BLangRecordVarNameField field) { return field.variableName.value; } private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) { BType spreadType = checkExpr(field.expr, env); if (spreadType.tag != TypeTags.RECORD) { return Collections.emptyList(); } List<String> fieldNames = new ArrayList<>(); for (BField bField : ((BRecordType) spreadType).getFields().values()) { if (!Symbols.isOptional(bField.symbol)) { fieldNames.add(bField.name.value); } } return fieldNames; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier != null) { String workerName = workerFlushExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } else { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName)); if (symbol != symTable.notFoundSymbol) { workerFlushExpr.workerSymbol = symbol; } } } BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(workerFlushExpr, actualType, expType); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if (symTable.notFoundSymbol.equals(symbol)) { syncSendExpr.workerType = symTable.semanticError; } else { syncSendExpr.workerType = symbol.type; syncSendExpr.workerSymbol = symbol; } syncSendExpr.env = this.env; checkExpr(syncSendExpr.expr, this.env); if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.getBType()); } String workerName = syncSendExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } syncSendExpr.expectedType = expType; resultType = expType == symTable.noType ? 
symTable.nilType : expType; } @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; workerReceiveExpr.workerSymbol = symbol; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.setBType(this.expType); resultType = this.expType; } private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; } @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.getBType(), expType); resultType = constRef.getBType(); } public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.isLValue) { varRefExpr.setBType(this.symTable.anyType); } else { varRefExpr.setBType(this.symTable.semanticError); dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, true, varName, names.originalNameFromIdNode(varRefExpr.variableName), env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.getBType(); return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol = symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { varRefExpr.symbol = symTable.notFoundSymbol; dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName); if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.getBType().tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos, env); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? 
symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type; if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { varRefExpr.symbol = symbol; logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); } @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference; bLangVarReference.isLValue = true; checkExpr(recordRefField.variableReference, env); if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), names.originalNameFromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(), env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.getBType() == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BType restFieldType; if (restParam.getBType().tag == TypeTags.RECORD) { restFieldType = ((BRecordType) restParam.getBType()).restFieldType; } else if (restParam.getBType().tag == TypeTags.MAP) { restFieldType = ((BMapType) restParam.getBType()).constraint; } else { restFieldType = 
restParam.getBType(); } bRecordType.restFieldType = restFieldType; } resultType = bRecordType; } @Override public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.setBType(bType); checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) { dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.getBType()); } } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) { dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.getBType()); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.isLValue = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.isLValue = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } } if (unresolvedReference) { resultType = symTable.semanticError; return; } BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.getBType(); } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint; } else { dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.getBType(), symTable.detailType); resultType = symTable.semanticError; return; } BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly ? 
symTable.errorType.detailType : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC); resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); } private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) { for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { checkExpr(detailItem.expr, env); checkExpr(detailItem, env, detailItem.expr.getBType()); } if (varRefExpr.restVar != null) { checkExpr(varRefExpr.restVar, env); } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); } } @Override public void visit(BLangTupleVarRef varRefExpr) { List<BType> results = new ArrayList<>(); for (int i = 0; i < varRefExpr.expressions.size(); i++) { ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true; results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).isLValue = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } if (checkedType.tag == TypeTags.ARRAY) { actualType.restType = ((BArrayType) checkedType).eType; } else { actualType.restType = checkedType; } } resultType = types.checkType(varRefExpr, actualType, expType); } /** * This method will recursively check if a multidimensional array has at least one open sealed dimension. * * @param arrayType array to check if open sealed * @return true if at least one dimension is open sealed */ public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.INFERRED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; } /** * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the * enclosing invokable node's environment, which are outside of the scope of a lambda function. 
*/ private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclInvokable != null && env.enclInvokable == encInvokable) { return findEnclosingInvokableEnv(env.enclEnv, encInvokable); } return env; } private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclType != null && env.enclType == recordTypeNode) { return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode); } return env; } private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) { return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag)); } public void visit(BLangFieldBasedAccess fieldAccessExpr) { markLeafNode(fieldAccessExpr); BLangExpression containerExpression = fieldAccessExpr.expr; if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = fieldAccessExpr.isCompoundAssignmentLValue; } BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env)); if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess && !isXmlAccess(fieldAccessExpr)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION); resultType = symTable.semanticError; return; } BType actualType; if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); if (actualType != symTable.semanticError && (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); } private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; } private boolean isInitializationInInit(BType type) { BObjectType objectType = (BObjectType) 
type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null && env.enclInvokable.symbol == initializerFunc.symbol; } private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) { if (type.tag == TypeTags.RECORD) { if (Symbols.isFlagOn(type.flags, Flags.READONLY)) { return true; } BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (!field.name.value.equals(fieldName)) { continue; } return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY); } return recordType.sealed; } boolean allInvalidUpdates = true; for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) { allInvalidUpdates = false; } } return allInvalidUpdates; } private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; BType exprType = expr.getBType(); if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) { return true; } if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr) && exprType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes(); return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType); } return false; } public void visit(BLangIndexBasedAccess indexBasedAccessExpr) { markLeafNode(indexBasedAccessExpr); BLangExpression containerExpression = indexBasedAccessExpr.expr; if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, ((BLangTypedescExpr) containerExpression).typeNode); resultType = symTable.semanticError; return; } if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = indexBasedAccessExpr.isCompoundAssignmentLValue; } boolean isStringValue = containerExpression.getBType() != null && containerExpression.getBType().tag == TypeTags.STRING; if (!isStringValue) { checkExpr(containerExpression, this.env, symTable.noType); } if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY && containerExpression.getBType().tag != TypeTags.TABLE) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED, containerExpression.getBType()); resultType = symTable.semanticError; return; } BType actualType = checkIndexAccessExpr(indexBasedAccessExpr); BType exprType = containerExpression.getBType(); BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = symTable.semanticError; 
return; } } if (indexBasedAccessExpr.isLValue) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.setBType(actualType); resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.getBType(); switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef; if (userProvidedTypeRef != null) { symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR); } validateErrorConstructorPositionalArgs(errorConstructorExpr); List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr); List<BType> errorDetailTypes = new ArrayList<>(); for (BType expandedCandidate : expandedCandidates) { BType detailType = ((BErrorType) expandedCandidate).detailType; errorDetailTypes.add(detailType); } BType detailCandidate; if (errorDetailTypes.size() == 1) { detailCandidate = errorDetailTypes.get(0); } else { detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes)); } BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr); BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env); int index = errorDetailTypes.indexOf(inferredDetailType); BType selectedCandidate = index < 0 ? 
symTable.semanticError : expandedCandidates.get(index); if (selectedCandidate != symTable.semanticError && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) { checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType); resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); return; } if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType); } BErrorType errorType; if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) { errorType = (BErrorType) userProvidedTypeRef.getBType(); } else if (expandedCandidates.size() == 1) { errorType = (BErrorType) expandedCandidates.get(0); } else { errorType = symTable.errorType; } List<BLangNamedArgsExpression> namedArgs = checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType); BType detailType = errorType.detailType; if (detailType.tag == TypeTags.MAP) { BType errorDetailTypeConstraint = ((BMapType) detailType).constraint; for (BLangNamedArgsExpression namedArgExpr: namedArgs) { if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) { dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType()); } } } else if (detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream() .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED) .map(f -> f.name.value) .collect(Collectors.toCollection(LinkedList::new)); LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields; for (BLangNamedArgsExpression namedArg : namedArgs) { BField field = targetFields.get(namedArg.name.value); Location pos = namedArg.pos; if (field == null) { if (targetErrorDetailRec.sealed) { dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC, namedArg.name, targetErrorDetailRec); } else if (targetFields.isEmpty() && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, namedArg.name, targetErrorDetailRec); } } else { missingRequiredFields.remove(namedArg.name.value); if (!types.isAssignable(namedArg.expr.getBType(), field.type)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, field.type, namedArg.expr.getBType()); } } } for (String requiredField : missingRequiredFields) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField); } } if (userProvidedTypeRef != null) { errorConstructorExpr.setBType(userProvidedTypeRef.getBType()); } else { errorConstructorExpr.setBType(errorType); } resultType = errorConstructorExpr.getBType(); } private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) { if (errorConstructorExpr.positionalArgs.isEmpty()) { return; } checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType); int positionalArgCount = errorConstructorExpr.positionalArgs.size(); if (positionalArgCount > 1) { checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType); } } private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) { boolean 
prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); BType type = checkExpr(expr, env, expType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return type; } private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) { BLangRecordKeyValueField field = (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue(); field.valueExpr = (BLangExpression) namedArg.getExpression(); BLangLiteral expr = new BLangLiteral(); expr.value = namedArg.getName().value; expr.setBType(symTable.stringType); field.key = new BLangRecordKey(expr); recordLiteral.fields.add(field); } return recordLiteral; } private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef; if (errorTypeRef == null) { if (expType.tag == TypeTags.ERROR) { return List.of(expType); } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) { return expandExpectedErrorTypes(expType); } } else { if (errorTypeRef.getBType().tag != TypeTags.ERROR) { if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) { dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef); } } else { return List.of(errorTypeRef.getBType()); } } return List.of(symTable.errorType); } private List<BType> expandExpectedErrorTypes(BType candidateType) { List<BType> expandedCandidates = new ArrayList<>(); if (candidateType.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) { if (types.isAssignable(memberType, symTable.errorType)) { if (memberType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) memberType).effectiveType); } else { expandedCandidates.add(memberType); } } } } else if (types.isAssignable(candidateType, symTable.errorType)) { if (candidateType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) candidateType).effectiveType); } else { expandedCandidates.add(candidateType); } } return expandedCandidates; } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.getBType().tag) { case TypeTags.OBJECT: checkActionInvocation(aInv, (BObjectType) varRef.getBType()); break; case TypeTags.RECORD: checkFieldFunctionPointer(aInv, this.env); break; case TypeTags.NONE: dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name); resultType = symTable.semanticError; break; case TypeTags.SEMANTIC_ERROR: default: dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); resultType = symTable.semanticError; break; } } private boolean invalidModuleAliasUsage(BLangInvocation invocation) { Name pkgAlias = names.fromIdNode(invocation.pkgAlias); if (pkgAlias != Names.EMPTY) { dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE); return true; } return false; } public void visit(BLangLetExpression letExpression) { BLetSymbol 
letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), new Name(String.format("$let_symbol_%d$", letCount++)), env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner, letExpression.pos); letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env); } BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType); types.checkType(letExpression, exprType, this.expType); } private void checkInLangLib(BLangInvocation iExpr, BType varRefType) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value, iExpr.expr.getBType()); resultType = symTable.semanticError; return; } if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) { return; } checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType); } private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType, BSymbol langLibMethodSymbol) { if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) { return false; } String packageId = langLibMethodSymbol.pkgID.name.value; if (!modifierFunctions.containsKey(packageId)) { return false; } String funcName = langLibMethodSymbol.name.value; if (!modifierFunctions.get(packageId).contains(funcName)) { return false; } if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) { return false; } if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) { return false; } dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return true; } private boolean isFixedLengthList(BType type) { switch(type.tag) { case TypeTags.ARRAY: return (((BArrayType) type).state != BArrayState.OPEN); case TypeTags.TUPLE: return (((BTupleType) type).restType == null); case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType member : unionType.getMemberTypes()) { if (!isFixedLengthList(member)) { return false; } } return true; default: return false; } } private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) { String invocationName = iExpr.name.getValue(); if (!listLengthModifierFunctions.contains(invocationName)) { return; } if (isFixedLengthList(varRefType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName, varRefType); resultType = symTable.semanticError; return; } if (isShiftOnIncompatibleTuples(varRefType, invocationName)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName, varRefType); resultType = symTable.semanticError; return; } } private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) { if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) && hasDifferentTypeThanRest((BTupleType) varRefType)) { return true; } if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) { BUnionType unionVarRef = (BUnionType) varRefType; boolean allMemberAreFixedShapeTuples = true; for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if (!hasDifferentTypeThanRest((BTupleType) 
member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); return false; } if (fieldSymbol.kind != SymbolKind.FUNCTION) { checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD, fieldSymbol.type); return false; } iExpr.symbol = fieldSymbol; iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType); checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos, DiagnosticErrorCode errCode, Object... diagMsgArgs) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(pos, errCode, diagMsgArgs); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol); } } @Override public void visit(BLangObjectConstructorExpression objectCtorExpression) { if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType(); if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType; objectType.typeIdSet = expObjType.typeIdSet; } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); resultType = symTable.semanticError; return; } } } visit(objectCtorExpression.typeInit); } private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) { if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) { return false; } Set<BType> visitedTypes = new HashSet<>(); if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) { return false; } return typeIdSets.size() <= 1; } private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) { if (type.tag == TypeTags.OBJECT) { var objectType = (BObjectType) type; typeIdSets.add(objectType.typeIdSet); return true; } if (type.tag == TypeTags.UNION) { if (!visitedTypes.add(type)) { return true; } for (BType member : ((BUnionType) type).getMemberTypes()) { if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) { return false; } } return true; } return false; } private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) { Set<BTypeIdSet> typeIdSets = new HashSet<>(); if (!isDefiniteObjectType(type, typeIdSets)) { return false; } if (typeIdSets.isEmpty()) { objectType.typeIdSet = 
BTypeIdSet.emptySet(); return true; } var typeIdIterator = typeIdSets.iterator(); if (typeIdIterator.hasNext()) { BTypeIdSet typeIdSet = typeIdIterator.next(); objectType.typeIdSet = typeIdSet; return true; } return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag == TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: BObjectType actualObjectType = (BObjectType) actualType; if (isObjectConstructorExpr(cIExpr, actualObjectType)) { BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr, env); List<BLangType> typeRefs = classDefForConstructor.typeRefs; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol); if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, false); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, true); } else { analyzeObjectConstructor(classDefForConstructor, pkgEnv); } markConstructedObjectIsolatedness(actualObjectType); } if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() > 1) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.completionType != null) { BType completionType = actualStreamType.completionType; if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString()); resultType = symTable.semanticError; return; } } if (!cIExpr.initInvocation.argExprs.isEmpty()) { BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (constructType.tag != TypeTags.OBJECT) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } BAttachedFunction 
closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType, BLangCompilerConstants.CLOSE_FUNC); if (closeFunc != null) { BType closeableIteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type; if (!types.isAssignable(constructType, closeableIteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } else { BType iteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type; if (!types.isAssignable(constructType, iteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); if (nextReturnType != null) { types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } else { dlog.error(constructType.tsymbol.getPosition(), DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType); } } if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType, actualType); resultType = symTable.semanticError; return; } resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.setBType(symTable.nilType); if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.setBType(matchedType); resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType()); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(Location pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, recordType, env.scope.owner, pos, VIRTUAL); recordType.tsymbol.scope = new Scope(env.scope.owner); recordType.tsymbol.scope.define(fieldName, field.symbol); LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); 
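// Build the expected return type of the iterator's next() method as a union of
// the anonymous record {| T value; |} constructed above (T = the stream
// constraint) and every member of the stream's completion type.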
retTypeMembers.add(recordType); retTypeMembers.addAll(types.getAllTypes(streamType.completionType)); BUnionType unionType = BUnionType.create(null); unionType.addAll(retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL); return unionType; } private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) { if (!cIExpr.initInvocation.argExprs.isEmpty() && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) { dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name.value); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return false; } return true; } private BType getObjectConstructorReturnType(BType objType, BType initRetType) { if (initRetType.tag == TypeTags.UNION) { LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(objType); retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes()); retTypeMembers.remove(symTable.nilType); BUnionType unionType = BUnionType.create(null, retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos, VIRTUAL); return unionType; } else if (initRetType.tag == TypeTags.NIL) { return objType; } return symTable.semanticError; } private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) { int objectCount = 0; for (BType memberType : lhsUnionType.getMemberTypes()) { int tag = memberType.tag; if (tag == TypeTags.OBJECT) { objectCount++; continue; } if (tag != TypeTags.INTERSECTION) { continue; } if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) { objectCount++; } } boolean containsSingleObject = objectCount == 1; List<BType> matchingLhsMemberTypes = new ArrayList<>(); for (BType memberType : lhsUnionType.getMemberTypes()) { if (memberType.tag != TypeTags.OBJECT) { continue; } if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol); } if (containsSingleObject) { return Collections.singletonList(memberType); } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc; if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) { matchingLhsMemberTypes.add(memberType); } } return matchingLhsMemberTypes; } private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) { if (matchingLhsMembers.isEmpty()) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } else if (matchingLhsMembers.size() == 1) { return matchingLhsMembers.get(0).tsymbol.type; } else { dlog.error(cIExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } } private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) { invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType)); if (function == null) { return invocationArguments.isEmpty(); } if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) { return true; } List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); List<BLangExpression> positionalArgs = new 
ArrayList<>(); for (BLangExpression argument : invocationArguments) { if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.isDefaultable) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.isDefaultable) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint); LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType); } } private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr, Location pos) { BRecordType retType = new BRecordType(null, Flags.ANONYMOUS); List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = 
waitExpr.keyValuePairs; for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type; BField field = new BField(names.fromIdNode(keyVal.key), null, new BVarSymbol(0, names.fromIdNode(keyVal.key), names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID, fieldType, null, keyVal.pos, VIRTUAL)); retType.fields.put(field.name.value, field); } retType.restFieldType = symTable.noType; retType.sealed = true; retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null, pos, VIRTUAL); return retType; } private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) { LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType(); if (bType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) bType).constraint); } else { memberTypes.add(bType); } } return memberTypes; } private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) { List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs = waitForAllExpr.keyValuePairs; keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType)); } private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) { List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs(); Map<String, BField> lhsFields = ((BRecordType) expType).fields; if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitExpr, waitExpr.pos)); resultType = symTable.semanticError; return; } for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) { String key = keyVal.key.value; if (!lhsFields.containsKey(key)) { if (((BRecordType) expType).sealed) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType); resultType = symTable.semanticError; } else { BType restFieldType = ((BRecordType) expType).restFieldType; checkWaitKeyValExpr(keyVal, restFieldType); } } else { checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, Location pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); keyVal.keyExpr.setBType(symbol.type); expr = 
keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); setEventualTypeForExpression(expr, type); } private void setEventualTypeForExpression(BLangExpression expression, BType currentExpectedType) { if (expression == null) { return; } if (isSimpleWorkerReference(expression)) { return; } BFutureType futureType = (BFutureType) expression.expectedType; BType currentType = futureType.constraint; if (types.containsErrorType(currentType)) { return; } BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType); if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) && !types.isAssignable(eventualType, currentExpectedType)) { dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); } futureType.constraint = eventualType; } private void setEventualTypeForWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (types.containsErrorType(resultType))) { return; } if (isSimpleWorkerReference(expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (expression.getKind() != NodeKind.BINARY_EXPR) || (types.containsErrorType(resultType))) { return; } if (!isReferencingNonWorker((BLangBinaryExpr) expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private boolean isSimpleWorkerReference(BLangExpression expression) { if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } BLangSimpleVarRef simpleVarRef = ((BLangSimpleVarRef) expression); BSymbol varRefSymbol = simpleVarRef.symbol; if (varRefSymbol == null) { return false; } return workerExists(env, simpleVarRef.variableName.value); } private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) { BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; if (isReferencingNonWorker(lhsExpr)) { return true; } return isReferencingNonWorker(rhsExpr); } private boolean
isReferencingNonWorker(BLangExpression expression) { if (expression.getKind() == NodeKind.BINARY_EXPR) { return isReferencingNonWorker((BLangBinaryExpr) expression); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression; BSymbol varRefSymbol = simpleVarRef.symbol; String varRefSymbolName = varRefSymbol.getName().value; if (workerExists(env, varRefSymbolName)) { return false; } } return true; } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType); resultType = symTable.semanticError; } } else { resultType = expType; } } public void visit(BLangWaitExpr waitExpr) { expType = new BFutureType(TypeTags.FUTURE, expType, null); checkExpr(waitExpr.getExpression(), env, expType); if (resultType.tag == TypeTags.UNION) { LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>()); if (memberTypes.size() == 1) { resultType = memberTypes.toArray(new BType[0])[0]; } else { resultType = BUnionType.create(null, memberTypes); } } else if (resultType != symTable.semanticError) { resultType = ((BFutureType) resultType).constraint; } BLangExpression waitFutureExpression = waitExpr.getExpression(); if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) { setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos); } else { setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos); } waitExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint); } } private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) { for (BType memberType : unionType.getMemberTypes()) { if (memberType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) memberType).constraint); } else { memberTypes.add(memberType); } } return memberTypes; } @Override public void visit(BLangTrapExpr trapExpr) { boolean firstVisit = trapExpr.expr.getBType() == null; BType actualType; BType exprType = checkExpr(trapExpr.expr, env, expType); boolean definedWithVar = expType == symTable.noType; if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = trapExpr.getBType(); exprType = trapExpr.expr.getBType(); } } if (expType == symTable.semanticError || exprType == symTable.semanticError) { actualType = symTable.semanticError; } else { LinkedHashSet<BType> resultTypes = new LinkedHashSet<>(); if (exprType.tag == TypeTags.UNION) { resultTypes.addAll(((BUnionType) exprType).getMemberTypes()); } else { 
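// a non-union expression contributes a single member type; error is then added
// unconditionally below, since a trapped panic surfaces as an error value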
resultTypes.add(exprType); } resultTypes.add(symTable.errorType); actualType = BUnionType.create(null, resultTypes); } resultType = types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType); } } public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr); } else { lhsType = checkExpr(binaryExpr.lhsExpr, env); } if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr); } else { rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); } BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr, env); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind, lhsType, rhsType); } else { binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; actualType = opSymbol.type.getReturnType(); } } } resultType = types.checkType(binaryExpr, actualType, expType); } private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); 
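// Speculative pass: check a clone of the operand against the expected
// float/decimal type with diagnostics muted; if it succeeds without errors,
// the real check below reuses that expected type, otherwise the operand is
// re-checked with no expected type (independent inference).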
this.dlog.mute(); expr.cloneAttempt++; BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (errorCount == 0 && exprCompatibleType != symTable.semanticError) { return checkExpr(expr, env, binaryExpr.expectedType); } else { return checkExpr(expr, env); } } private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) { while (env != null && env.node != node) { env = env.enclEnv; } return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null); } private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) { SymbolEnv clone = env.createClone(); while (clone != null && clone.node != node) { clone = clone.enclEnv; } if (clone != null) { clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv)); } else { clone = new SymbolEnv(node, null); } return clone; } private BLangNode getLastInputNodeFromEnv(SymbolEnv env) { while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) { env = env.enclEnv; } return env != null ? env.node : null; } public void visit(BLangTransactionalExpr transactionalExpr) { resultType = types.checkType(transactionalExpr, symTable.booleanType, expType); } public void visit(BLangCommitExpr commitExpr) { BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(commitExpr, actualType, expType); } private BType getXMLConstituents(BType type) { BType constituent = null; if (type.tag == TypeTags.XML) { constituent = ((BXMLType) type).constraint; } else if (TypeTags.isXMLNonSequenceType(type.tag)) { constituent = type; } return constituent; } private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) { if (expType.tag != TypeTags.DECIMAL) { return; } switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: checkExpr(binaryExpr.lhsExpr, env, expType); checkExpr(binaryExpr.rhsExpr, env, expType); break; default: break; } } public void visit(BLangElvisExpr elvisExpr) { BType lhsType = checkExpr(elvisExpr.lhsExpr, env); BType actualType = symTable.semanticError; if (lhsType != symTable.semanticError) { if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) { BUnionType unionType = (BUnionType) lhsType; LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream() .filter(type -> type.tag != TypeTags.NIL) .collect(Collectors.toCollection(LinkedHashSet::new)); if (memberTypes.size() == 1) { actualType = memberTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, memberTypes); } } else { dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType); } } BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType); BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { 
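// with an explicit expected type, both operands were already checked against
// it above, so the elvis expression simply assumes the expected type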
resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { if (accessExpr.resolvedType == null) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); } int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL; boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator); exprType = (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null) { if (!conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); } return; } BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, symResolver.resolveTypeNode(conversionExpr.typeNode, env)); conversionExpr.targetType = targetType; boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) { checkExpr(expr, env, targetType); } else { checkExpr(expr, env, symTable.noType); } BType exprType = expr.getBType(); if (types.isTypeCastable(expr, exprType, targetType, this.env)) { actualType = targetType; } else if (exprType != symTable.semanticError && exprType != symTable.noType) { dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } 
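// A lambda expression takes the type of its enclosed function and captures a
// clone of the current environment as its closure scope; it is registered on
// the enclosing package only during a real (non-muted) checking pass.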
@Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType()); bLangLambdaFunction.capturedClosureEnv = env.createClone(); if (!this.nonErrorLoggingCheck) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); } resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType)); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.getBType(); } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, Location pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && 
!missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if (constantSymbol.literalType.tag != TypeTags.STRING) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType); return null; } String constVal = (String) constantSymbol.value.value; int s = constVal.indexOf('{'); int e = constVal.lastIndexOf('}'); if (e > s + 1) { pkgSymbol.isUsed = true; String nsURI = constVal.substring(s + 1, e); String local = constVal.substring(e); return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos, SOURCE); } dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname); return null; } public void visit(BLangXMLAttribute bLangXMLAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env); BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name; checkExpr(name, xmlAttributeEnv, symTable.stringType); if (name.prefix.value.isEmpty()) { name.namespaceURI = null; } checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType); symbolEnter.defineNode(bLangXMLAttribute, env); } public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env); Set<String> usedPrefixes = new HashSet<>(); BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix; if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) { usedPrefixes.add(elemNamePrefix.value); } for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) { if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) { BLangXMLQuotedString value = attribute.value; if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) { dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION); } checkExpr(attribute, xmlElementEnv, symTable.noType); } BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix; if (prefix != null && !prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute 
attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) { if (childXMLExpressions.getKind() == NodeKind.XML_ELEMENT_LITERAL) { return symTable.xmlElementType; } if (childXMLExpressions.getKind() == NodeKind.XML_TEXT_LITERAL) { return symTable.xmlTextType; } if (childXMLExpressions.getKind() == NodeKind.XML_PI_LITERAL) { return symTable.xmlPIType; } return symTable.xmlCommentType; } public void muteErrorLog() { this.nonErrorLoggingCheck = true; this.dlog.mute(); } public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) { this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } } public BType getXMLSequenceType(BType xmlSubType) { switch (xmlSubType.tag) { case TypeTags.XML_ELEMENT: return new BXMLType(symTable.xmlElementType, null); case TypeTags.XML_COMMENT: return new BXMLType(symTable.xmlCommentType, null); case TypeTags.XML_PI: return new BXMLType(symTable.xmlPIType, null); default: return symTable.xmlTextType; } } public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT && expType != symTable.noType) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, "XML Sequence"); resultType = symTable.semanticError; return; } List<BType> xmlTypesInSequence = new ArrayList<>(); for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) { resultType = checkExpr(expressionItem, env, expType); if (!xmlTypesInSequence.contains(resultType)) { xmlTypesInSequence.add(resultType); } } if (expType.tag == TypeTags.XML || expType == symTable.noType) { if (xmlTypesInSequence.size() == 1) { resultType = getXMLSequenceType(xmlTypesInSequence.get(0)); return; } resultType = symTable.xmlType; return; } if (expType.tag == TypeTags.XML_TEXT) { resultType = symTable.xmlTextType; return; } for (BType item : ((BUnionType) expType).getMemberTypes()) { if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.xmlType); resultType = symTable.semanticError; return; } } resultType = symTable.xmlType; } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments; checkStringTemplateExprs(literalValues); BLangExpression xmlExpression = literalValues.get(0); if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL && ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) { resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType); return; } resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType, this.expType); } public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) { 
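// the PI target must be a string and every data fragment must be assignable
// to string; the literal is typed as the XML processing-instruction type, or
// checked against a compatible expected XML subtype when one is given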
checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType); checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType); } public void visit(BLangXMLQuotedString bLangXMLQuotedString) { checkStringTemplateExprs(bLangXMLQuotedString.textFragments); resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType); } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { dlog.error(xmlAttributeAccessExpr.pos, DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS); resultType = symTable.semanticError; } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { checkStringTemplateExprs(stringTemplateLiteral.exprs); resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType); if (type == symTable.semanticError) { resultType = type; return; } BObjectType literalType = (BObjectType) type; BType stringsType = literalType.fields.get("strings").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } BType insertionsType = literalType.fields.get("insertions").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } resultType = type; } private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) { if (expType == symTable.noType || containsAnyType(expType)) { return symTable.rawTemplateType; } BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos); BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE); if (type == symTable.semanticError) { return type; } if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type); return symTable.semanticError; } BObjectType litObjType = (BObjectType) type; BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol; if (litObjType.fields.size() > 2) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType); type = symTable.semanticError; } if (!objTSymbol.attachedFuncs.isEmpty()) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType); type = symTable.semanticError; } return type; } private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType, DiagnosticCode code, Location pos) { BType listType = fieldType.tag != TypeTags.INTERSECTION ? 
fieldType : ((BIntersectionType) fieldType).effectiveType; boolean errored = false; if (listType.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) listType; if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) { dlog.error(pos, code, arrayType.size, exprs.size()); return false; } for (BLangExpression expr : exprs) { errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored; } } else if (listType.tag == TypeTags.TUPLE) { BTupleType tupleType = (BTupleType) listType; final int size = exprs.size(); final int requiredItems = tupleType.tupleTypes.size(); if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) { dlog.error(pos, code, requiredItems, size); return false; } int i; List<BType> memberTypes = tupleType.tupleTypes; for (i = 0; i < requiredItems; i++) { errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored; } if (size > requiredItems) { for (; i < size; i++) { errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored; } } } else { throw new IllegalStateException("Expected a list type, but found: " + listType); } return errored; } private boolean containsAnyType(BType type) { if (type == symTable.anyType) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().contains(symTable.anyType); } return false; } private BType getCompatibleRawTemplateType(BType expType, Location pos) { if (expType.tag != TypeTags.UNION) { return expType; } BUnionType unionType = (BUnionType) expType; List<BType> compatibleTypes = new ArrayList<>(); for (BType type : unionType.getMemberTypes()) { if (types.isAssignable(type, symTable.rawTemplateType)) { compatibleTypes.add(type); } } if (compatibleTypes.size() == 0) { return expType; } if (compatibleTypes.size() > 1) { dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType, expType); return symTable.semanticError; } return compatibleTypes.get(0); } @Override public void visit(BLangIntRangeExpression intRangeExpression) { checkExpr(intRangeExpression.startExpr, env, symTable.intType); checkExpr(intRangeExpression.endExpr, env, symTable.intType); resultType = new BArrayType(symTable.intType); } @Override public void visit(BLangRestArgsExpression bLangRestArgExpression) { resultType = checkExpr(bLangRestArgExpression.expr, env, expType); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { if (expType.tag != TypeTags.TYPEDESC) { dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc); resultType = symTable.semanticError; return; } resultType = expType; } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType); bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType()); } @Override public void visit(BLangMatchExpression bLangMatchExpression) { SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env); checkExpr(bLangMatchExpression.expr, matchExprEnv); bLangMatchExpression.patternClauses.forEach(pattern -> { if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) { symbolEnter.defineNode(pattern.variable, matchExprEnv); } checkExpr(pattern.expr, matchExprEnv, expType); pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv)); }); 
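// the match expression's static type is derived from the pattern clause types
// collected below: a single type is used as-is, several types form a union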
LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { checkWithinQueryExpr = isWithinQuery(); visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { boolean cleanPrevEnvs = false; if (prevEnvs.empty()) { prevEnvs.push(env); cleanPrevEnvs = true; } if (breakToParallelQueryEnv) { queryEnvs.push(prevEnvs.peek()); } else { queryEnvs.push(env); } queryFinalClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression, collectionNode.getBType(), expType, queryExpr); actualType = (actualType == symTable.semanticError) ? actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); queryFinalClauses.pop(); queryEnvs.pop(); if (cleanPrevEnvs) { prevEnvs.pop(); } if (actualType.tag == TypeTags.TABLE) { BTableType tableType = (BTableType) actualType; tableType.constraintPos = queryExpr.pos; tableType.isTypeInlineDefined = true; if (!validateTableType(tableType)) { resultType = symTable.semanticError; return; } } checkWithinQueryExpr = false; resultType = actualType; } private boolean isWithinQuery() { return !queryEnvs.isEmpty() && !queryFinalClauses.isEmpty(); } private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType, BType targetType, BLangQueryExpr queryExpr) { List<BType> resultTypes = types.getAllTypes(targetType).stream() .filter(t -> !types.isAssignable(t, symTable.errorType)) .filter(t -> !types.isAssignable(t, symTable.nilType)) .collect(Collectors.toList()); if (resultTypes.isEmpty()) { resultTypes.add(symTable.noType); } BType actualType = symTable.semanticError; List<BType> selectTypes = new ArrayList<>(); List<BType> resolvedTypes = new ArrayList<>(); BType selectType, resolvedType; for (BType type : resultTypes) { switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); resolvedType = new BArrayType(selectType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); resolvedType = symTable.tableType; break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); resolvedType = symTable.streamType; break; case TypeTags.STRING: case TypeTags.XML: selectType = checkExpr(selectExp, env, type); resolvedType = selectType; break; case TypeTags.NONE: default: selectType = checkExpr(selectExp, env, type); resolvedType = getNonContextualQueryType(selectType, collectionType); break; } if (selectType != symTable.semanticError) { if (resolvedType.tag == TypeTags.STREAM) { queryExpr.isStream = true; } if (resolvedType.tag == TypeTags.TABLE) { 
queryExpr.isTable = true; } selectTypes.add(selectType); resolvedTypes.add(resolvedType); } } if (selectTypes.size() == 1) { BType errorType = getErrorType(collectionType, queryExpr); selectType = selectTypes.get(0); if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, selectType, errorType, null); } else if (queryExpr.isTable) { actualType = getQueryTableType(queryExpr, selectType); } else { actualType = resolvedTypes.get(0); } if (errorType != null && errorType.tag != TypeTags.NIL) { return BUnionType.create(null, actualType, errorType); } else { return actualType; } } else if (selectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes); return actualType; } else { return actualType; } } private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) { final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType); markReadOnlyForConstraintType(constraintType); tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) { for (IdentifierNode identifier : fieldList) { BField field = types.getTableConstraintField(constraintType, identifier.getValue()); if (field == null) { dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, identifier.getValue(), constraintType); } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { field.symbol.flags |= Flags.READONLY; } } } private void markReadOnlyForConstraintType(BType constraintType) { if (constraintType.tag != TypeTags.RECORD) { return; } BRecordType recordType = (BRecordType) constraintType; for (BField field : recordType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { return; } } if (recordType.sealed) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) { if (collectionType.tag == TypeTags.SEMANTIC_ERROR) { return null; } BType returnType = null, errorType = null; switch (collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).completionType; break; case TypeTags.OBJECT: returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType); break; default: BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); if (itrSymbol == this.symTable.notFoundSymbol) { return null; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol; returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType); } List<BType> errorTypes = new ArrayList<>(); if (returnType != null) { types.getAllTypes(returnType).stream() .filter(t -> types.isAssignable(t, symTable.errorType)) .forEach(errorTypes::add); } if (checkWithinQueryExpr && queryExpr.isStream) { if (errorTypes.isEmpty()) { errorTypes.add(symTable.nilType); } errorTypes.add(symTable.errorType); } if (!errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[0])); } } return 
errorType; } private BType getNonContextualQueryType(BType staticType, BType basicType) { BType resultType; switch (basicType.tag) { case TypeTags.TABLE: resultType = symTable.tableType; break; case TypeTags.STREAM: resultType = symTable.streamType; break; case TypeTags.XML: resultType = new BXMLType(staticType, null); break; case TypeTags.STRING: resultType = symTable.stringType; break; default: resultType = new BArrayType(staticType); break; } return resultType; } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek()); BLangDoClause doClause = queryAction.getDoClause(); queryFinalClauses.push(doClause); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); queryFinalClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop()); fromClause.env = fromEnv; queryEnvs.push(fromEnv); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); this.breakToParallelQueryEnv = prevBreakToParallelEnv; } @Override public void visit(BLangJoinClause joinClause) { boolean prevBreakEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop()); joinClause.env = joinEnv; queryEnvs.push(joinEnv); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } this.breakToParallelQueryEnv = prevBreakEnv; } @Override public void visit(BLangLetClause letClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop()); letClause.env = letEnv; queryEnvs.push(letEnv); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv); } } @Override public void visit(BLangWhereClause whereClause) { whereClause.env = handleFilterClauses(whereClause.expression); } @Override public void visit(BLangSelectClause selectClause) { SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop()); selectClause.env = selectEnv; queryEnvs.push(selectEnv); } @Override public void visit(BLangDoClause doClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop()); doClause.env = letEnv; queryEnvs.push(letEnv); } @Override public void visit(BLangOnConflictClause onConflictClause) { BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType); if (!types.isAssignable(exprType, symTable.errorType)) { dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, symTable.errorType, exprType); } } @Override public void visit(BLangLimitClause limitClause) { BType 
exprType = checkExpr(limitClause.expression, queryEnvs.peek()); if (!types.isAssignable(exprType, symTable.intType)) { dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType, exprType); } } @Override public void visit(BLangOnClause onClause) { BType lhsType, rhsType; BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek()); onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode); lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv); onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode); rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek()); if (!types.isAssignable(lhsType, rhsType)) { dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType); } } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.env = queryEnvs.peek(); for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) { BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env); if (!types.isOrderedType(exprType, false)) { dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED); } } } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private SymbolEnv handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.getBType(); if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop()); queryEnvs.push(filterEnv); return filterEnv; } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if (bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if (types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? 
"check" : "checkpanic"; BLangExpression exprWithCheckingKeyword = checkedExpr.expr; boolean firstVisit = exprWithCheckingKeyword.getBType() == null; BType checkExprCandidateType; if (expType == symTable.noType) { checkExprCandidateType = symTable.noType; } else { BType exprType = getCandidateType(checkedExpr, expType); if (exprType == symTable.semanticError) { checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType); } else { checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType); } } if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) { rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType); } BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.getBType(); exprType = checkedExpr.expr.getBType(); } } boolean isErrorType = types.isAssignable(exprType, symTable.errorType); if (exprType.tag != TypeTags.UNION && !isErrorType) { if (exprType.tag == TypeTags.READONLY) { checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{ add(symTable.errorType); }}; resultType = symTable.anyAndReadonly; return; } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.setBType(symTable.semanticError); return; } List<BType> errorTypes = new ArrayList<>(); List<BType> nonErrorTypes = new ArrayList<>(); if (!isErrorType) { for (BType memberType : ((BUnionType) exprType).getMemberTypes()) { if (memberType.tag == TypeTags.READONLY) { errorTypes.add(symTable.errorType); nonErrorTypes.add(symTable.anyAndReadonly); continue; } if (types.isAssignable(memberType, symTable.errorType)) { errorTypes.add(memberType); continue; } nonErrorTypes.add(memberType); } } else { errorTypes.add(exprType); } checkedExpr.equivalentErrorTypeList = errorTypes; if (errorTypes.isEmpty()) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.setBType(symTable.semanticError); return; } BType actualType; if (nonErrorTypes.size() == 0) { actualType = symTable.neverType; } else if (nonErrorTypes.size() == 1) { actualType = nonErrorTypes.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes)); } resultType = types.checkType(checkedExpr, actualType, expType); } private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) { BType rhsType = getCandidateType(checkedExpr, type); if (rhsType == symTable.semanticError) { rhsType = getCandidateType(checkedExpr, rhsType); } BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType); if (!types.isLax(candidateLaxType)) { return; } ArrayList<BLangExpression> argExprs = new ArrayList<>(); BType typedescType = new BTypedescType(expType, null); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = expType; typedescExpr.setBType(typedescType); argExprs.add(typedescExpr); BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE, argExprs, checkedExpr.expr, checkedExpr.pos); invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value)); invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); checkedExpr.expr = invocation; } private BType 
getCandidateLaxType(BLangNode expr, BType rhsType) { if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { return types.getSafeType(rhsType, false, true); } return rhsType; } private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); checkedExpr.expr.cloneAttempt++; BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr); BType rhsType; if (checkExprCandidateType == symTable.semanticError) { rhsType = checkExpr(clone, env); } else { rhsType = checkExpr(clone, env, checkExprCandidateType); } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return rhsType; } private BType addDefaultErrorIfNoErrorComponentFound(BType type) { for (BType t : types.getAllTypes(type)) { if (types.isAssignable(t, symTable.errorType)) { return type; } } return BUnionType.create(null, type, symTable.errorType); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env)); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type; actualType = BUnionType.create(null, annotType, symTable.nilType); } this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType); } private boolean isValidVariableReference(BLangExpression varRef) { switch (varRef.getKind()) { case SIMPLE_VARIABLE_REF: case RECORD_VARIABLE_REF: case TUPLE_VARIABLE_REF: case ERROR_VARIABLE_REF: case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case XML_ATTRIBUTE_ACCESS_EXPR: return true; default: dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType()); return false; } } private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) { if (origTargetType == symTable.readonlyType) { if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } if (origTargetType.tag != TypeTags.UNION) { return origTargetType; } boolean hasReadOnlyType = false; LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) { if (memberType == symTable.readonlyType) { hasReadOnlyType = true; continue; } nonReadOnlyTypes.add(memberType); } if (!hasReadOnlyType) { return origTargetType; } if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes); nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>())); return nonReadOnlyUnion; } private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) { SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError)); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); valueTypeNode.pos = symTable.builtinPos; paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.setBType(bType); } } public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void 
checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol); } } protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) { BLangInvokableNode encInvokable = env.enclInvokable; if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE && env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY && encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new 
ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } } private boolean isNotFunction(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) { return false; } if (isFunctionPointer(funcSymbol)) { return false; } return true; } private boolean isFunctionPointer(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) { return false; } return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE && funcSymbol.kind == SymbolKind.FUNCTION && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE; } private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr, BType expectedType) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) { BType target = getErrorCtorNamedArgTargetType(namedArgsExpression, expectedType); BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression); BType type = checkExpr(clone, env, target); if (type == symTable.semanticError) { checkExpr(namedArgsExpression, env); } else { checkExpr(namedArgsExpression, env, target); } namedArgs.add(namedArgsExpression); } return namedArgs; } private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) { if (expectedType == symTable.semanticError) { return symTable.semanticError; } if (expectedType.tag == TypeTags.MAP) { return ((BMapType) expectedType).constraint; } if (expectedType.tag != TypeTags.RECORD) { return symTable.semanticError; } BRecordType recordType = (BRecordType) expectedType; BField targetField = recordType.fields.get(namedArgsExpression.name.value); if (targetField != null) { return targetField.type; } if (!recordType.sealed && !recordType.fields.isEmpty()) { dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name, recordType); } return recordType.sealed ? 
symTable.noType : recordType.restFieldType; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol) { BSymbol invocableField = symResolver.resolveInvocableObjectField( iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol); if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) { funcSymbol = invocableField; iExpr.functionPointerInvocation = true; } } if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { if (checkInvalidActionInvocation(aInv)) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType()); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .resolveObjectMethod(aInv.pos, env, remoteMethodQName, (BObjectTypeSymbol) expType.tsymbol); if (remoteFuncSymbol == symTable.notFoundSymbol) { BSymbol invocableField = symResolver.resolveInvocableObjectField( aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol); if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) { remoteFuncSymbol = invocableField; aInv.functionPointerInvocation = true; } } if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && Symbols.isFlagOn(expType.flags, Flags.CLIENT) && 
types.isNeverTypeOrStructureTypeWithARequiredNeverMember ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL); } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); iExpr.argExprs.add(0, iExpr.expr); checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = types.checkType(iExpr, actualType, this.expType); } private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams, Set<String> requiredParamNames) { if (openIncRecordParams.size() != 1) { return null; } LinkedHashMap<String, BField> fields = ((BRecordType) openIncRecordParams.get(0).type).fields; for (String paramName : requiredParamNames) { if (!fields.containsKey(paramName)) { return null; } } return openIncRecordParams.get(0); } private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol, List<BVarSymbol> incRecordParams) { Set<String> requiredParamNames = new HashSet<>(); List<BVarSymbol> openIncRecordParams = new ArrayList<>(); for (BVarSymbol paramSymbol : invokableSymbol.params) { if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) && paramSymbol.type.getKind() == TypeKind.RECORD) { boolean recordWithDisallowFieldsOnly = true; LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields; for (String fieldName : fields.keySet()) { BField field = fields.get(fieldName); if (field.symbol.type.tag != TypeTags.NEVER) { recordWithDisallowFieldsOnly = false; incRecordParams.add(field.symbol); requiredParamNames.add(fieldName); } } if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) { openIncRecordParams.add(paramSymbol); } } else { requiredParamNames.add(paramSymbol.name.value); } } return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames); } private BType checkInvocationParam(BLangInvocation iExpr) { if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE); return symTable.semanticError; } if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol); List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes(); List<BVarSymbol> incRecordParams = new ArrayList<>(); BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol, incRecordParams); int parameterCountForPositionalArgs = paramTypes.size(); int parameterCountForNamedArgs = parameterCountForPositionalArgs + 
incRecordParams.size(); iExpr.requiredArgs = new ArrayList<>(); for (BVarSymbol symbol : invokableSymbol.params) { if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) || symbol.type.tag != TypeTags.RECORD) { continue; } LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields; if (fields.isEmpty()) { continue; } for (String field : fields.keySet()) { if (fields.get(field).type.tag != TypeTags.NEVER) { parameterCountForNamedArgs = parameterCountForNamedArgs - 1; break; } } } int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for (BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCountForPositionalArgs) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams, incRecordParamAllowAdditionalFields); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.isDefaultable) { continue; } requiredParams.add(nonRestParam); } for (BVarSymbol incRecordParam : incRecordParams) { if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) { requiredIncRecordParams.add(incRecordParam); } } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = nonRestArgs.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { BType expectedType = paramTypes.get(i); BType actualType = arg.getBType(); if (expectedType == symTable.charStringType) { arg.cloneAttempt++; BLangExpression clonedArg = nodeCloner.cloneNode(arg); BType argType = checkExprSilent(clonedArg, expectedType, env); if (argType != symTable.semanticError) { actualType = argType; } } types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.getBType(), expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = 
checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr, nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields); if (varSym == null) { dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); requiredIncRecordParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name, iExpr.name.value); errored = true; } } } if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) { for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) { for (BVarSymbol requiredParam : requiredParams) { if (requiredParam.type == requiredIncRecordParam.owner.type) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredIncRecordParam.name, iExpr.name.value); errored = true; } } } } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType listTypeRestArg = restParam == null ? null : restParam.type; BRecordType mappingTypeRestArg = null; if (vararg != null && nonRestArgs.size() < nonRestParams.size()) { PackageID pkgID = env.enclPkg.symbol.pkgID; List<BType> tupleMemberTypes = new ArrayList<>(); BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL); mappingTypeRestArg = new BRecordType(recordSymbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); BType tupleRestType = null; BVarSymbol fieldSymbol; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { BType paramType = paramTypes.get(j); BVarSymbol nonRestParam = nonRestParams.get(j); Name paramName = nonRestParam.name; tupleMemberTypes.add(paramType); boolean required = requiredParams.contains(nonRestParam); fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(required ? 
Flag.REQUIRED : Flag.OPTIONAL); }}), paramName, nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(paramName.value, new BField(paramName, null, fieldSymbol)); } if (listTypeRestArg != null) { if (listTypeRestArg.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) listTypeRestArg).eType; } else if (listTypeRestArg.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) listTypeRestArg; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; listTypeRestArg = tupleType; mappingTypeRestArg.sealed = true; mappingTypeRestArg.restFieldType = symTable.noType; mappingTypeRestArg.fields = fields; recordSymbol.type = mappingTypeRestArg; mappingTypeRestArg.tsymbol = recordSymbol; } if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } BType restType = null; if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); restType = this.resultType; } else if (vararg != null) { iExpr.restArgs.add(vararg); if (mappingTypeRestArg != null) { LinkedHashSet<BType> restTypes = new LinkedHashSet<>(); restTypes.add(listTypeRestArg); restTypes.add(mappingTypeRestArg); BType actualType = BUnionType.create(null, restTypes); checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation); } else { checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); } restType = this.resultType; } else if (!iExpr.restArgs.isEmpty()) { if (listTypeRestArg.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } else { BTupleType tupleType = (BTupleType) listTypeRestArg; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? 
tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (restType != symTable.semanticError && Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = unifier.build(retType, expType, iExpr, types, symTable, dlog); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.getBType(); if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } return; } Location pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = keyFunction.getBType().getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.getBType(); if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.getBType().getReturnType(); } if (!types.isOrderedType(returnType, false)) { dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } } private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr, List<BVarSymbol> nonRestParams, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { return nonRestParam; } } for (BVarSymbol incRecordParam : incRecordParams) { if (incRecordParam.getName().value.equals(argName.value)) { return incRecordParam; } } if (incRecordParamAllowAdditionalFields != null) { BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type; checkExpr(expr, env, incRecordType.restFieldType); if (!incRecordType.fields.containsKey(argName.value)) { return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL); } } return null; } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, 
retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case ELVIS_EXPR: case TERNARY_EXPR: case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; Location pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); fieldType = typeSymbolPair.determinedType; key.fieldSymbol = typeSymbolPair.fieldSymbol; readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.getBType(); if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticErrorCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) spreadExprType).fields.values()) { BType specFieldType = bField.type; BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name, mappingType.tsymbol); BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name, (BRecordType) mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { 
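/* A mismatched spread-operator field was found; the loop keeps running so every incompatible field is reported before the spread field check returns semanticError. */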
errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); fieldType = typeSymbolPair.determinedType; readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? 
((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { exprToCheck = nodeCloner.cloneNode(valueExpr); } else { ((BLangNode) field).setBType(fieldType); } return checkExpr(exprToCheck, this.env, fieldType); } private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return new TypeSymbolPair(null, symTable.semanticError); } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes)); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return new TypeSymbolPair(null, symTable.semanticError); } BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol); BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType); return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? 
(BVarSymbol) fieldSymbol : null, type); } private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key, BRecordType recordType) { if (fieldSymbol != symTable.notFoundSymbol) { return fieldSymbol.type; } if (recordType.sealed) { dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key, recordType.tsymbol.type.getKind().typeName(), recordType); return symTable.semanticError; } return recordType.restFieldType; } private BType getAllFieldType(BRecordType recordType) { LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BField field : recordType.fields.values()) { possibleTypes.add(field.type); } BType restFieldType = recordType.restFieldType; if (restFieldType != null && restFieldType != symTable.noType) { possibleTypes.add(restFieldType); } return BUnionType.create(null, possibleTypes); } private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) { if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return false; } return true; } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) { return true; } dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return false; } private BType addNilForNillableAccessType(BType actualType) { if (actualType.isNullable()) { return actualType; } return BUnionType.create(null, actualType, symTable.nilType); } private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return symTable.semanticError; } if (recordType.sealed) { return symTable.semanticError; } return recordType.restFieldType; } private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess, Name fieldName, BObjectType objectType) { BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos, this.env, fieldName, objectType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value)); fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, 
objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol); return symTable.semanticError; } if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) && !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) { fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol); fieldSymbol.flags &= ~Flags.ISOLATED; fieldSymbol.type.flags &= ~Flags.ISOLATED; } bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkTupleFieldType(BType tupleType, int indexValue) { BTupleType bTupleType = (BTupleType) tupleType; if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) { return bTupleType.restType; } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) { return symTable.semanticError; } return bTupleType.tupleTypes.get(indexValue); } private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) { BLangExpression startTagName = bLangXMLElementLiteral.startTagName; checkExpr(startTagName, xmlElementEnv, symTable.stringType); BLangExpression endTagName = bLangXMLElementLiteral.endTagName; if (endTagName == null) { return; } checkExpr(endTagName, xmlElementEnv, symTable.stringType); if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME && startTagName.equals(endTagName)) { return; } if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) { return; } dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH); } private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) { for (BLangExpression expr : exprs) { checkExpr(expr, env); BType type = expr.getBType(); if (type == symTable.semanticError) { continue; } if (!types.isNonNilSimpleBasicTypeOrString(type)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType), type); } } } /** * Concatenate the consecutive text type nodes, and get the reduced set of children. 
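* Interleaved non-XML expressions that are compatible with string are buffered and merged
* into a single XML text node. The xmlElementEnv parameter is the symbol environment used
* to type-check the child expressions.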
* * @param exprs Child nodes * @param xmlElementEnv * @return Reduced set of children */ private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) { List<BLangExpression> newChildren = new ArrayList<>(); List<BLangExpression> tempConcatExpressions = new ArrayList<>(); for (BLangExpression expr : exprs) { BType exprType; if (expr.getKind() == NodeKind.QUERY_EXPR) { exprType = checkExpr(expr, xmlElementEnv, expType); } else { exprType = checkExpr(expr, xmlElementEnv); } if (TypeTags.isXMLTypeTag(exprType.tag)) { if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); tempConcatExpressions = new ArrayList<>(); } newChildren.add(expr); continue; } BType type = expr.getBType(); if (type.tag >= TypeTags.JSON && !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) { if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType, symTable.xmlType), type); } continue; } tempConcatExpressions.add(expr); } if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); } return newChildren; } private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.textFragments = exprs; xmlTextLiteral.pos = exprs.get(0).pos; xmlTextLiteral.setBType(symTable.xmlType); return xmlTextLiteral; } private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) { accessExpr.originalType = actualType; BUnionType unionType = BUnionType.create(null, actualType); if (returnsNull(accessExpr)) { unionType.add(symTable.nilType); } BType parentType = accessExpr.expr.getBType(); if (accessExpr.errorSafeNavigation && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) { unionType.add(symTable.errorType); } if (unionType.getMemberTypes().size() == 1) { return unionType.getMemberTypes().toArray(new BType[0])[0]; } return unionType; } private boolean returnsNull(BLangAccessExpression accessExpr) { BType parentType = accessExpr.expr.getBType(); if (parentType.isNullable() && parentType.tag != TypeTags.JSON) { return true; } if (parentType.tag != TypeTags.MAP) { return false; } if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.getBType().tag == TypeTags.MAP) { BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint; return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON; } return false; } private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.OBJECT) { return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if 
(fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return symTable.semanticError; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType == symTable.semanticError) { return fieldType; } return BUnionType.create(null, fieldType, symTable.nilType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.isEmpty()) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
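// If any member record lacked the field, the optional access may evaluate to nil.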
addNilForNillableAccessType(fieldType) : fieldType; } private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes, Name fieldName) { RecordUnionDiagnostics recordUnionDiagnostics = new RecordUnionDiagnostics(); for (BType memberType : memberTypes) { BRecordType recordMember = (BRecordType) memberType; if (recordMember.getFields().containsKey(fieldName.getValue())) { BType individualFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordMember); if (individualFieldType == symTable.semanticError) { recordUnionDiagnostics.optionalInRecords.add(recordMember); } } else { recordUnionDiagnostics.undeclaredInRecords.add(recordMember); } } return recordUnionDiagnostics; } private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BRecordType recordVarRefType = (BRecordType) varRefType; boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue()); if (isFieldDeclared) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS); } else if (recordVarRefType.sealed) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType); } else { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName, varRefType); } } else { LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName); if (recUnionInfo.hasUndeclaredAndOptional()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_AND_OPTIONAL_FIELDS_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords), recUnionInfo.recordsToString(recUnionInfo.optionalInRecords)); } else if (recUnionInfo.hasUndeclared()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords)); } else if (recUnionInfo.hasOptional()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.optionalInRecords)); } } } private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) { actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) { actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); if (actualType != symTable.semanticError) { fieldAccessExpr.originalType = actualType; return actualType; } if (!fieldAccessExpr.isLValue) { logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName); return actualType; } actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType); } } else if (types.isLax(varRefType)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, 
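// Lax types such as json permit field access only for reads, never as an assignment target.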
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType); return symTable.semanticError; } if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } BType laxFieldAccessType = getLaxFieldAccessType(varRefType); actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.originalType = laxFieldAccessType; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); } actualType = symTable.xmlType; fieldAccessExpr.originalType = actualType; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType); } return actualType; } private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr; String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value; BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix)); if (nsSymbol == symTable.notFoundSymbol) { dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsPrefixedFieldAccess.nsPrefix); } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst( nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos); } else { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol; } } private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) { return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType); } private BType getLaxFieldAccessType(BType exprType) { switch (exprType.tag) { case TypeTags.JSON: return symTable.jsonType; case TypeTags.XML: case TypeTags.XML_ELEMENT: return symTable.stringType; case TypeTags.MAP: return ((BMapType) exprType).constraint; case TypeTags.UNION: BUnionType unionType = (BUnionType) exprType; if (types.isSameType(symTable.jsonType, unionType)) { return symTable.jsonType; } LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType))); return memberTypes.size() == 1 ? 
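// Collapse to the single type when every union member yields the same lax access type.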
memberTypes.iterator().next() : BUnionType.create(null, memberTypes); } return symTable.semanticError; } private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; boolean nillableExprType = false; BType effectiveType = varRefType; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); } } if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) { actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName); if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType, fieldName); } fieldAccessExpr.nilSafeNavigation = nillableExprType; fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (types.isLax(effectiveType)) { BType laxFieldAccessType = getLaxFieldAccessType(effectiveType); actualType = accessCouldResultInError(effectiveType) ? BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); actualType = accessCouldResultInError(effectiveType) ? 
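// Lookups on lax values (e.g. json) can fail at runtime, so the result is widened with error.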
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType); } if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private boolean accessCouldResultInError(BType type) { if (type.tag == TypeTags.JSON) { return true; } if (type.tag == TypeTags.MAP) { return false; } if (type.tag == TypeTags.XML) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError); } else { return false; } } private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) { BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType()); boolean nillableExprType = false; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } if (nillableExprType) { varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); if (!types.isSubTypeOfMapping(varRefType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } } } } BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; BType actualType = symTable.semanticError; if (types.isSubTypeOfMapping(varRefType)) { checkExpr(indexExpr, this.env, symTable.stringType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType); if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD, fieldName, indexBasedAccessExpr.expr.getBType()); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } indexBasedAccessExpr.nilSafeNavigation = nillableExprType; indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? 
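// Intermediate nodes of a nil-safe navigation record the original type without the injected nil.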
actualType : types.getTypeWithoutNil(actualType); } else if (types.isSubTypeOfList(varRefType)) { checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType); indexBasedAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) { dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr)); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } } else if (types.isAssignable(varRefType, symTable.stringType)) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } indexBasedAccessExpr.originalType = symTable.charStringType; actualType = symTable.charStringType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (indexBasedAccessExpr.isLValue) { indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); return actualType; } BType type = checkExpr(indexExpr, this.env, symTable.intType); if (type == symTable.semanticError) { return type; } indexBasedAccessExpr.originalType = varRefType; actualType = varRefType; } else if (varRefType.tag == TypeTags.TABLE) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS, varRefType); return symTable.semanticError; } BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType(); BType keyTypeConstraint = tableType.keyTypeConstraint; if (tableType.keyTypeConstraint == null) { keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()). 
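// No explicit key type constraint was declared, so derive one from the table's key field names.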
fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint); if (keyTypeConstraint == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE, indexBasedAccessExpr.expr); return symTable.semanticError; } } if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) { checkExpr(indexExpr, this.env, keyTypeConstraint); if (indexExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } else { List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes; if (keyConstraintTypes.size() != multiKeyExpressionList.size()) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } for (int i = 0; i < multiKeyExpressionList.size(); i++) { BLangExpression keyExpr = multiKeyExpressionList.get(i); checkExpr(keyExpr, this.env, keyConstraintTypes.get(i)); if (keyExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } } if (expType.tag != TypeTags.NONE) { BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType); if (resultType == symTable.semanticError) { return symTable.semanticError; } } BType constraint = tableType.constraint; actualType = addNilForNillableAccessType(constraint); indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (varRefType == symTable.semanticError) { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); return symTable.semanticError; } else { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (nillableExprType && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private Long getConstIndex(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value : (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private String getConstFieldName(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value : (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType, BArrayType arrayType) { BType actualType = symTable.semanticError; switch (indexExprType.tag) { case TypeTags.INT: BLangExpression indexExpr = indexBasedAccess.indexExpr; if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) { actualType = arrayType.eType; break; } actualType = getConstIndex(indexExpr) >= arrayType.size ? 
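// A constant index into a closed array must lie within its fixed length.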
symTable.semanticError : arrayType.eType; break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) indexExprType; boolean validIndexExists = false; for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); if (indexValue >= 0 && (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) { validIndexExists = true; break; } } if (!validIndexExists) { return symTable.semanticError; } actualType = arrayType.eType; break; case TypeTags.UNION: List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(matchedType -> (BFiniteType) matchedType) .collect(Collectors.toList()); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType); if (elementType == symTable.semanticError) { return symTable.semanticError; } actualType = arrayType.eType; } return actualType; } private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.ARRAY) { return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type); } if (type.tag == TypeTags.TUPLE) { return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType()); } LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.INT: if (isConst(indexExpr)) { actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue()); } else { BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType(); LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>()); actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); } break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); BType fieldType = checkTupleFieldType(tuple, indexValue); if (fieldType.tag != TypeTags.SEMANTIC_ERROR) { possibleTypes.add(fieldType); } } if (possibleTypes.size() == 0) { return symTable.semanticError; } actualType = possibleTypes.size() == 1 ? 
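// Each value in the finite index type selects one tuple member; several candidates form a union.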
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) { tupleType.tupleTypes .forEach(memberType -> { if (memberType.tag == TypeTags.UNION) { collectMemberTypes((BUnionType) memberType, memberTypes); } else { memberTypes.add(memberType); } }); return memberTypes; } private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.MAP) { BType constraint = ((BMapType) type).constraint; return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType()); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
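// Union members without a matching field make the member access nillable.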
addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr)); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.isLValue) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
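// A single candidate field type is used as-is; multiple candidates are unioned.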
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private List<BType> getTypesList(BType type) { if (type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) type; return new ArrayList<>(unionType.getMemberTypes()); } else { return Lists.of(type); } } private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType()); LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>(); for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternExprType = pattern.expr.getBType(); matchExprTypes.addAll(getTypesList(patternExprType)); if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) { return new LinkedHashSet<BType>() { { add(symTable.semanticError); } }; } assignable = this.types.isAssignable(type, pattern.variable.getBType()); if (assignable) { break; } } if (!assignable) { matchExprTypes.add(type); } } return matchExprTypes; } private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) { if (encounteredTypes.contains(type)) { return false; } encounteredTypes.add(type); switch (type.tag) { case TypeTags.UNION: for (BType bType1 : ((BUnionType) type).getMemberTypes()) { if (couldHoldTableValues(bType1, encounteredTypes)) { return true; } } return false; case TypeTags.MAP: return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes); case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (couldHoldTableValues(field.type, encounteredTypes)) { return true; } } return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes); case TypeTags.ARRAY: return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes); case TypeTags.TUPLE: for (BType bType : ((BTupleType) type).getTupleTypes()) { if (couldHoldTableValues(bType, encounteredTypes)) { return true; } } return false; } return false; } private boolean isConst(BLangExpression expression) { if 
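// An expression is constant if it is a valid constant expression node or a reference to a constant symbol.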
(ConstantAnalyzer.isValidConstantExpressionNode(expression)) { return true; } if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; } private Name getCurrentCompUnit(BLangNode node) { return names.fromString(node.pos.lineRange().filePath()); } private BType getRepresentativeBroadType(List<BType> inferredTypeList) { for (int i = 0; i < inferredTypeList.size(); i++) { BType type = inferredTypeList.get(i); if (type.tag == TypeTags.SEMANTIC_ERROR) { return type; } for (int j = i + 1; j < inferredTypeList.size(); j++) { BType otherType = inferredTypeList.get(j); if (otherType.tag == TypeTags.SEMANTIC_ERROR) { return otherType; } if (types.isAssignable(otherType, type)) { inferredTypeList.remove(j); j -= 1; continue; } if (types.isAssignable(type, otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? 
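// readonly fields are checked against the readonly type so that the inferred field type is immutable.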
checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, location, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, location); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); return recordSymbol; } private String getKeyName(BLangExpression key) { return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
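// A record key at this point is either an identifier reference or a string literal.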
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value; } private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString, BType exprType, boolean required, boolean readonly) { if (!nonRestFieldTypes.containsKey(keyString)) { nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required, readonly)); return; } FieldInfo fieldInfo = nonRestFieldTypes.get(keyString); List<BType> typeList = fieldInfo.types; if (isUniqueType(typeList, exprType)) { typeList.add(exprType); } if (required && !fieldInfo.required) { fieldInfo.required = true; } } private boolean isUniqueType(List<BType> typeList, BType type) { boolean isRecord = type.tag == TypeTags.RECORD; for (BType bType : typeList) { if (isRecord) { if (type == bType) { return false; } } else if (types.isSameType(type, bType)) { return false; } } return true; } private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType, BType expType) { if (expType == symTable.semanticError) { return expType; } boolean unionExpType = expType.tag == TypeTags.UNION; if (expType == mutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) { return mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.getBType(); if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names)); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild); } } } private void logUndefinedSymbolError(Location pos, String name) { if (!missingNodesHelper.isMissingNode(name)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name); } } private void 
markTypeAsIsolated(BType actualType) { actualType.flags |= Flags.ISOLATED; actualType.tsymbol.flags |= Flags.ISOLATED; } private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) { return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS); } private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) { List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions; BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType(); BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env, names.fromIdNode(userDefinedType.pkgAlias), names.fromIdNode(userDefinedType.typeName)); for (BLangClassDefinition classDefinition : classDefinitions) { if (classDefinition.symbol == symbol) { return classDefinition; } } return null; } private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType, BLangClassDefinition classDefForConstructor, SymbolEnv env, boolean logErrors) { boolean hasNeverReadOnlyField = false; for (BField field : actualObjectType.fields.values()) { BType fieldType = field.type; if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) { analyzeObjectConstructor(classDefForConstructor, env); hasNeverReadOnlyField = true; if (!logErrors) { return; } dlog.error(field.pos, DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE, fieldType); } } if (hasNeverReadOnlyField) { return; } classDefForConstructor.flagSet.add(Flag.READONLY); actualObjectType.flags |= Flags.READONLY; actualObjectType.tsymbol.flags |= Flags.READONLY; ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types, anonymousModelHelper, symTable, names, cIExpr.pos); analyzeObjectConstructor(classDefForConstructor, env); } private void markConstructedObjectIsolatedness(BObjectType actualObjectType) { if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) { markTypeAsIsolated(actualObjectType); return; } for (BField field : actualObjectType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) { return; } } markTypeAsIsolated(actualObjectType); } private void markLeafNode(BLangAccessExpression accessExpression) { BLangNode parent = accessExpression.parent; if (parent == null) { accessExpression.leafNode = true; return; } NodeKind kind = parent.getKind(); while (kind == NodeKind.GROUP_EXPR) { parent = parent.parent; if (parent == null) { accessExpression.leafNode = true; break; } kind = parent.getKind(); } if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) { accessExpression.leafNode = true; } } private static class FieldInfo { List<BType> types; boolean required; boolean readonly; private FieldInfo(List<BType> types, boolean required, boolean readonly) { this.types = types; this.required = required; this.readonly = readonly; } } private static class TypeSymbolPair { private BVarSymbol fieldSymbol; private BType determinedType; public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) { this.fieldSymbol = fieldSymbol; this.determinedType = determinedType; } } private static class RecordUnionDiagnostics { Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>(); Set<BRecordType> optionalInRecords = new LinkedHashSet<>(); boolean hasUndeclaredAndOptional() { return 
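// True only when the union produced both undeclared-field and optional-field diagnostics.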
undeclaredInRecords.size() > 0 && optionalInRecords.size() > 0; } boolean hasUndeclared() { return undeclaredInRecords.size() > 0; } boolean hasOptional() { return optionalInRecords.size() > 0; } String recordsToString(Set<BRecordType> recordTypeSet) { StringBuilder recordNames = new StringBuilder(); int recordSetSize = recordTypeSet.size(); int index = 0; for (BRecordType recordType : recordTypeSet) { index++; recordNames.append(recordType.tsymbol.getName().getValue()); if (recordSetSize > 1) { if (index == recordSetSize - 1) { recordNames.append("', and '"); } else if (index < recordSetSize) { recordNames.append("', '"); } } } return recordNames.toString(); } } }
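// A minimal standalone sketch, not part of the compiler source: it mirrors the
// recordsToString logic above to show how record names are joined for the union
// diagnostics. The class name and the input record names are hypothetical.
class RecordsToStringSketch {
    public static void main(String[] args) {
        java.util.Set<String> recordNames =
                new java.util.LinkedHashSet<>(java.util.Arrays.asList("Person", "Employee", "Student"));
        StringBuilder joined = new StringBuilder();
        int size = recordNames.size();
        int index = 0;
        for (String name : recordNames) {
            index++;
            joined.append(name);
            if (size > 1) {
                if (index == size - 1) {
                    joined.append("', and '"); // separator before the last name
                } else if (index < size) {
                    joined.append("', '"); // separator between earlier names
                }
            }
        }
        // Prints: Person', 'Employee', and 'Student -- the diagnostic message
        // template supplies the outer quotes around the whole list.
        System.out.println(joined);
    }
}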
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLog dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private Unifier unifier; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangNode> queryFinalClauses; private boolean checkWithinQueryExpr = false; private BLangMissingNodesHelper missingNodesHelper; private boolean breakToParallelQueryEnv = false; /** * Expected types or inherited types. */ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = 
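// Shared compiler components are singletons resolved from the CompilerContext.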
SemanticAnalyzer.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.queryFinalClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); this.unifier = new Unifier(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.getBType(); } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.setTypeCheckedType(resultType); expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) { if (!nonErrorLoggingCheck) { semanticAnalyzer.analyzeNode(node, env); } } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.getBType().tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); if (exprType.tag == TypeTags.UNION) { dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS, xmlNavigation.expr.getBType()); } BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN ? 
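// children() navigation yields any xml value, while element-filtered navigation yields an element sequence.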
symTable.xmlType : symTable.xmlElementSeqType; types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = symTable.xmlElementSeqType; } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag); Object literalValue = literalExpr.value; if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, 
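// Ordered probe of the finite type's value space: the int literal is
// retried as each numeric basic type and int subtype in turn, and the
// first match fixes both the literal's value representation and its type.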
symTable.unsigned32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); BType intSubType = null; boolean intOrIntCompatibleTypeFound = false; for (BType memType : memberTypes) { if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) || memType.tag == TypeTags.BYTE) { intSubType = memType; } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { intOrIntCompatibleTypeFound = true; } } if (intOrIntCompatibleTypeFound) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } if (intSubType != null) { return setLiteralValueAndGetType(literalExpr, intSubType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag 
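// Float literals may also be contextually typed by a finite expected type
// (e.g. `type Pi 3.14;`); float members are preferred, and decimal members
// are considered only when the literal has no explicit float discriminator: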
== TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { return unionMember; } } } else if (literalType.tag == TypeTags.DECIMAL) { return decimalLiteral(literalValue, literalExpr, expType); } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) { if (expType.tag == TypeTags.CHAR_STRING) { return symTable.charStringType; } if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); for (BType memType : memberTypes) { if (TypeTags.isStringTypeTag(memType.tag)) { return setLiteralValueAndGetType(literalExpr, memType); } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { return setLiteralValueAndGetType(literalExpr, symTable.charStringType); } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType, literalExpr)) { setLiteralValueForFiniteType(literalExpr, symTable.charStringType); return literalType; } } } boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else { if (this.expType.tag == TypeTags.FINITE) { boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else if (this.expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) this.expType; boolean foundMember = unionType.getMemberTypes() .stream() .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr)); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } } if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) { literalType = new BArrayType(symTable.byteType); } return literalType; } private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) { Set<BType> memberTypes = expType.getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == desiredType.tag || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, desiredType); } BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType); if (finiteType != 
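// A non-error result means some finite member of the union holds decimal
// values, so the literal is retried against that synthesized finite type: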
symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } return symTable.noType; } private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType, int targetMemberTypeTag) { for (BLangExpression valueExpr : finiteType.getValueSpace()) { if (valueExpr.getBType().tag == targetMemberTypeTag && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) { return true; } } return false; } private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) { String literal = String.valueOf(literalValue); if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType); resultType = symTable.semanticError; return resultType; } if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType); if (unionMember != symTable.noType) { return unionMember; } } literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal); resultType = symTable.decimalType; return symTable.decimalType; } private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) { types.setImplicitCastExpr(literalExpr, type, this.expType); this.resultType = type; literalExpr.isFiniteContext = true; } private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) { List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(memFiniteType -> (BFiniteType) memFiniteType) .collect(Collectors.toList()); if (finiteTypeMembers.isEmpty()) { return symTable.semanticError; } int tag = matchType.tag; Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>(); for (BFiniteType finiteType : finiteTypeMembers) { Set<BLangExpression> set = new HashSet<>(); for (BLangExpression expression : finiteType.getValueSpace()) { if (expression.getBType().tag == tag) { set.add(expression); } } matchedValueSpace.addAll(set); } if (matchedValueSpace.isEmpty()) { return symTable.semanticError; } return new BFiniteType(null, matchedValueSpace); } private BType getIntLiteralType(Location location, BType expType, BType literalType, Object literalValue) { switch (expType.tag) { case TypeTags.INT: return symTable.intType; case TypeTags.BYTE: if (types.isByteLiteralValue((Long) literalValue)) { return symTable.byteType; } break; case TypeTags.SIGNED32_INT: if (types.isSigned32LiteralValue((Long) literalValue)) { return symTable.signed32IntType; } break; case TypeTags.SIGNED16_INT: if (types.isSigned16LiteralValue((Long) literalValue)) { return symTable.signed16IntType; } break; case TypeTags.SIGNED8_INT: if (types.isSigned8LiteralValue((Long) literalValue)) { return symTable.signed8IntType; } break; case TypeTags.UNSIGNED32_INT: if (types.isUnsigned32LiteralValue((Long) literalValue)) { return symTable.unsigned32IntType; } break; case TypeTags.UNSIGNED16_INT: if 
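// Each signed/unsigned subtype case admits the literal only when its value
// fits the subtype's range (e.g. Unsigned16: 0..65535); otherwise control
// falls through to the incompatible-types error below: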
(types.isUnsigned16LiteralValue((Long) literalValue)) { return symTable.unsigned16IntType; } break; case TypeTags.UNSIGNED8_INT: if (types.isUnsigned8LiteralValue((Long) literalValue)) { return symTable.unsigned8IntType; } break; default: } dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType); resultType = symTable.semanticError; return resultType; } @Override public void visit(BLangListConstructorExpr listConstructor) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.setBType(inherentMemberType); } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
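// An intersection expected type (e.g. `table<Row> & readonly`) is checked
// via its computed effective type: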
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.cloneNode(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) { validateMapConstraintTable(applicableExpType); return; } if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { dlog.resetErrorCount(); BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.cloneNode(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, 
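// Note the inverted return convention: true signals a failed key-value
// validation (resultType is already semanticError); false means the key
// specifier, if present, was applied to the table type successfully.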
BTableType tableType) { if (tableConstructorExpr.tableKeySpecifier != null) { if (!(validateTableKeyValue(getTableKeyNameList(tableConstructorExpr. tableKeySpecifier), tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return true; } tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); } return false; } private BType inferTableMemberType(List<BType> memTypes, BType expType) { if (memTypes.isEmpty()) { return ((BTableType) expType).constraint; } LinkedHashSet<BType> result = new LinkedHashSet<>(); result.add(memTypes.get(0)); BUnionType unionType = BUnionType.create(null, result); for (int i = 1; i < memTypes.size(); i++) { BType source = memTypes.get(i); if (!types.isAssignable(source, unionType)) { result.add(source); unionType = BUnionType.create(null, result); } } if (unionType.getMemberTypes().size() == 1) { return memTypes.get(0); } return unionType; } private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) { BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier; List<String> keySpecifierFieldNames = new ArrayList<>(); Set<BField> allFieldSet = new LinkedHashSet<>(); for (BType memType : memTypes) { allFieldSet.addAll(((BRecordType) memType).fields.values()); } Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet); for (BType memType : memTypes) { commonFieldSet.retainAll(((BRecordType) memType).fields.values()); } List<String> requiredFieldNames = new ArrayList<>(); if (keySpecifier != null) { for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) { requiredFieldNames.add(((BLangIdentifier) identifierNode).value); keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value); } } List<String> fieldNames = new ArrayList<>(); for (BField field : allFieldSet) { String fieldName = field.name.value; if (fieldNames.contains(fieldName)) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName); return symTable.semanticError; } fieldNames.add(fieldName); boolean isOptional = true; for (BField commonField : commonFieldSet) { if (commonField.name.value.equals(fieldName)) { isOptional = false; requiredFieldNames.add(commonField.name.value); } } if (isOptional) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL)); } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY)); } else if (requiredFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)); } } return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos); } private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL); for (BField field : allFieldSet) { recordSymbol.scope.define(field.name, field.symbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = allFieldSet.stream().collect(getFieldCollector()); recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); 
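// Register the synthesized record as a module-level type definition so
// later phases (desugar, code generation) can reference it; it is sealed
// below because the inferred field set is exhaustive.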
TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); recordType.sealed = true; recordType.restFieldType = symTable.noType; return recordType; } private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() { BinaryOperator<BField> mergeFunc = (u, v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new); } private boolean validateTableType(BTableType tableType) { BType constraint = tableType.constraint; if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) { dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint); resultType = symTable.semanticError; return false; } return true; } private boolean validateKeySpecifierInTableConstructor(BTableType tableType, List<BLangRecordLiteral> recordLiterals) { List<String> fieldNameList = tableType.fieldNameList; if (fieldNameList != null) { return validateTableKeyValue(fieldNameList, recordLiterals); } return true; } private boolean validateTableKeyValue(List<String> keySpecifierFieldNames, List<BLangRecordLiteral> recordLiterals) { for (String fieldName : keySpecifierFieldNames) { for (BLangRecordLiteral recordLiteral : recordLiterals) { BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName); if (recordKeyValueField != null && isConstExpression(recordKeyValueField.getValue())) { continue; } dlog.error(recordLiteral.pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName); resultType = symTable.semanticError; return false; } } return true; } private boolean isConstExpression(BLangExpression expression) { switch(expression.getKind()) { case LITERAL: case NUMERIC_LITERAL: case STRING_TEMPLATE_LITERAL: case XML_ELEMENT_LITERAL: case XML_TEXT_LITERAL: case LIST_CONSTRUCTOR_EXPR: case TABLE_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: case TYPE_CONVERSION_EXPR: case UNARY_EXPR: case BINARY_EXPR: case TYPE_TEST_EXPR: case TERNARY_EXPR: return true; case SIMPLE_VARIABLE_REF: return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; case GROUP_EXPR: return isConstExpression(((BLangGroupExpr) expression).expression); default: return false; } } private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral, String fieldName) { for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) { BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField; if (fieldName.equals(recordKeyValueField.key.toString())) { return recordKeyValueField; } } return null; } public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, Location pos) { for (String fieldName : fieldNameList) { BField field = types.getTableConstraintField(constraint, fieldName); if (field == null) { dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName); resultType = symTable.semanticError; return false; } if 
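// Key fields must additionally be anydata so that key values have a
// well-defined structural notion of equality for table lookups: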
(!types.isAssignable(field.type, symTable.anydataType)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint); resultType = symTable.semanticError; return false; } } return true; } private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { BType constraintType = tableType.constraint; if (tableConstructorExpr.tableKeySpecifier != null) { List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); if (tableType.fieldNameList == null && !validateKeySpecifier(fieldNameList, constraintType.tag != TypeTags.INTERSECTION ? constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
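// Each key specifier field is matched positionally against the key type
// constraint's member types (a tuple for composite keys, e.g.
// `table<Row> key<[string, int]>`):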
fieldNameIdentifierList; int index = 0; for (IdentifierNode identifier : fieldNameIdentifierList) { BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value); if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT, fieldNameIdentifierList.toString(), memberTypes.toString()); resultType = symTable.semanticError; return false; } index++; } } return true; } public void validateMapConstraintTable(BType expType) { if (expType != null && (((BTableType) expType).fieldNameList != null || ((BTableType) expType).keyTypeConstraint != null) && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) { dlog.error(((BTableType) expType).keyPos, DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } resultType = expType; } private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) { List<String> fieldNamesList = new ArrayList<>(); for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) { fieldNamesList.add(((BLangIdentifier) identifier).value); } return fieldNamesList; } private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) { if (fieldNames == null) { return symTable.semanticError; } List<BType> memTypes = new ArrayList<>(); for (String fieldName : fieldNames) { BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName); if (tableConstraintField == null) { return symTable.semanticError; } BType fieldType = tableConstraintField.type; memTypes.add(fieldType); } if (memTypes.size() == 1) { return memTypes.get(0); } return new BTupleType(memTypes); } private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.nonErrorLoggingCheck = true; this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.cloneNode(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, 
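// More than one union member accepted the list constructor without
// errors, so no single applicable type can be chosen: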
DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag == TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.cloneNode(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
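// Broad expected types map to a canonical array shape (json -> json[],
// anydata -> anydata[], any -> any[]), switching to the effective
// immutable variant when the expected type is readonly: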
symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.INFERRED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED; } else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? symTable.semanticError : arrayType; } private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) { List<BLangExpression> exprs = listConstructor.exprs; List<BType> memberTypes = tupleType.tupleTypes; BType restType = tupleType.restType; int listExprSize = exprs.size(); int memberTypeSize = memberTypes.size(); if (listExprSize < memberTypeSize) { for (int i = listExprSize; i < memberTypeSize; i++) { if (!types.hasFillerValue(memberTypes.get(i))) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } } } else if (listExprSize > memberTypeSize && restType == null) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } boolean errored = false; int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize; for (int i = 0; i < nonRestCountToCheck; i++) { if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) { errored = true; } } for (int i = nonRestCountToCheck; i < exprs.size(); i++) { if (exprIncompatible(restType, exprs.get(i)) && !errored) { errored = true; } } return errored ? 
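// Member and rest expressions were all checked above even after the first
// failure, so every incompatible element is reported in a single pass: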
symTable.semanticError : tupleType; } private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return types.checkType(listConstructor, inferredType, symTable.readonlyType); } for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(symTable.readonlyType, expr)) { return symTable.semanticError; } } return symTable.readonlyType; } private boolean exprIncompatible(BType eType, BLangExpression expr) { if (expr.typeChecked) { return expr.getBType() == symTable.semanticError; } BLangExpression exprToCheck = expr; if (this.nonErrorLoggingCheck) { expr.cloneAttempt++; exprToCheck = nodeCloner.cloneNode(expr); } return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError; } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) { return checkExprList(exprs, env, symTable.noType); } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) { List<BType> types = new ArrayList<>(); SymbolEnv prevEnv = this.env; BType preExpType = this.expType; this.env = env; this.expType = expType; for (BLangExpression e : exprs) { checkExpr(e, this.env, expType); types.add(resultType); } this.env = prevEnv; this.expType = preExpType; return types; } private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) { List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } BTupleType tupleType = new BTupleType(memTypes); if (expType.tag != TypeTags.READONLY) { return tupleType; } tupleType.flags |= Flags.READONLY; return tupleType; } public void visit(BLangRecordLiteral recordLiteral) { int expTypeTag = expType.tag; if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); } private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; } Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? 
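// Collect the applicable type's declared fields so constructor fields
// marked `readonly` can be compared against them; only fields that are not
// already readonly in the type require a narrowed, synthesized record type.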
((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; } PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType(); } else { readOnlyFieldType = ((BLangRecordVarNameField) field).getBType(); } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true; for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; long origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, 
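// As with inferred table constraints, the narrowed record type is
// registered as a type definition for use by subsequent phases: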
names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); if (applicableMappingType.tag == TypeTags.MAP) { recordLiteral.expectedType = applicableMappingType; } return recordType; } private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType, mappingConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { if (!erroredExpType) { reportIncompatibleMappingConstructorError(mappingConstructor, bType); } validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor); } if (tag == TypeTags.INTERSECTION) { return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor); } BType possibleType = getMappingConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.MAP: return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType : symTable.semanticError; case TypeTags.RECORD: boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType); boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType, mappingConstructor.fields, mappingConstructor.pos); return isSpecifiedFieldsValid && hasAllRequiredFields ? 
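// Both validations ran unconditionally, so missing-required-field and
// invalid-field errors are all reported before the result is decided: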
possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; } private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
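// Mirrors the list-constructor case: json -> map<json>, anydata ->
// map<anydata>, any -> map<any>, with the immutable variant when readonly: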
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; } private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); } private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return isFieldsValid; } private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields, Location pos) { HashSet<String> specFieldNames = getFieldNames(specifiedFields); boolean hasAllRequiredFields = true; for (BField field : type.fields.values()) { String fieldName = field.name.value; if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); if (hasAllRequiredFields) { hasAllRequiredFields = false; } } } return hasAllRequiredFields; } private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) { HashSet<String> fieldNames = new HashSet<>(); for (RecordLiteralNode.RecordField specifiedField : specifiedFields) { if (specifiedField.isKeyValueField()) { String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField); if (name == null) { continue; } fieldNames.add(name); } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField)); } else { fieldNames.addAll(getSpreadOpFieldRequiredFieldNames( (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField)); } } return fieldNames; } private String getKeyValueFieldName(BLangRecordKeyValueField field) { BLangRecordKey key = field.key; if (key.computedKey) { return null; } BLangExpression keyExpr = key.expr; if 
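// A non-computed key is either a bare identifier ({name: v}) or a string
// literal ({"name": v}); computed keys ([expr]) already returned null above: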
(keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return ((BLangSimpleVarRef) keyExpr).variableName.value; } else if (keyExpr.getKind() == NodeKind.LITERAL) { return (String) ((BLangLiteral) keyExpr).value; } return null; } private String getVarNameFieldName(BLangRecordVarNameField field) { return field.variableName.value; } private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) { BType spreadType = checkExpr(field.expr, env); if (spreadType.tag != TypeTags.RECORD) { return Collections.emptyList(); } List<String> fieldNames = new ArrayList<>(); for (BField bField : ((BRecordType) spreadType).getFields().values()) { if (!Symbols.isOptional(bField.symbol)) { fieldNames.add(bField.name.value); } } return fieldNames; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier != null) { String workerName = workerFlushExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } else { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName)); if (symbol != symTable.notFoundSymbol) { workerFlushExpr.workerSymbol = symbol; } } } BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(workerFlushExpr, actualType, expType); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if (symTable.notFoundSymbol.equals(symbol)) { syncSendExpr.workerType = symTable.semanticError; } else { syncSendExpr.workerType = symbol.type; syncSendExpr.workerSymbol = symbol; } syncSendExpr.env = this.env; checkExpr(syncSendExpr.expr, this.env); if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.getBType()); } String workerName = syncSendExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } syncSendExpr.expectedType = expType; resultType = expType == symTable.noType ? 
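// A sync send used as a statement (no expected type) types as nil; in an
// expression context the surrounding expected type is adopted as-is: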
symTable.nilType : expType; } @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; workerReceiveExpr.workerSymbol = symbol; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.setBType(this.expType); resultType = this.expType; } private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; } @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.getBType(), expType); resultType = constRef.getBType(); } public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.isLValue) { varRefExpr.setBType(this.symTable.anyType); } else { varRefExpr.setBType(this.symTable.semanticError); dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, true, varName, names.originalNameFromIdNode(varRefExpr.variableName), env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.getBType(); return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol = symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { varRefExpr.symbol = symTable.notFoundSymbol; dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName); if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.getBType().tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos, env); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? 
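// A type name referenced as a value denotes a typedesc: an existing
// typedesc type is used directly, any other type is wrapped in one: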
symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type; if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { varRefExpr.symbol = symbol; logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); } @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference; bLangVarReference.isLValue = true; checkExpr(recordRefField.variableReference, env); if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), names.originalNameFromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(), env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.getBType() == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BType restFieldType; if (restParam.getBType().tag == TypeTags.RECORD) { restFieldType = ((BRecordType) restParam.getBType()).restFieldType; } else if (restParam.getBType().tag == TypeTags.MAP) { restFieldType = ((BMapType) restParam.getBType()).constraint; } else { restFieldType = 
restParam.getBType(); } bRecordType.restFieldType = restFieldType; } resultType = bRecordType; } @Override public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.setBType(bType); checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) { dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.getBType()); } } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) { dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.getBType()); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.isLValue = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.isLValue = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } } if (unresolvedReference) { resultType = symTable.semanticError; return; } BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.getBType(); } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint; } else { dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.getBType(), symTable.detailType); resultType = symTable.semanticError; return; } BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly ? 
symTable.errorType.detailType : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC); resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); } private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) { for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { checkExpr(detailItem.expr, env); checkExpr(detailItem, env, detailItem.expr.getBType()); } if (varRefExpr.restVar != null) { checkExpr(varRefExpr.restVar, env); } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); } } @Override public void visit(BLangTupleVarRef varRefExpr) { List<BType> results = new ArrayList<>(); for (int i = 0; i < varRefExpr.expressions.size(); i++) { ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true; results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).isLValue = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } if (checkedType.tag == TypeTags.ARRAY) { actualType.restType = ((BArrayType) checkedType).eType; } else { actualType.restType = checkedType; } } resultType = types.checkType(varRefExpr, actualType, expType); } /** * This method will recursively check if a multidimensional array has at least one open sealed dimension. * * @param arrayType array to check if open sealed * @return true if at least one dimension is open sealed */ public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.INFERRED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; } /** * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the * enclosing invokable node) which is needed to look up closure variables. The variable lookup will start from the * enclosing invokable node's environment, which is outside of the scope of a lambda function. 
*/ private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclInvokable != null && env.enclInvokable == encInvokable) { return findEnclosingInvokableEnv(env.enclEnv, encInvokable); } return env; } private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclType != null && env.enclType == recordTypeNode) { return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode); } return env; } private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) { return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag)); } public void visit(BLangFieldBasedAccess fieldAccessExpr) { markLeafNode(fieldAccessExpr); BLangExpression containerExpression = fieldAccessExpr.expr; if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = fieldAccessExpr.isCompoundAssignmentLValue; } BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env)); if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess && !isXmlAccess(fieldAccessExpr)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION); resultType = symTable.semanticError; return; } BType actualType; if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); if (actualType != symTable.semanticError && (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); } private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; } private boolean isInitializationInInit(BType type) { BObjectType objectType = (BObjectType) 
type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null && env.enclInvokable.symbol == initializerFunc.symbol; } private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) { if (type.tag == TypeTags.RECORD) { if (Symbols.isFlagOn(type.flags, Flags.READONLY)) { return true; } BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (!field.name.value.equals(fieldName)) { continue; } return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY); } return recordType.sealed; } boolean allInvalidUpdates = true; for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) { allInvalidUpdates = false; } } return allInvalidUpdates; } private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; BType exprType = expr.getBType(); if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) { return true; } if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr) && exprType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes(); return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType); } return false; } public void visit(BLangIndexBasedAccess indexBasedAccessExpr) { markLeafNode(indexBasedAccessExpr); BLangExpression containerExpression = indexBasedAccessExpr.expr; if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, ((BLangTypedescExpr) containerExpression).typeNode); resultType = symTable.semanticError; return; } if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = indexBasedAccessExpr.isCompoundAssignmentLValue; } boolean isStringValue = containerExpression.getBType() != null && containerExpression.getBType().tag == TypeTags.STRING; if (!isStringValue) { checkExpr(containerExpression, this.env, symTable.noType); } if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY && containerExpression.getBType().tag != TypeTags.TABLE) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED, containerExpression.getBType()); resultType = symTable.semanticError; return; } BType actualType = checkIndexAccessExpr(indexBasedAccessExpr); BType exprType = containerExpression.getBType(); BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = symTable.semanticError; 
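// The member access updates a readonly record field, so the assignment was flagged above and checking stops here with the error type. (Illustrative Ballerina source reaching this branch: `rec["id"] = 5;` where `id` is a readonly field of `rec`.)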
return; } } if (indexBasedAccessExpr.isLValue) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.setBType(actualType); resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.getBType(); switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef; if (userProvidedTypeRef != null) { symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR); } validateErrorConstructorPositionalArgs(errorConstructorExpr); List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr); List<BType> errorDetailTypes = new ArrayList<>(); for (BType expandedCandidate : expandedCandidates) { BType detailType = ((BErrorType) expandedCandidate).detailType; errorDetailTypes.add(detailType); } BType detailCandidate; if (errorDetailTypes.size() == 1) { detailCandidate = errorDetailTypes.get(0); } else { detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes)); } BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr); BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env); int index = errorDetailTypes.indexOf(inferredDetailType); BType selectedCandidate = index < 0 ? 
symTable.semanticError : expandedCandidates.get(index); if (selectedCandidate != symTable.semanticError && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) { checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType); resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); return; } if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType); } BErrorType errorType; if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) { errorType = (BErrorType) userProvidedTypeRef.getBType(); } else if (expandedCandidates.size() == 1) { errorType = (BErrorType) expandedCandidates.get(0); } else { errorType = symTable.errorType; } List<BLangNamedArgsExpression> namedArgs = checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType); BType detailType = errorType.detailType; if (detailType.tag == TypeTags.MAP) { BType errorDetailTypeConstraint = ((BMapType) detailType).constraint; for (BLangNamedArgsExpression namedArgExpr: namedArgs) { if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) { dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType()); } } } else if (detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream() .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED) .map(f -> f.name.value) .collect(Collectors.toCollection(LinkedList::new)); LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields; for (BLangNamedArgsExpression namedArg : namedArgs) { BField field = targetFields.get(namedArg.name.value); Location pos = namedArg.pos; if (field == null) { if (targetErrorDetailRec.sealed) { dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC, namedArg.name, targetErrorDetailRec); } else if (targetFields.isEmpty() && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, namedArg.name, targetErrorDetailRec); } } else { missingRequiredFields.remove(namedArg.name.value); if (!types.isAssignable(namedArg.expr.getBType(), field.type)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, field.type, namedArg.expr.getBType()); } } } for (String requiredField : missingRequiredFields) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField); } } if (userProvidedTypeRef != null) { errorConstructorExpr.setBType(userProvidedTypeRef.getBType()); } else { errorConstructorExpr.setBType(errorType); } resultType = errorConstructorExpr.getBType(); } private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) { if (errorConstructorExpr.positionalArgs.isEmpty()) { return; } checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType); int positionalArgCount = errorConstructorExpr.positionalArgs.size(); if (positionalArgCount > 1) { checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType); } } private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) { boolean 
prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); BType type = checkExpr(expr, env, expType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return type; } private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) { BLangRecordKeyValueField field = (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue(); field.valueExpr = (BLangExpression) namedArg.getExpression(); BLangLiteral expr = new BLangLiteral(); expr.value = namedArg.getName().value; expr.setBType(symTable.stringType); field.key = new BLangRecordKey(expr); recordLiteral.fields.add(field); } return recordLiteral; } private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef; if (errorTypeRef == null) { if (expType.tag == TypeTags.ERROR) { return List.of(expType); } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) { return expandExpectedErrorTypes(expType); } } else { if (errorTypeRef.getBType().tag != TypeTags.ERROR) { if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) { dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef); } } else { return List.of(errorTypeRef.getBType()); } } return List.of(symTable.errorType); } private List<BType> expandExpectedErrorTypes(BType candidateType) { List<BType> expandedCandidates = new ArrayList<>(); if (candidateType.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) { if (types.isAssignable(memberType, symTable.errorType)) { if (memberType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) memberType).effectiveType); } else { expandedCandidates.add(memberType); } } } } else if (types.isAssignable(candidateType, symTable.errorType)) { if (candidateType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) candidateType).effectiveType); } else { expandedCandidates.add(candidateType); } } return expandedCandidates; } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.getBType().tag) { case TypeTags.OBJECT: checkActionInvocation(aInv, (BObjectType) varRef.getBType()); break; case TypeTags.RECORD: checkFieldFunctionPointer(aInv, this.env); break; case TypeTags.NONE: dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name); resultType = symTable.semanticError; break; case TypeTags.SEMANTIC_ERROR: default: dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); resultType = symTable.semanticError; break; } } private boolean invalidModuleAliasUsage(BLangInvocation invocation) { Name pkgAlias = names.fromIdNode(invocation.pkgAlias); if (pkgAlias != Names.EMPTY) { dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE); return true; } return false; } public void visit(BLangLetExpression letExpression) { BLetSymbol 
letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), new Name(String.format("$let_symbol_%d$", letCount++)), env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner, letExpression.pos); letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env); } BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType); types.checkType(letExpression, exprType, this.expType); } private void checkInLangLib(BLangInvocation iExpr, BType varRefType) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value, iExpr.expr.getBType()); resultType = symTable.semanticError; return; } if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) { return; } checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType); } private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType, BSymbol langLibMethodSymbol) { if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) { return false; } String packageId = langLibMethodSymbol.pkgID.name.value; if (!modifierFunctions.containsKey(packageId)) { return false; } String funcName = langLibMethodSymbol.name.value; if (!modifierFunctions.get(packageId).contains(funcName)) { return false; } if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) { return false; } if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) { return false; } dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return true; } private boolean isFixedLengthList(BType type) { switch(type.tag) { case TypeTags.ARRAY: return (((BArrayType) type).state != BArrayState.OPEN); case TypeTags.TUPLE: return (((BTupleType) type).restType == null); case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType member : unionType.getMemberTypes()) { if (!isFixedLengthList(member)) { return false; } } return true; default: return false; } } private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) { String invocationName = iExpr.name.getValue(); if (!listLengthModifierFunctions.contains(invocationName)) { return; } if (isFixedLengthList(varRefType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName, varRefType); resultType = symTable.semanticError; return; } if (isShiftOnIncompatibleTuples(varRefType, invocationName)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName, varRefType); resultType = symTable.semanticError; return; } } private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) { if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) && hasDifferentTypeThanRest((BTupleType) varRefType)) { return true; } if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) { BUnionType unionVarRef = (BUnionType) varRefType; boolean allMemberAreFixedShapeTuples = true; for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if (!hasDifferentTypeThanRest((BTupleType) 
member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); return false; } if (fieldSymbol.kind != SymbolKind.FUNCTION) { checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD, fieldSymbol.type); return false; } iExpr.symbol = fieldSymbol; iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType); checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos, DiagnosticErrorCode errCode, Object... diagMsgArgs) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(pos, errCode, diagMsgArgs); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol); } } @Override public void visit(BLangObjectConstructorExpression objectCtorExpression) { if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType(); if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType; objectType.typeIdSet = expObjType.typeIdSet; } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); resultType = symTable.semanticError; return; } } } visit(objectCtorExpression.typeInit); } private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) { if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) { return false; } Set<BType> visitedTypes = new HashSet<>(); if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) { return false; } return typeIdSets.size() <= 1; } private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) { if (type.tag == TypeTags.OBJECT) { var objectType = (BObjectType) type; typeIdSets.add(objectType.typeIdSet); return true; } if (type.tag == TypeTags.UNION) { if (!visitedTypes.add(type)) { return true; } for (BType member : ((BUnionType) type).getMemberTypes()) { if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) { return false; } } return true; } return false; } private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) { Set<BTypeIdSet> typeIdSets = new HashSet<>(); if (!isDefiniteObjectType(type, typeIdSets)) { return false; } if (typeIdSets.isEmpty()) { objectType.typeIdSet = 
BTypeIdSet.emptySet(); return true; } var typeIdIterator = typeIdSets.iterator(); if (typeIdIterator.hasNext()) { BTypeIdSet typeIdSet = typeIdIterator.next(); objectType.typeIdSet = typeIdSet; return true; } return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag == TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: BObjectType actualObjectType = (BObjectType) actualType; if (isObjectConstructorExpr(cIExpr, actualObjectType)) { BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr, env); List<BLangType> typeRefs = classDefForConstructor.typeRefs; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol); if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, false); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, true); } else { analyzeObjectConstructor(classDefForConstructor, pkgEnv); } markConstructedObjectIsolatedness(actualObjectType); } if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() > 1) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.completionType != null) { BType completionType = actualStreamType.completionType; if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString()); resultType = symTable.semanticError; return; } } if (!cIExpr.initInvocation.argExprs.isEmpty()) { BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (constructType.tag != TypeTags.OBJECT) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } BAttachedFunction 
closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType, BLangCompilerConstants.CLOSE_FUNC); if (closeFunc != null) { BType closeableIteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type; if (!types.isAssignable(constructType, closeableIteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } else { BType iteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type; if (!types.isAssignable(constructType, iteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); if (nextReturnType != null) { types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } else { dlog.error(constructType.tsymbol.getPosition(), DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType); } } if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType, actualType); resultType = symTable.semanticError; return; } resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.setBType(symTable.nilType); if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.setBType(matchedType); resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType()); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(Location pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, recordType, env.scope.owner, pos, VIRTUAL); recordType.tsymbol.scope = new Scope(env.scope.owner); recordType.tsymbol.scope.define(fieldName, field.symbol); LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); 
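// Assemble the expected return type of the iterator's next() method: a union of the value record (record {| T value; |} for the stream's constraint T) and the members of the stream's completion type, e.g. error or nil.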
retTypeMembers.add(recordType); retTypeMembers.addAll(types.getAllTypes(streamType.completionType)); BUnionType unionType = BUnionType.create(null); unionType.addAll(retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL); return unionType; } private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) { if (!cIExpr.initInvocation.argExprs.isEmpty() && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) { dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name.value); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return false; } return true; } private BType getObjectConstructorReturnType(BType objType, BType initRetType) { if (initRetType.tag == TypeTags.UNION) { LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(objType); retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes()); retTypeMembers.remove(symTable.nilType); BUnionType unionType = BUnionType.create(null, retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos, VIRTUAL); return unionType; } else if (initRetType.tag == TypeTags.NIL) { return objType; } return symTable.semanticError; } private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) { int objectCount = 0; for (BType memberType : lhsUnionType.getMemberTypes()) { int tag = memberType.tag; if (tag == TypeTags.OBJECT) { objectCount++; continue; } if (tag != TypeTags.INTERSECTION) { continue; } if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) { objectCount++; } } boolean containsSingleObject = objectCount == 1; List<BType> matchingLhsMemberTypes = new ArrayList<>(); for (BType memberType : lhsUnionType.getMemberTypes()) { if (memberType.tag != TypeTags.OBJECT) { continue; } if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol); } if (containsSingleObject) { return Collections.singletonList(memberType); } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc; if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) { matchingLhsMemberTypes.add(memberType); } } return matchingLhsMemberTypes; } private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) { if (matchingLhsMembers.isEmpty()) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } else if (matchingLhsMembers.size() == 1) { return matchingLhsMembers.get(0).tsymbol.type; } else { dlog.error(cIExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } } private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) { invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType)); if (function == null) { return invocationArguments.isEmpty(); } if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) { return true; } List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); List<BLangExpression> positionalArgs = new 
ArrayList<>(); for (BLangExpression argument : invocationArguments) { if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.isDefaultable) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.isDefaultable) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint); LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType); } } private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr, Location pos) { BRecordType retType = new BRecordType(null, Flags.ANONYMOUS); List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = 
waitExpr.keyValuePairs; for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type; BField field = new BField(names.fromIdNode(keyVal.key), null, new BVarSymbol(0, names.fromIdNode(keyVal.key), names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID, fieldType, null, keyVal.pos, VIRTUAL)); retType.fields.put(field.name.value, field); } retType.restFieldType = symTable.noType; retType.sealed = true; retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null, pos, VIRTUAL); return retType; } private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) { LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType(); if (bType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) bType).constraint); } else { memberTypes.add(bType); } } return memberTypes; } private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) { List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs = waitForAllExpr.keyValuePairs; keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType)); } private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) { List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs(); Map<String, BField> lhsFields = ((BRecordType) expType).fields; if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitExpr, waitExpr.pos)); resultType = symTable.semanticError; return; } for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) { String key = keyVal.key.value; if (!lhsFields.containsKey(key)) { if (((BRecordType) expType).sealed) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType); resultType = symTable.semanticError; } else { BType restFieldType = ((BRecordType) expType).restFieldType; checkWaitKeyValExpr(keyVal, restFieldType); } } else { checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, Location pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); keyVal.keyExpr.setBType(symbol.type); expr = 
keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); setEventualTypeForExpression(expr, type); } private void setEventualTypeForExpression(BLangExpression expression, BType currentExpectedType) { if (expression == null) { return; } if (isSimpleWorkerReference(expression)) { return; } BFutureType futureType = (BFutureType) expression.expectedType; BType currentType = futureType.constraint; if (types.containsErrorType(currentType)) { return; } BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType); if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) && !types.isAssignable(eventualType, currentExpectedType)) { dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); } futureType.constraint = eventualType; } private void setEventualTypeForWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (types.containsErrorType(resultType))) { return; } if (isSimpleWorkerReference(expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (expression.getKind() != NodeKind.BINARY_EXPR) || (types.containsErrorType(resultType))) { return; } if (!isReferencingNonWorker((BLangBinaryExpr) expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private boolean isSimpleWorkerReference(BLangExpression expression) { if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } BLangSimpleVarRef simpleVarRef = ((BLangSimpleVarRef) expression); BSymbol varRefSymbol = simpleVarRef.symbol; if (varRefSymbol == null) { return false; } return workerExists(env, simpleVarRef.variableName.value); } private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) { BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; if (isReferencingNonWorker(lhsExpr)) { return true; } return isReferencingNonWorker(rhsExpr); } private boolean 
isReferencingNonWorker(BLangExpression expression) { if (expression.getKind() == NodeKind.BINARY_EXPR) { return isReferencingNonWorker((BLangBinaryExpr) expression); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression; BSymbol varRefSymbol = simpleVarRef.symbol; String varRefSymbolName = varRefSymbol.getName().value; if (workerExists(env, varRefSymbolName)) { return false; } } return true; } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType); resultType = symTable.semanticError; } } else { resultType = expType; } } public void visit(BLangWaitExpr waitExpr) { expType = new BFutureType(TypeTags.FUTURE, expType, null); checkExpr(waitExpr.getExpression(), env, expType); if (resultType.tag == TypeTags.UNION) { LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>()); if (memberTypes.size() == 1) { resultType = memberTypes.toArray(new BType[0])[0]; } else { resultType = BUnionType.create(null, memberTypes); } } else if (resultType != symTable.semanticError) { resultType = ((BFutureType) resultType).constraint; } BLangExpression waitFutureExpression = waitExpr.getExpression(); if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) { setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos); } else { setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos); } waitExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint); } } private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) { for (BType memberType : unionType.getMemberTypes()) { if (memberType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) memberType).constraint); } else { memberTypes.add(memberType); } } return memberTypes; } @Override public void visit(BLangTrapExpr trapExpr) { boolean firstVisit = trapExpr.expr.getBType() == null; BType actualType; BType exprType = checkExpr(trapExpr.expr, env, expType); boolean definedWithVar = expType == symTable.noType; if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = trapExpr.getBType(); exprType = trapExpr.expr.getBType(); } } if (expType == symTable.semanticError || exprType == symTable.semanticError) { actualType = symTable.semanticError; } else { LinkedHashSet<BType> resultTypes = new LinkedHashSet<>(); if (exprType.tag == TypeTags.UNION) { resultTypes.addAll(((BUnionType) exprType).getMemberTypes()); } else { 
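// A non-union expression type becomes the sole non-error member, so `trap expr` is typed as T|error for an expression of static type T.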
resultTypes.add(exprType); } resultTypes.add(symTable.errorType); actualType = BUnionType.create(null, resultTypes); } resultType = types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType); } } public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr); } else { lhsType = checkExpr(binaryExpr.lhsExpr, env); } if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr); } else { rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); } BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr, env); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind, lhsType, rhsType); } else { binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; actualType = opSymbol.type.getReturnType(); } } } resultType = types.checkType(binaryExpr, actualType, expType); } private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); 
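/* diagnostics stay muted below while a cloned copy of the operand is speculatively checked against the binary expression's expected type */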
this.dlog.mute(); expr.cloneAttempt++; BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (errorCount == 0 && exprCompatibleType != symTable.semanticError) { return checkExpr(expr, env, binaryExpr.expectedType); } else { return checkExpr(expr, env); } } private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) { while (env != null && env.node != node) { env = env.enclEnv; } return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null); } private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) { SymbolEnv clone = env.createClone(); while (clone != null && clone.node != node) { clone = clone.enclEnv; } if (clone != null) { clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv)); } else { clone = new SymbolEnv(node, null); } return clone; } private BLangNode getLastInputNodeFromEnv(SymbolEnv env) { while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) { env = env.enclEnv; } return env != null ? env.node : null; } public void visit(BLangTransactionalExpr transactionalExpr) { resultType = types.checkType(transactionalExpr, symTable.booleanType, expType); } public void visit(BLangCommitExpr commitExpr) { BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(commitExpr, actualType, expType); } private BType getXMLConstituents(BType type) { BType constituent = null; if (type.tag == TypeTags.XML) { constituent = ((BXMLType) type).constraint; } else if (TypeTags.isXMLNonSequenceType(type.tag)) { constituent = type; } return constituent; } private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) { if (expType.tag != TypeTags.DECIMAL) { return; } switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: checkExpr(binaryExpr.lhsExpr, env, expType); checkExpr(binaryExpr.rhsExpr, env, expType); break; default: break; } } public void visit(BLangElvisExpr elvisExpr) { BType lhsType = checkExpr(elvisExpr.lhsExpr, env); BType actualType = symTable.semanticError; if (lhsType != symTable.semanticError) { if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) { BUnionType unionType = (BUnionType) lhsType; LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream() .filter(type -> type.tag != TypeTags.NIL) .collect(Collectors.toCollection(LinkedHashSet::new)); if (memberTypes.size() == 1) { actualType = memberTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, memberTypes); } } else { dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType); } } BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType); BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { 
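/* an explicit expected type was already enforced on both operands above, so it becomes the result type */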
resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { if (accessExpr.resolvedType == null) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); } int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL; boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator); exprType = (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null) { if (!conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); } return; } BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, symResolver.resolveTypeNode(conversionExpr.typeNode, env)); conversionExpr.targetType = targetType; boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) { checkExpr(expr, env, targetType); } else { checkExpr(expr, env, symTable.noType); } BType exprType = expr.getBType(); if (types.isTypeCastable(expr, exprType, targetType, this.env)) { actualType = targetType; } else if (exprType != symTable.semanticError && exprType != symTable.noType) { dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } 
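/* A lambda expression takes the invokable type of the function it wraps; for example (illustrative Ballerina source, not from this file) `var f = function (int i) returns int { return i * 2; };` gives f the type `function (int) returns int`. The enclosing environment is cloned so closure variables can be resolved during desugaring, and the lambda is registered with the enclosing package only when this is not a muted speculative check (nonErrorLoggingCheck). */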
@Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType()); bLangLambdaFunction.capturedClosureEnv = env.createClone(); if (!this.nonErrorLoggingCheck) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); } resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType)); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.getBType(); } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, Location pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && 
!missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if (constantSymbol.literalType.tag != TypeTags.STRING) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType); return null; } String constVal = (String) constantSymbol.value.value; int s = constVal.indexOf('{'); int e = constVal.lastIndexOf('}'); if (e > s + 1) { pkgSymbol.isUsed = true; String nsURI = constVal.substring(s + 1, e); String local = constVal.substring(e); return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos, SOURCE); } dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname); return null; } public void visit(BLangXMLAttribute bLangXMLAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env); BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name; checkExpr(name, xmlAttributeEnv, symTable.stringType); if (name.prefix.value.isEmpty()) { name.namespaceURI = null; } checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType); symbolEnter.defineNode(bLangXMLAttribute, env); } public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env); Set<String> usedPrefixes = new HashSet<>(); BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix; if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) { usedPrefixes.add(elemNamePrefix.value); } for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) { if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) { BLangXMLQuotedString value = attribute.value; if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) { dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION); } checkExpr(attribute, xmlElementEnv, symTable.noType); } BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix; if (prefix != null && !prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute 
attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) { if (childXMLExpressions.getKind() == NodeKind.XML_ELEMENT_LITERAL) { return symTable.xmlElementType; } if (childXMLExpressions.getKind() == NodeKind.XML_TEXT_LITERAL) { return symTable.xmlTextType; } if (childXMLExpressions.getKind() == NodeKind.XML_PI_LITERAL) { return symTable.xmlPIType; } return symTable.xmlCommentType; } public void muteErrorLog() { this.nonErrorLoggingCheck = true; this.dlog.mute(); } public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) { this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } } public BType getXMLSequenceType(BType xmlSubType) { switch (xmlSubType.tag) { case TypeTags.XML_ELEMENT: return new BXMLType(symTable.xmlElementType, null); case TypeTags.XML_COMMENT: return new BXMLType(symTable.xmlCommentType, null); case TypeTags.XML_PI: return new BXMLType(symTable.xmlPIType, null); default: return symTable.xmlTextType; } } public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT && expType != symTable.noType) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, "XML Sequence"); resultType = symTable.semanticError; return; } List<BType> xmlTypesInSequence = new ArrayList<>(); for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) { resultType = checkExpr(expressionItem, env, expType); if (!xmlTypesInSequence.contains(resultType)) { xmlTypesInSequence.add(resultType); } } if (expType.tag == TypeTags.XML || expType == symTable.noType) { if (xmlTypesInSequence.size() == 1) { resultType = getXMLSequenceType(xmlTypesInSequence.get(0)); return; } resultType = symTable.xmlType; return; } if (expType.tag == TypeTags.XML_TEXT) { resultType = symTable.xmlTextType; return; } for (BType item : ((BUnionType) expType).getMemberTypes()) { if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.xmlType); resultType = symTable.semanticError; return; } } resultType = symTable.xmlType; } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments; checkStringTemplateExprs(literalValues); BLangExpression xmlExpression = literalValues.get(0); if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL && ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) { resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType); return; } resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType, this.expType); } public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) { 
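/* a processing-instruction target must be a string; the data fragments are validated like string-template expressions */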
checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType); checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType); } public void visit(BLangXMLQuotedString bLangXMLQuotedString) { checkStringTemplateExprs(bLangXMLQuotedString.textFragments); resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType); } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { dlog.error(xmlAttributeAccessExpr.pos, DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS); resultType = symTable.semanticError; } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { checkStringTemplateExprs(stringTemplateLiteral.exprs); resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType); if (type == symTable.semanticError) { resultType = type; return; } BObjectType literalType = (BObjectType) type; BType stringsType = literalType.fields.get("strings").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } BType insertionsType = literalType.fields.get("insertions").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } resultType = type; } private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) { if (expType == symTable.noType || containsAnyType(expType)) { return symTable.rawTemplateType; } BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos); BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE); if (type == symTable.semanticError) { return type; } if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type); return symTable.semanticError; } BObjectType litObjType = (BObjectType) type; BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol; if (litObjType.fields.size() > 2) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType); type = symTable.semanticError; } if (!objTSymbol.attachedFuncs.isEmpty()) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType); type = symTable.semanticError; } return type; } private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType, DiagnosticCode code, Location pos) { BType listType = fieldType.tag != TypeTags.INTERSECTION ? 
fieldType : ((BIntersectionType) fieldType).effectiveType; boolean errored = false; if (listType.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) listType; if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) { dlog.error(pos, code, arrayType.size, exprs.size()); return false; } for (BLangExpression expr : exprs) { errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored; } } else if (listType.tag == TypeTags.TUPLE) { BTupleType tupleType = (BTupleType) listType; final int size = exprs.size(); final int requiredItems = tupleType.tupleTypes.size(); if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) { dlog.error(pos, code, requiredItems, size); return false; } int i; List<BType> memberTypes = tupleType.tupleTypes; for (i = 0; i < requiredItems; i++) { errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored; } if (size > requiredItems) { for (; i < size; i++) { errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored; } } } else { throw new IllegalStateException("Expected a list type, but found: " + listType); } return errored; } private boolean containsAnyType(BType type) { if (type == symTable.anyType) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().contains(symTable.anyType); } return false; } private BType getCompatibleRawTemplateType(BType expType, Location pos) { if (expType.tag != TypeTags.UNION) { return expType; } BUnionType unionType = (BUnionType) expType; List<BType> compatibleTypes = new ArrayList<>(); for (BType type : unionType.getMemberTypes()) { if (types.isAssignable(type, symTable.rawTemplateType)) { compatibleTypes.add(type); } } if (compatibleTypes.size() == 0) { return expType; } if (compatibleTypes.size() > 1) { dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType, expType); return symTable.semanticError; } return compatibleTypes.get(0); } @Override public void visit(BLangIntRangeExpression intRangeExpression) { checkExpr(intRangeExpression.startExpr, env, symTable.intType); checkExpr(intRangeExpression.endExpr, env, symTable.intType); resultType = new BArrayType(symTable.intType); } @Override public void visit(BLangRestArgsExpression bLangRestArgExpression) { resultType = checkExpr(bLangRestArgExpression.expr, env, expType); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { if (expType.tag != TypeTags.TYPEDESC) { dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc); resultType = symTable.semanticError; return; } resultType = expType; } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType); bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType()); } @Override public void visit(BLangMatchExpression bLangMatchExpression) { SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env); checkExpr(bLangMatchExpression.expr, matchExprEnv); bLangMatchExpression.patternClauses.forEach(pattern -> { if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) { symbolEnter.defineNode(pattern.variable, matchExprEnv); } checkExpr(pattern.expr, matchExprEnv, expType); pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv)); }); 
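/* the result type of the match expression is the union of the types of all pattern expressions */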
LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { checkWithinQueryExpr = isWithinQuery(); visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { boolean cleanPrevEnvs = false; if (prevEnvs.empty()) { prevEnvs.push(env); cleanPrevEnvs = true; } if (breakToParallelQueryEnv) { queryEnvs.push(prevEnvs.peek()); } else { queryEnvs.push(env); } queryFinalClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression, collectionNode.getBType(), expType, queryExpr); actualType = (actualType == symTable.semanticError) ? actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); queryFinalClauses.pop(); queryEnvs.pop(); if (cleanPrevEnvs) { prevEnvs.pop(); } if (actualType.tag == TypeTags.TABLE) { BTableType tableType = (BTableType) actualType; tableType.constraintPos = queryExpr.pos; tableType.isTypeInlineDefined = true; if (!validateTableType(tableType)) { resultType = symTable.semanticError; return; } } checkWithinQueryExpr = false; resultType = actualType; } private boolean isWithinQuery() { return !queryEnvs.isEmpty() && !queryFinalClauses.isEmpty(); } private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType, BType targetType, BLangQueryExpr queryExpr) { List<BType> resultTypes = types.getAllTypes(targetType).stream() .filter(t -> !types.isAssignable(t, symTable.errorType)) .filter(t -> !types.isAssignable(t, symTable.nilType)) .collect(Collectors.toList()); if (resultTypes.isEmpty()) { resultTypes.add(symTable.noType); } BType actualType = symTable.semanticError; List<BType> selectTypes = new ArrayList<>(); List<BType> resolvedTypes = new ArrayList<>(); BType selectType, resolvedType; for (BType type : resultTypes) { switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); resolvedType = new BArrayType(selectType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); resolvedType = symTable.tableType; break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); resolvedType = symTable.streamType; break; case TypeTags.STRING: case TypeTags.XML: selectType = checkExpr(selectExp, env, type); resolvedType = selectType; break; case TypeTags.NONE: default: selectType = checkExpr(selectExp, env, type); resolvedType = getNonContextualQueryType(selectType, collectionType); break; } if (selectType != symTable.semanticError) { if (resolvedType.tag == TypeTags.STREAM) { queryExpr.isStream = true; } if (resolvedType.tag == TypeTags.TABLE) { 
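/* mark the query as table-producing; the concrete table type, including any key specifier, is built once a single select type is resolved */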
queryExpr.isTable = true; } selectTypes.add(selectType); resolvedTypes.add(resolvedType); } } if (selectTypes.size() == 1) { BType errorType = getErrorType(collectionType, queryExpr); selectType = selectTypes.get(0); if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, selectType, errorType, null); } else if (queryExpr.isTable) { actualType = getQueryTableType(queryExpr, selectType); } else { actualType = resolvedTypes.get(0); } if (errorType != null && errorType.tag != TypeTags.NIL) { return BUnionType.create(null, actualType, errorType); } else { return actualType; } } else if (selectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes); return actualType; } else { return actualType; } } private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) { final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType); markReadOnlyForConstraintType(constraintType); tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) { for (IdentifierNode identifier : fieldList) { BField field = types.getTableConstraintField(constraintType, identifier.getValue()); if (field == null) { dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, identifier.getValue(), constraintType); } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { field.symbol.flags |= Flags.READONLY; } } } private void markReadOnlyForConstraintType(BType constraintType) { if (constraintType.tag != TypeTags.RECORD) { return; } BRecordType recordType = (BRecordType) constraintType; for (BField field : recordType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { return; } } if (recordType.sealed) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) { if (collectionType.tag == TypeTags.SEMANTIC_ERROR) { return null; } BType returnType = null, errorType = null; switch (collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).completionType; break; case TypeTags.OBJECT: returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType); break; default: BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); if (itrSymbol == this.symTable.notFoundSymbol) { return null; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol; returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType); } List<BType> errorTypes = new ArrayList<>(); if (returnType != null) { types.getAllTypes(returnType).stream() .filter(t -> types.isAssignable(t, symTable.errorType)) .forEach(errorTypes::add); } if (checkWithinQueryExpr && queryExpr.isStream) { if (errorTypes.isEmpty()) { errorTypes.add(symTable.nilType); } errorTypes.add(symTable.errorType); } if (!errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[0])); } } return 
errorType; } private BType getNonContextualQueryType(BType staticType, BType basicType) { BType resultType; switch (basicType.tag) { case TypeTags.TABLE: resultType = symTable.tableType; break; case TypeTags.STREAM: resultType = symTable.streamType; break; case TypeTags.XML: resultType = new BXMLType(staticType, null); break; case TypeTags.STRING: resultType = symTable.stringType; break; default: resultType = new BArrayType(staticType); break; } return resultType; } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek()); BLangDoClause doClause = queryAction.getDoClause(); queryFinalClauses.push(doClause); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); queryFinalClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop()); fromClause.env = fromEnv; queryEnvs.push(fromEnv); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); this.breakToParallelQueryEnv = prevBreakToParallelEnv; } @Override public void visit(BLangJoinClause joinClause) { boolean prevBreakEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop()); joinClause.env = joinEnv; queryEnvs.push(joinEnv); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } this.breakToParallelQueryEnv = prevBreakEnv; } @Override public void visit(BLangLetClause letClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop()); letClause.env = letEnv; queryEnvs.push(letEnv); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv); } } @Override public void visit(BLangWhereClause whereClause) { whereClause.env = handleFilterClauses(whereClause.expression); } @Override public void visit(BLangSelectClause selectClause) { SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop()); selectClause.env = selectEnv; queryEnvs.push(selectEnv); } @Override public void visit(BLangDoClause doClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop()); doClause.env = letEnv; queryEnvs.push(letEnv); } @Override public void visit(BLangOnConflictClause onConflictClause) { BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType); if (!types.isAssignable(exprType, symTable.errorType)) { dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, symTable.errorType, exprType); } } @Override public void visit(BLangLimitClause limitClause) { BType 
exprType = checkExpr(limitClause.expression, queryEnvs.peek()); if (!types.isAssignable(exprType, symTable.intType)) { dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType, exprType); } } @Override public void visit(BLangOnClause onClause) { BType lhsType, rhsType; BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek()); onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode); lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv); onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode); rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek()); if (!types.isAssignable(lhsType, rhsType)) { dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType); } } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.env = queryEnvs.peek(); for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) { BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env); if (!types.isOrderedType(exprType, false)) { dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED); } } } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private SymbolEnv handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.getBType(); if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop()); queryEnvs.push(filterEnv); return filterEnv; } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if (bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if (types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? 
"check" : "checkpanic"; BLangExpression exprWithCheckingKeyword = checkedExpr.expr; boolean firstVisit = exprWithCheckingKeyword.getBType() == null; BType checkExprCandidateType; if (expType == symTable.noType) { checkExprCandidateType = symTable.noType; } else { BType exprType = getCandidateType(checkedExpr, expType); if (exprType == symTable.semanticError) { checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType); } else { checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType); } } if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) { rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType); } BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.getBType(); exprType = checkedExpr.expr.getBType(); } } boolean isErrorType = types.isAssignable(exprType, symTable.errorType); if (exprType.tag != TypeTags.UNION && !isErrorType) { if (exprType.tag == TypeTags.READONLY) { checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{ add(symTable.errorType); }}; resultType = symTable.anyAndReadonly; return; } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.setBType(symTable.semanticError); return; } List<BType> errorTypes = new ArrayList<>(); List<BType> nonErrorTypes = new ArrayList<>(); if (!isErrorType) { for (BType memberType : ((BUnionType) exprType).getMemberTypes()) { if (memberType.tag == TypeTags.READONLY) { errorTypes.add(symTable.errorType); nonErrorTypes.add(symTable.anyAndReadonly); continue; } if (types.isAssignable(memberType, symTable.errorType)) { errorTypes.add(memberType); continue; } nonErrorTypes.add(memberType); } } else { errorTypes.add(exprType); } checkedExpr.equivalentErrorTypeList = errorTypes; if (errorTypes.isEmpty()) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.setBType(symTable.semanticError); return; } BType actualType; if (nonErrorTypes.size() == 0) { actualType = symTable.neverType; } else if (nonErrorTypes.size() == 1) { actualType = nonErrorTypes.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes)); } resultType = types.checkType(checkedExpr, actualType, expType); } private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) { BType rhsType = getCandidateType(checkedExpr, type); if (rhsType == symTable.semanticError) { rhsType = getCandidateType(checkedExpr, rhsType); } BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType); if (!types.isLax(candidateLaxType)) { return; } ArrayList<BLangExpression> argExprs = new ArrayList<>(); BType typedescType = new BTypedescType(expType, null); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = expType; typedescExpr.setBType(typedescType); argExprs.add(typedescExpr); BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE, argExprs, checkedExpr.expr, checkedExpr.pos); invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value)); invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); checkedExpr.expr = invocation; } private BType 
getCandidateLaxType(BLangNode expr, BType rhsType) { if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { return types.getSafeType(rhsType, false, true); } return rhsType; } private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); checkedExpr.expr.cloneAttempt++; BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr); BType rhsType; if (checkExprCandidateType == symTable.semanticError) { rhsType = checkExpr(clone, env); } else { rhsType = checkExpr(clone, env, checkExprCandidateType); } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return rhsType; } private BType addDefaultErrorIfNoErrorComponentFound(BType type) { for (BType t : types.getAllTypes(type)) { if (types.isAssignable(t, symTable.errorType)) { return type; } } return BUnionType.create(null, type, symTable.errorType); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env)); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type; actualType = BUnionType.create(null, annotType, symTable.nilType); } this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType); } private boolean isValidVariableReference(BLangExpression varRef) { switch (varRef.getKind()) { case SIMPLE_VARIABLE_REF: case RECORD_VARIABLE_REF: case TUPLE_VARIABLE_REF: case ERROR_VARIABLE_REF: case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case XML_ATTRIBUTE_ACCESS_EXPR: return true; default: dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType()); return false; } } private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) { if (origTargetType == symTable.readonlyType) { if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } if (origTargetType.tag != TypeTags.UNION) { return origTargetType; } boolean hasReadOnlyType = false; LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) { if (memberType == symTable.readonlyType) { hasReadOnlyType = true; continue; } nonReadOnlyTypes.add(memberType); } if (!hasReadOnlyType) { return origTargetType; } if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes); nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>())); return nonReadOnlyUnion; } private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) { SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError)); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); valueTypeNode.pos = symTable.builtinPos; paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.setBType(bType); } } public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void 
checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol); } } protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) { BLangInvokableNode encInvokable = env.enclInvokable; if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE && env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY && encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new 
ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } } private boolean isNotFunction(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) { return false; } if (isFunctionPointer(funcSymbol)) { return false; } return true; } private boolean isFunctionPointer(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) { return false; } return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE && funcSymbol.kind == SymbolKind.FUNCTION && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE; } private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr, BType expectedType) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) { BType target = getErrorCtorNamedArgTargetType(namedArgsExpression, expectedType); BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression); BType type = checkExpr(clone, env, target); if (type == symTable.semanticError) { checkExpr(namedArgsExpression, env); } else { checkExpr(namedArgsExpression, env, target); } namedArgs.add(namedArgsExpression); } return namedArgs; } private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) { if (expectedType == symTable.semanticError) { return symTable.semanticError; } if (expectedType.tag == TypeTags.MAP) { return ((BMapType) expectedType).constraint; } if (expectedType.tag != TypeTags.RECORD) { return symTable.semanticError; } BRecordType recordType = (BRecordType) expectedType; BField targetField = recordType.fields.get(namedArgsExpression.name.value); if (targetField != null) { return targetField.type; } if (!recordType.sealed && !recordType.fields.isEmpty()) { dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name, recordType); } return recordType.sealed ? 
symTable.noType : recordType.restFieldType; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol) { BSymbol invocableField = symResolver.resolveInvocableObjectField( iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol); if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) { funcSymbol = invocableField; iExpr.functionPointerInvocation = true; } } if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { if (checkInvalidActionInvocation(aInv)) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType()); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .resolveObjectMethod(aInv.pos, env, remoteMethodQName, (BObjectTypeSymbol) expType.tsymbol); if (remoteFuncSymbol == symTable.notFoundSymbol) { BSymbol invocableField = symResolver.resolveInvocableObjectField( aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol); if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) { remoteFuncSymbol = invocableField; aInv.functionPointerInvocation = true; } } if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && Symbols.isFlagOn(expType.flags, Flags.CLIENT) && 
types.isNeverTypeOrStructureTypeWithARequiredNeverMember ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL); } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); iExpr.argExprs.add(0, iExpr.expr); checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = types.checkType(iExpr, actualType, this.expType); } private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams, Set<String> requiredParamNames) { if (openIncRecordParams.size() != 1) { return null; } LinkedHashMap<String, BField> fields = ((BRecordType) openIncRecordParams.get(0).type).fields; for (String paramName : requiredParamNames) { if (!fields.containsKey(paramName)) { return null; } } return openIncRecordParams.get(0); } private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol, List<BVarSymbol> incRecordParams) { Set<String> requiredParamNames = new HashSet<>(); List<BVarSymbol> openIncRecordParams = new ArrayList<>(); for (BVarSymbol paramSymbol : invokableSymbol.params) { if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) && paramSymbol.type.getKind() == TypeKind.RECORD) { boolean recordWithDisallowFieldsOnly = true; LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields; for (String fieldName : fields.keySet()) { BField field = fields.get(fieldName); if (field.symbol.type.tag != TypeTags.NEVER) { recordWithDisallowFieldsOnly = false; incRecordParams.add(field.symbol); requiredParamNames.add(fieldName); } } if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) { openIncRecordParams.add(paramSymbol); } } else { requiredParamNames.add(paramSymbol.name.value); } } return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames); } private BType checkInvocationParam(BLangInvocation iExpr) { if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE); return symTable.semanticError; } if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol); List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes(); List<BVarSymbol> incRecordParams = new ArrayList<>(); BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol, incRecordParams); int parameterCountForPositionalArgs = paramTypes.size(); int parameterCountForNamedArgs = parameterCountForPositionalArgs + 
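/* Fields of included record parameters can be targeted by name, so they extend the named-arg budget beyond the positional parameter count. */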
incRecordParams.size(); iExpr.requiredArgs = new ArrayList<>(); for (BVarSymbol symbol : invokableSymbol.params) { if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) || symbol.type.tag != TypeTags.RECORD) { continue; } LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields; if (fields.isEmpty()) { continue; } for (String field : fields.keySet()) { if (fields.get(field).type.tag != TypeTags.NEVER) { parameterCountForNamedArgs = parameterCountForNamedArgs - 1; break; } } } int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for (BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCountForPositionalArgs) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams, incRecordParamAllowAdditionalFields); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.isDefaultable) { continue; } requiredParams.add(nonRestParam); } for (BVarSymbol incRecordParam : incRecordParams) { if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) { requiredIncRecordParams.add(incRecordParam); } } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = nonRestArgs.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { BType expectedType = paramTypes.get(i); BType actualType = arg.getBType(); if (expectedType == symTable.charStringType) { arg.cloneAttempt++; BLangExpression clonedArg = nodeCloner.cloneNode(arg); BType argType = checkExprSilent(clonedArg, expectedType, env); if (argType != symTable.semanticError) { actualType = argType; } } types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.getBType(), expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = 
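/* Resolve the named arg against the non-rest params, the fields of included record params, and, failing those, the rest field of a single open included record param. */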
checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr, nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields); if (varSym == null) { dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); requiredIncRecordParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name, iExpr.name.value); errored = true; } } } if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) { for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) { for (BVarSymbol requiredParam : requiredParams) { if (requiredParam.type == requiredIncRecordParam.owner.type) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredIncRecordParam.name, iExpr.name.value); errored = true; } } } } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType listTypeRestArg = restParam == null ? null : restParam.type; BRecordType mappingTypeRestArg = null; if (vararg != null && nonRestArgs.size() < nonRestParams.size()) { PackageID pkgID = env.enclPkg.symbol.pkgID; List<BType> tupleMemberTypes = new ArrayList<>(); BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL); mappingTypeRestArg = new BRecordType(recordSymbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); BType tupleRestType = null; BVarSymbol fieldSymbol; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { BType paramType = paramTypes.get(j); BVarSymbol nonRestParam = nonRestParams.get(j); Name paramName = nonRestParam.name; tupleMemberTypes.add(paramType); boolean required = requiredParams.contains(nonRestParam); fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(required ? 
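/* Params left unbound before a vararg are modeled as fields of a synthetic record (for a mapping vararg) and members of a synthetic tuple (for a list vararg); each field mirrors its param's required or optional status. */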
Flag.REQUIRED : Flag.OPTIONAL); }}), paramName, nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(paramName.value, new BField(paramName, null, fieldSymbol)); } if (listTypeRestArg != null) { if (listTypeRestArg.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) listTypeRestArg).eType; } else if (listTypeRestArg.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) listTypeRestArg; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; listTypeRestArg = tupleType; mappingTypeRestArg.sealed = true; mappingTypeRestArg.restFieldType = symTable.noType; mappingTypeRestArg.fields = fields; recordSymbol.type = mappingTypeRestArg; mappingTypeRestArg.tsymbol = recordSymbol; } if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } BType restType = null; if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); restType = this.resultType; } else if (vararg != null) { iExpr.restArgs.add(vararg); if (mappingTypeRestArg != null) { LinkedHashSet<BType> restTypes = new LinkedHashSet<>(); restTypes.add(listTypeRestArg); restTypes.add(mappingTypeRestArg); BType actualType = BUnionType.create(null, restTypes); checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation); } else { checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); } restType = this.resultType; } else if (!iExpr.restArgs.isEmpty()) { if (listTypeRestArg.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } else { BTupleType tupleType = (BTupleType) listTypeRestArg; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? 
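/* Rest args beyond the tuple's fixed member count are checked against the tuple's rest type. */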
tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (restType != symTable.semanticError && Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = unifier.build(retType, expType, iExpr, types, symTable, dlog); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.getBType(); if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } return; } Location pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = keyFunction.getBType().getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.getBType(); if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.getBType().getReturnType(); } if (!types.isOrderedType(returnType, false)) { dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } } private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr, List<BVarSymbol> nonRestParams, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { return nonRestParam; } } for (BVarSymbol incRecordParam : incRecordParams) { if (incRecordParam.getName().value.equals(argName.value)) { return incRecordParam; } } if (incRecordParamAllowAdditionalFields != null) { BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type; checkExpr(expr, env, incRecordType.restFieldType); if (!incRecordType.fields.containsKey(argName.value)) { return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL); } } return null; } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, 
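/* Async invocations (`start f()` in Ballerina) evaluate to a future whose constraint is the invoked function's return type. */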
retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case ELVIS_EXPR: case TERNARY_EXPR: case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; Location pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); fieldType = typeSymbolPair.determinedType; key.fieldSymbol = typeSymbolPair.fieldSymbol; readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.getBType(); if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticErrorCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) spreadExprType).fields.values()) { BType specFieldType = bField.type; BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name, mappingType.tsymbol); BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name, (BRecordType) mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { 
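/* Record that at least one spread field was incompatible, but keep checking the remaining fields before resolving the literal to a semantic error. */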
errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); fieldType = typeSymbolPair.determinedType; readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? 
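/* A valid key types the field by the map's constraint; an invalid key poisons the field type. */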
((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { exprToCheck = nodeCloner.cloneNode(valueExpr); } else { ((BLangNode) field).setBType(fieldType); } return checkExpr(exprToCheck, this.env, fieldType); } private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return new TypeSymbolPair(null, symTable.semanticError); } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes)); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return new TypeSymbolPair(null, symTable.semanticError); } BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol); BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType); return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? 
(BVarSymbol) fieldSymbol : null, type); } private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key, BRecordType recordType) { if (fieldSymbol != symTable.notFoundSymbol) { return fieldSymbol.type; } if (recordType.sealed) { dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key, recordType.tsymbol.type.getKind().typeName(), recordType); return symTable.semanticError; } return recordType.restFieldType; } private BType getAllFieldType(BRecordType recordType) { LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BField field : recordType.fields.values()) { possibleTypes.add(field.type); } BType restFieldType = recordType.restFieldType; if (restFieldType != null && restFieldType != symTable.noType) { possibleTypes.add(restFieldType); } return BUnionType.create(null, possibleTypes); } private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) { if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return false; } return true; } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) { return true; } dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return false; } private BType addNilForNillableAccessType(BType actualType) { if (actualType.isNullable()) { return actualType; } return BUnionType.create(null, actualType, symTable.nilType); } private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return symTable.semanticError; } if (recordType.sealed) { return symTable.semanticError; } return recordType.restFieldType; } private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess, Name fieldName, BObjectType objectType) { BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos, this.env, fieldName, objectType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value)); fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, 
objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol); return symTable.semanticError; } if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) && !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) { fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol); fieldSymbol.flags &= ~Flags.ISOLATED; fieldSymbol.type.flags &= ~Flags.ISOLATED; } bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkTupleFieldType(BType tupleType, int indexValue) { BTupleType bTupleType = (BTupleType) tupleType; if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) { return bTupleType.restType; } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) { return symTable.semanticError; } return bTupleType.tupleTypes.get(indexValue); } private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) { BLangExpression startTagName = bLangXMLElementLiteral.startTagName; checkExpr(startTagName, xmlElementEnv, symTable.stringType); BLangExpression endTagName = bLangXMLElementLiteral.endTagName; if (endTagName == null) { return; } checkExpr(endTagName, xmlElementEnv, symTable.stringType); if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME && startTagName.equals(endTagName)) { return; } if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) { return; } dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH); } private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) { for (BLangExpression expr : exprs) { checkExpr(expr, env); BType type = expr.getBType(); if (type == symTable.semanticError) { continue; } if (!types.isNonNilSimpleBasicTypeOrString(type)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType), type); } } } /** * Concatenate the consecutive text type nodes, and get the reduced set of children. 
* * @param exprs Child nodes * @param xmlElementEnv Symbol environment of the enclosing XML element literal * @return Reduced set of children */ private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) { List<BLangExpression> newChildren = new ArrayList<>(); List<BLangExpression> tempConcatExpressions = new ArrayList<>(); for (BLangExpression expr : exprs) { BType exprType; if (expr.getKind() == NodeKind.QUERY_EXPR) { exprType = checkExpr(expr, xmlElementEnv, expType); } else { exprType = checkExpr(expr, xmlElementEnv); } if (TypeTags.isXMLTypeTag(exprType.tag)) { /* an XML child flushes any pending text fragments as a single text literal */ if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); tempConcatExpressions = new ArrayList<>(); } newChildren.add(expr); continue; } BType type = expr.getBType(); if (type.tag >= TypeTags.JSON && !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) { if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType, symTable.xmlType), type); } continue; } tempConcatExpressions.add(expr); } /* flush any trailing text fragments */ if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); } return newChildren; } private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.textFragments = exprs; xmlTextLiteral.pos = exprs.get(0).pos; xmlTextLiteral.setBType(symTable.xmlType); return xmlTextLiteral; } private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) { accessExpr.originalType = actualType; BUnionType unionType = BUnionType.create(null, actualType); if (returnsNull(accessExpr)) { unionType.add(symTable.nilType); } BType parentType = accessExpr.expr.getBType(); if (accessExpr.errorSafeNavigation && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) { unionType.add(symTable.errorType); } if (unionType.getMemberTypes().size() == 1) { return unionType.getMemberTypes().toArray(new BType[0])[0]; } return unionType; } private boolean returnsNull(BLangAccessExpression accessExpr) { BType parentType = accessExpr.expr.getBType(); if (parentType.isNullable() && parentType.tag != TypeTags.JSON) { return true; } if (parentType.tag != TypeTags.MAP) { return false; } if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.getBType().tag == TypeTags.MAP) { BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint; return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON; } return false; } private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.OBJECT) { return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if 
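/* A field access on a union of objects succeeds only if every member has the field; a single resolved type is used as is, otherwise the member field types are folded into a union. */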
(fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return symTable.semanticError; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType == symTable.semanticError) { return fieldType; } return addNilForNillableAccessType(fieldType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.isEmpty()) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
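/* If some union members lack the field, the optional access may produce nil, so nil is added to the resulting type. */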
addNilForNillableAccessType(fieldType) : fieldType; } private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes, Name fieldName) { RecordUnionDiagnostics recordUnionDiagnostics = new RecordUnionDiagnostics(); for (BType memberType : memberTypes) { BRecordType recordMember = (BRecordType) memberType; if (recordMember.getFields().containsKey(fieldName.getValue())) { BType individualFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordMember); if (individualFieldType == symTable.semanticError) { recordUnionDiagnostics.optionalInRecords.add(recordMember); } } else { recordUnionDiagnostics.undeclaredInRecords.add(recordMember); } } return recordUnionDiagnostics; } private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BRecordType recordVarRefType = (BRecordType) varRefType; boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue()); if (isFieldDeclared) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS); } else if (recordVarRefType.sealed) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType); } else { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName, varRefType); } } else { LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName); if (recUnionInfo.hasUndeclaredAndOptional()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_AND_OPTIONAL_FIELDS_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords), recUnionInfo.recordsToString(recUnionInfo.optionalInRecords)); } else if (recUnionInfo.hasUndeclared()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords)); } else if (recUnionInfo.hasOptional()) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_IN_UNION_OF_RECORDS, fieldName, recUnionInfo.recordsToString(recUnionInfo.optionalInRecords)); } } } private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) { actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) { actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); if (actualType != symTable.semanticError) { fieldAccessExpr.originalType = actualType; return actualType; } if (!fieldAccessExpr.isLValue) { logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName); return actualType; } actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType); } } else if (types.isLax(varRefType)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, 
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType); return symTable.semanticError; } if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } BType laxFieldAccessType = getLaxFieldAccessType(varRefType); actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.originalType = laxFieldAccessType; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); } actualType = symTable.xmlType; fieldAccessExpr.originalType = actualType; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType); } return actualType; } private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr; String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value; BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix)); if (nsSymbol == symTable.notFoundSymbol) { dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsPrefixedFieldAccess.nsPrefix); } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst( nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos); } else { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol; } } private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) { return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType); } private BType getLaxFieldAccessType(BType exprType) { switch (exprType.tag) { case TypeTags.JSON: return symTable.jsonType; case TypeTags.XML: case TypeTags.XML_ELEMENT: return symTable.stringType; case TypeTags.MAP: return ((BMapType) exprType).constraint; case TypeTags.UNION: BUnionType unionType = (BUnionType) exprType; if (types.isSameType(symTable.jsonType, unionType)) { return symTable.jsonType; } LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType))); return memberTypes.size() == 1 ? 
memberTypes.iterator().next() : BUnionType.create(null, memberTypes); } return symTable.semanticError; } private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; boolean nillableExprType = false; BType effectiveType = varRefType; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); } } if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) { actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName); if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType, fieldName); } fieldAccessExpr.nilSafeNavigation = nillableExprType; fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (types.isLax(effectiveType)) { BType laxFieldAccessType = getLaxFieldAccessType(effectiveType); actualType = accessCouldResultInError(effectiveType) ? BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); actualType = accessCouldResultInError(effectiveType) ? 
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType); } if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private boolean accessCouldResultInError(BType type) { if (type.tag == TypeTags.JSON) { return true; } if (type.tag == TypeTags.MAP) { return false; } if (type.tag == TypeTags.XML) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError); } else { return false; } } private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) { BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType()); boolean nillableExprType = false; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } if (nillableExprType) { varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); if (!types.isSubTypeOfMapping(varRefType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } } } } BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; BType actualType = symTable.semanticError; if (types.isSubTypeOfMapping(varRefType)) { checkExpr(indexExpr, this.env, symTable.stringType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType); if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD, fieldName, indexBasedAccessExpr.expr.getBType()); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } indexBasedAccessExpr.nilSafeNavigation = nillableExprType; indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? 
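/* For a non-leaf access on a nilable expression, the nil introduced by safe navigation is excluded from the recorded original type. */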
actualType : types.getTypeWithoutNil(actualType); } else if (types.isSubTypeOfList(varRefType)) { checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType); indexBasedAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) { dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr)); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } } else if (types.isAssignable(varRefType, symTable.stringType)) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } indexBasedAccessExpr.originalType = symTable.charStringType; actualType = symTable.charStringType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (indexBasedAccessExpr.isLValue) { indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); return actualType; } BType type = checkExpr(indexExpr, this.env, symTable.intType); if (type == symTable.semanticError) { return type; } indexBasedAccessExpr.originalType = varRefType; actualType = varRefType; } else if (varRefType.tag == TypeTags.TABLE) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS, varRefType); return symTable.semanticError; } BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType(); BType keyTypeConstraint = tableType.keyTypeConstraint; if (tableType.keyTypeConstraint == null) { keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()). 
fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint); if (keyTypeConstraint == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE, indexBasedAccessExpr.expr); return symTable.semanticError; } } if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) { checkExpr(indexExpr, this.env, keyTypeConstraint); if (indexExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } else { List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes; if (keyConstraintTypes.size() != multiKeyExpressionList.size()) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } for (int i = 0; i < multiKeyExpressionList.size(); i++) { BLangExpression keyExpr = multiKeyExpressionList.get(i); checkExpr(keyExpr, this.env, keyConstraintTypes.get(i)); if (keyExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } } if (expType.tag != TypeTags.NONE) { BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType); if (resultType == symTable.semanticError) { return symTable.semanticError; } } BType constraint = tableType.constraint; actualType = addNilForNillableAccessType(constraint); indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (varRefType == symTable.semanticError) { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); return symTable.semanticError; } else { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (nillableExprType && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private Long getConstIndex(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value : (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private String getConstFieldName(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value : (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType, BArrayType arrayType) { BType actualType = symTable.semanticError; switch (indexExprType.tag) { case TypeTags.INT: BLangExpression indexExpr = indexBasedAccess.indexExpr; if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) { actualType = arrayType.eType; break; } actualType = getConstIndex(indexExpr) >= arrayType.size ? 
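/* A constant index at or beyond a closed array's length is out of range; e.g. given `int[2] a = [1, 2];` the access `a[2]` is rejected here (illustrative Ballerina). */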
symTable.semanticError : arrayType.eType; break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) indexExprType; boolean validIndexExists = false; for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); if (indexValue >= 0 && (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) { validIndexExists = true; break; } } if (!validIndexExists) { return symTable.semanticError; } actualType = arrayType.eType; break; case TypeTags.UNION: List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(matchedType -> (BFiniteType) matchedType) .collect(Collectors.toList()); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType); if (elementType == symTable.semanticError) { return symTable.semanticError; } actualType = arrayType.eType; } return actualType; } private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.ARRAY) { return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type); } if (type.tag == TypeTags.TUPLE) { return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType()); } LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.INT: if (isConst(indexExpr)) { actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue()); } else { BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType(); LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>()); actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); } break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); BType fieldType = checkTupleFieldType(tuple, indexValue); if (fieldType.tag != TypeTags.SEMANTIC_ERROR) { possibleTypes.add(fieldType); } } if (possibleTypes.size() == 0) { return symTable.semanticError; } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) { tupleType.tupleTypes .forEach(memberType -> { if (memberType.tag == TypeTags.UNION) { collectMemberTypes((BUnionType) memberType, memberTypes); } else { memberTypes.add(memberType); } }); return memberTypes; } private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.MAP) { BType constraint = ((BMapType) type).constraint; return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType()); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr)); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.isLValue) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private List<BType> getTypesList(BType type) { if (type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) type; return new ArrayList<>(unionType.getMemberTypes()); } else { return Lists.of(type); } } private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType()); LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>(); for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternExprType = pattern.expr.getBType(); matchExprTypes.addAll(getTypesList(patternExprType)); if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) { return new LinkedHashSet<BType>() { { add(symTable.semanticError); } }; } assignable = this.types.isAssignable(type, pattern.variable.getBType()); if (assignable) { break; } } if (!assignable) { matchExprTypes.add(type); } } return matchExprTypes; } private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) { if (encounteredTypes.contains(type)) { return false; } encounteredTypes.add(type); switch (type.tag) { case TypeTags.UNION: for (BType bType1 : ((BUnionType) type).getMemberTypes()) { if (couldHoldTableValues(bType1, encounteredTypes)) { return true; } } return false; case TypeTags.MAP: return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes); case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (couldHoldTableValues(field.type, encounteredTypes)) { return true; } } return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes); case TypeTags.ARRAY: return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes); case TypeTags.TUPLE: for (BType bType : ((BTupleType) type).getTupleTypes()) { if (couldHoldTableValues(bType, encounteredTypes)) { return true; } } return false; } return false; } private boolean isConst(BLangExpression expression) { if 
(ConstantAnalyzer.isValidConstantExpressionNode(expression)) { return true; } if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; } private Name getCurrentCompUnit(BLangNode node) { return names.fromString(node.pos.lineRange().filePath()); } private BType getRepresentativeBroadType(List<BType> inferredTypeList) { for (int i = 0; i < inferredTypeList.size(); i++) { BType type = inferredTypeList.get(i); if (type.tag == TypeTags.SEMANTIC_ERROR) { return type; } for (int j = i + 1; j < inferredTypeList.size(); j++) { BType otherType = inferredTypeList.get(j); if (otherType.tag == TypeTags.SEMANTIC_ERROR) { return otherType; } if (types.isAssignable(otherType, type)) { inferredTypeList.remove(j); j -= 1; continue; } if (types.isAssignable(type, otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? 
checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, location, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, location); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); return recordSymbol; } private String getKeyName(BLangExpression key) { return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value; } private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString, BType exprType, boolean required, boolean readonly) { if (!nonRestFieldTypes.containsKey(keyString)) { nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required, readonly)); return; } FieldInfo fieldInfo = nonRestFieldTypes.get(keyString); List<BType> typeList = fieldInfo.types; if (isUniqueType(typeList, exprType)) { typeList.add(exprType); } if (required && !fieldInfo.required) { fieldInfo.required = true; } } private boolean isUniqueType(List<BType> typeList, BType type) { boolean isRecord = type.tag == TypeTags.RECORD; for (BType bType : typeList) { if (isRecord) { if (type == bType) { return false; } } else if (types.isSameType(type, bType)) { return false; } } return true; } private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType, BType expType) { if (expType == symTable.semanticError) { return expType; } boolean unionExpType = expType.tag == TypeTags.UNION; if (expType == mutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) { return mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.getBType(); if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names)); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild); } } } private void logUndefinedSymbolError(Location pos, String name) { if (!missingNodesHelper.isMissingNode(name)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name); } } private void 
markTypeAsIsolated(BType actualType) { actualType.flags |= Flags.ISOLATED; actualType.tsymbol.flags |= Flags.ISOLATED; } private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) { return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS); } private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) { List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions; BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType(); BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env, names.fromIdNode(userDefinedType.pkgAlias), names.fromIdNode(userDefinedType.typeName)); for (BLangClassDefinition classDefinition : classDefinitions) { if (classDefinition.symbol == symbol) { return classDefinition; } } return null; } private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType, BLangClassDefinition classDefForConstructor, SymbolEnv env, boolean logErrors) { boolean hasNeverReadOnlyField = false; for (BField field : actualObjectType.fields.values()) { BType fieldType = field.type; if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) { analyzeObjectConstructor(classDefForConstructor, env); hasNeverReadOnlyField = true; if (!logErrors) { return; } dlog.error(field.pos, DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE, fieldType); } } if (hasNeverReadOnlyField) { return; } classDefForConstructor.flagSet.add(Flag.READONLY); actualObjectType.flags |= Flags.READONLY; actualObjectType.tsymbol.flags |= Flags.READONLY; ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types, anonymousModelHelper, symTable, names, cIExpr.pos); analyzeObjectConstructor(classDefForConstructor, env); } private void markConstructedObjectIsolatedness(BObjectType actualObjectType) { if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) { markTypeAsIsolated(actualObjectType); return; } for (BField field : actualObjectType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) { return; } } markTypeAsIsolated(actualObjectType); } private void markLeafNode(BLangAccessExpression accessExpression) { BLangNode parent = accessExpression.parent; if (parent == null) { accessExpression.leafNode = true; return; } NodeKind kind = parent.getKind(); while (kind == NodeKind.GROUP_EXPR) { parent = parent.parent; if (parent == null) { accessExpression.leafNode = true; break; } kind = parent.getKind(); } if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) { accessExpression.leafNode = true; } } private static class FieldInfo { List<BType> types; boolean required; boolean readonly; private FieldInfo(List<BType> types, boolean required, boolean readonly) { this.types = types; this.required = required; this.readonly = readonly; } } private static class TypeSymbolPair { private BVarSymbol fieldSymbol; private BType determinedType; public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) { this.fieldSymbol = fieldSymbol; this.determinedType = determinedType; } } private static class RecordUnionDiagnostics { Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>(); Set<BRecordType> optionalInRecords = new LinkedHashSet<>(); boolean hasUndeclaredAndOptional() { return 
undeclaredInRecords.size() > 0 && optionalInRecords.size() > 0; } boolean hasUndeclared() { return undeclaredInRecords.size() > 0; } boolean hasOptional() { return optionalInRecords.size() > 0; } String recordsToString(Set<BRecordType> recordTypeSet) { StringBuilder recordNames = new StringBuilder(); int recordSetSize = recordTypeSet.size(); int index = 0; for (BRecordType recordType : recordTypeSet) { index++; recordNames.append(recordType.tsymbol.getName().getValue()); if (recordSetSize > 1) { if (index == recordSetSize - 1) { recordNames.append("', and '"); } else if (index < recordSetSize) { recordNames.append("', '"); } } } return recordNames.toString(); } } }
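The getRepresentativeBroadType method above prunes every inferred type that is assignable to another candidate and unions whatever remains. A self-contained toy illustration of the same pruning, using Java's Class#isAssignableFrom in place of the compiler's types.isAssignable (the class and method names here are illustrative only, not from the source):

import java.util.ArrayList;
import java.util.List;

public class BroadTypeDemo {

    // Mirrors the double loop in getRepresentativeBroadType: drop any type that is
    // assignable to (i.e. narrower than) another candidate in the list.
    static List<Class<?>> representative(List<Class<?>> inferredTypeList) {
        List<Class<?>> types = new ArrayList<>(inferredTypeList);
        for (int i = 0; i < types.size(); i++) {
            for (int j = i + 1; j < types.size(); j++) {
                if (types.get(i).isAssignableFrom(types.get(j))) {
                    types.remove(j); // j is assignable to i: drop the narrower type j
                    j -= 1;
                } else if (types.get(j).isAssignableFrom(types.get(i))) {
                    types.remove(i); // i is assignable to j: drop i and restart its row
                    i -= 1;
                    break;
                }
            }
        }
        return types; // one element: that type; several: conceptually a union
    }

    public static void main(String[] args) {
        // Integer is assignable to Number and is pruned; String is unrelated and kept,
        // so the representative broad type is conceptually Number|String.
        System.out.println(representative(List.of(Integer.class, Number.class, String.class)));
    }
}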
Ah, I forgot to leave an explanation about this. I didn't squash the original commits, to make it easier to continue reviewing. Actually, I have extracted the "else if ... else" into a helper method named `getCheckpointException`. However, that extraction happens not in a fixup commit but in the original commit "[FLINK-13905][checkpointing] Separate checkpoint triggering into several asynchronous stages.". I thought a helper method was enough. And thanks for the reminder to keep each fixup as a separate commit. I'll try to separate the last big fixup commit :)
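For reference, a minimal sketch of what the extracted helper could look like, derived from the "else if ... else" chain in receiveDeclineMessage further below; the method name comes from the comment above, while the signature, parameter names, and the JOB_FAILURE fallback are assumptions, not the confirmed FLINK-13905 change:

// Hypothetical reconstruction, not the actual FLINK-13905 code.
private CheckpointException getCheckpointException(
        CheckpointFailureReason defaultReason, @Nullable Throwable reason) {
    if (reason == null) {
        // The task reported no cause: use the plain default reason.
        return new CheckpointException(defaultReason);
    } else if (reason instanceof CheckpointException) {
        // Already classified: propagate unchanged.
        return (CheckpointException) reason;
    } else {
        // Anything else is wrapped as a job failure, keeping the original cause.
        return new CheckpointException(CheckpointFailureReason.JOB_FAILURE, reason);
    }
}

With such a helper, the decline path would collapse to a single call along the lines of abortPendingCheckpoint(checkpoint, getCheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED, message.getReason()), message.getTaskExecutionId());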
private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { abortPendingCheckpoint( pendingCheckpoint, new CheckpointException( CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1)); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); triggerQueuedRequests(); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionRelativeTime = clock.relativeTimeMillis(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } final long timestamp = completedCheckpoint.getTimestamp(); for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } }
}
private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { abortPendingCheckpoint( pendingCheckpoint, new CheckpointException( CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1)); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); resumePeriodicTriggering(); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionRelativeTime = clock.relativeTimeMillis(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } final long timestamp = completedCheckpoint.getTimestamp(); for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } }
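Comparing the two method bodies above: the only change in this row is in the finally block, where the call that re-enables checkpoint triggering after a checkpoint completes was renamed. A minimal view of the diff:

    pendingCheckpoints.remove(checkpointId);
-   triggerQueuedRequests();
+   resumePeriodicTriggering();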
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** Map from checkpoint ID to the pending checkpoint. */ private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. */ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time (in ms) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpoints; /** The maximum number of checkpoints that may be in progress at the same time. */ private final int maxConcurrentCheckpointAttempts; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. * It must be single-threaded. Eventually it will be replaced by main thread executor. */ private final ScheduledExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link Clock#relativeTimeMillis()}) when the last checkpoint * completed. */ private long lastCheckpointCompletionRelativeTime; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. 
* Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag whether a trigger request could not be handled immediately. Non-volatile, because only * accessed in synchronized scope */ private boolean triggerRequestQueued; /** Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects. */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private boolean isPreferCheckpointForRecovery; private final CheckpointFailureManager failureManager; private final Clock clock; public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { this( job, chkConfig, tasksToTrigger, tasksToWaitFor, tasksToCommitTo, checkpointIDCounter, completedCheckpointStore, checkpointStateBackend, executor, timer, sharedStateRegistryFactory, failureManager, SystemClock.getInstance()); } @VisibleForTesting public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager, Clock clock) { checkNotNull(checkpointStateBackend); long minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints(); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } long baseInterval = chkConfig.getCheckpointInterval(); if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = chkConfig.getCheckpointTimeout(); this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; this.maxConcurrentCheckpointAttempts = chkConfig.getMaxConcurrentCheckpoints(); this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery(); this.failureManager = checkNotNull(failureManager); this.clock = checkNotNull(clock); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = timer; 
this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy()); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); checkpointStorage.initializeBaseLocations(); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook#getIdentifier()}). * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently registered master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * any further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; triggerRequestQueued = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); abortPendingCheckpoints( new CheckpointException( CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN)); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint( final long timestamp, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(); return triggerSavepointInternal(timestamp, properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final long timestamp, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(); return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final long timestamp, final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); final CompletableFuture<CompletedCheckpoint> resultFuture = new CompletableFuture<>(); timer.execute(() -> { try { triggerCheckpoint( timestamp, checkpointProperties, targetLocation, false, advanceToEndOfEventTime). whenComplete((completedCheckpoint, throwable) -> { if (throwable == null) { resultFuture.complete(completedCheckpoint); } else { resultFuture.completeExceptionally(throwable); } }); } catch (CheckpointException e) { Throwable cause = new CheckpointException("Failed to trigger savepoint.", e.getCheckpointFailureReason()); resultFuture.completeExceptionally(cause); } }); return resultFuture; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. The return value is a future. It completes when the checkpoint triggered finishes * or an error occurred. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return a future to the completed checkpoint. */ public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(long timestamp, boolean isPeriodic) { try { return triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false); } catch (CheckpointException e) { long latestGeneratedCheckpointId = getCheckpointIdCounter().get(); failureManager.handleJobLevelCheckpointException(e, -1 * latestGeneratedCheckpointId); return FutureUtils.completedExceptionally(e); } } @VisibleForTesting public CompletableFuture<CompletedCheckpoint> triggerCheckpoint( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) throws CheckpointException { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX."); } synchronized (lock) { preCheckBeforeTriggeringCheckpoint(isPeriodic, props.forceCheckpoint()); } Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee == null) { LOG.info("Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } else if (ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info("Checkpoint triggering task {} of job {} is not in state {} but {} instead. 
Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job, ExecutionState.RUNNING, ee.getState()); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info("Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } final CheckpointStorageLocation checkpointStorageLocation; final long checkpointID; try { checkpointID = checkpointIdCounter.getAndIncrement(); checkpointStorageLocation = props.isSavepoint() ? checkpointStorage.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); } catch (Throwable t) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint for job {} ({} consecutive failed attempts so far).", job, numUnsuccessful, t); throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, masterHooks.keySet(), props, checkpointStorageLocation, executor); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } final Runnable canceller = () -> { synchronized (lock) { if (!checkpoint.isDiscarded()) { LOG.info("Checkpoint {} of job {} expired before completing.", checkpointID, job); abortPendingCheckpoint( checkpoint, new CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED)); } } }; try { synchronized (lock) { preCheckBeforeTriggeringCheckpoint(isPeriodic, props.forceCheckpoint()); LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job); pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( canceller, checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } for (MasterTriggerRestoreHook<?> masterHook : masterHooks.values()) { final MasterState masterState = MasterHooks.triggerHook(masterHook, checkpointID, timestamp, executor) .get(checkpointTimeout, TimeUnit.MILLISECONDS); checkpoint.acknowledgeMasterState(masterHook.getIdentifier(), masterState); } Preconditions.checkState(checkpoint.areMasterStatesFullyAcknowledged()); } final CheckpointOptions checkpointOptions = new CheckpointOptions( props.getCheckpointType(), checkpointStorageLocation.getLocationReference()); for (Execution execution: executions) { if (props.isSynchronous()) { execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime); } else { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } } numUnsuccessfulCheckpointsTriggers.set(0); return checkpoint.getCompletionFuture(); } catch (Throwable t) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint {} for job {}. 
({} consecutive failed attempts so far)", checkpointID, job, numUnsuccessful, t); synchronized (lock) { if (!checkpoint.isDiscarded()) { abortPendingCheckpoint( checkpoint, new CheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, t)); } } if (t instanceof CheckpointException) { throw (CheckpointException) t; } throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager * @param taskManagerLocationInfo The location info of the decline checkpoint message's sender */ public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null) { Preconditions.checkState( !checkpoint.isDiscarded(), "Received message for discarded but non-removed checkpoint " + checkpointId); LOG.info("Decline checkpoint {} by task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo); final CheckpointException checkpointException; if (message.getReason() == null) { checkpointException = new CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED); } else if (message.getReason() instanceof CheckpointException) { checkpointException = (CheckpointException) message.getReason(); } else { checkpointException = new CheckpointException( CheckpointFailureReason.JOB_FAILURE, message.getReason()); } abortPendingCheckpoint( checkpoint, checkpointException, message.getTaskExecutionId()); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } else { LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. 
*/ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); if (checkpoint.areTasksFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the task's execution attempt id was unknown. Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the pending checkpoint had been discarded. Discarding the " + "state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from task " + "{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); } else { LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. * * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. 
* * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { final PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints .values() .stream() .filter( pendingCheckpoint -> !pendingCheckpoint.isAcknowledgedBy(executionAttemptId)) .toArray(PendingCheckpoint[]::new); abortPendingCheckpoints( pendingCheckpointsToFail, new CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause)); } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { PendingCheckpoint[] checkpointsToSubsume = pendingCheckpoints .values() .stream() .filter( pendingCheckpoint -> pendingCheckpoint.getCheckpointId() < checkpointId && pendingCheckpoint.canBeSubsumed()) .toArray(PendingCheckpoint[]::new); abortPendingCheckpoints( checkpointsToSubsume, new CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED)); } /** * Triggers the queued request, if there is one. * * <p>NOTE: The caller of this method must hold the lock when invoking the method! */ private void triggerQueuedRequests() { if (triggerRequestQueued) { triggerRequestQueued = false; if (periodicScheduling) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); } currentPeriodicTrigger = scheduleTriggerWithDelay(0L); } else { timer.execute(new ScheduledTrigger()); } } } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution#setInitialState(JobManagerTaskRestore)}. * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ @Deprecated public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedState(new HashSet<>(tasks.values()), errorIfNoCheckpoint, allowNonRestoredState); } /** * Restores the latest checkpointed state. * * @param tasks Set of job vertices to restore. State for these vertices is * restored via {@link Execution#setInitialState(JobManagerTaskRestore)}. * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. 
* @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedState( final Set<ExecutionJobVertex> tasks, final boolean errorIfNoCheckpoint, final boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with the given savepoint. * * @param savepointPointer The pointer to the savepoint. * @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution#setInitialState(JobManagerTaskRestore)}. * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? 
"allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedState(new HashSet<>(tasks.values()), true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { return this.pendingCheckpoints.size(); } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public CheckpointIDCounter getCheckpointIdCounter() { return checkpointIdCounter; } public long getCheckpointTimeout() { return checkpointTimeout; } @VisibleForTesting boolean isCurrentPeriodicTriggerAvailable() { return currentPeriodicTrigger != null; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. */ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; currentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay()); } } public void stopCheckpointScheduler() { synchronized (lock) { triggerRequestQueued = false; periodicScheduling = false; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } abortPendingCheckpoints( new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND)); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to an exception. * @param exception The exception. */ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { abortPendingCheckpoints( pendingCheckpoints.values().toArray(new PendingCheckpoint[0]), exception); } } private void abortPendingCheckpoints( PendingCheckpoint[] checkpoints, CheckpointException exception) { assert Thread.holdsLock(lock); for (PendingCheckpoint pendingCheckpoint : checkpoints) { abortPendingCheckpoint(pendingCheckpoint, exception); } } /** * If too many checkpoints are currently in progress, we need to mark that a request is queued. * * @throws CheckpointException If too many checkpoints are currently in progress. 
*/ private void checkConcurrentCheckpoints() throws CheckpointException { if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } } /** * Make sure the minimum interval between checkpoints has passed. * * @throws CheckpointException If the minimum interval between checkpoints has not passed. */ private void checkMinPauseBetweenCheckpoints() throws CheckpointException { final long nextCheckpointTriggerRelativeTime = lastCheckpointCompletionRelativeTime + minPauseBetweenCheckpoints; final long durationTillNextMillis = nextCheckpointTriggerRelativeTime - clock.relativeTimeMillis(); if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } currentPeriodicTrigger = scheduleTriggerWithDelay(durationTillNextMillis); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } private long getRandomInitDelay() { return ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L); } private ScheduledFuture<?> scheduleTriggerWithDelay(long initDelay) { return timer.scheduleAtFixedRate( new ScheduledTrigger(), initDelay, baseInterval, TimeUnit.MILLISECONDS); } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(System.currentTimeMillis(), true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. 
* * @param jobId identifying the job to which the state object belongs * @param executionAttemptID identifying the task to which the state object belongs * @param checkpointId of the state object * @param subtaskState to discard asynchronously */ private void discardSubtaskState( final JobID jobId, final ExecutionAttemptID executionAttemptID, final long checkpointId, final TaskStateSnapshot subtaskState) { if (subtaskState != null) { executor.execute(new Runnable() { @Override public void run() { try { subtaskState.discardState(); } catch (Throwable t2) { LOG.warn("Could not properly discard state object of checkpoint {} " + "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2); } } }); } } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception) { abortPendingCheckpoint(pendingCheckpoint, exception, null); } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception, @Nullable final ExecutionAttemptID executionAttemptID) { assert(Thread.holdsLock(lock)); if (!pendingCheckpoint.isDiscarded()) { try { pendingCheckpoint.abort( exception.getCheckpointFailureReason(), exception.getCause()); if (pendingCheckpoint.getProps().isSavepoint() && pendingCheckpoint.getProps().isSynchronous()) { failureManager.handleSynchronousSavepointFailure(exception); } else if (executionAttemptID != null) { failureManager.handleTaskLevelCheckpointException( exception, pendingCheckpoint.getCheckpointId(), executionAttemptID); } else { failureManager.handleJobLevelCheckpointException( exception, pendingCheckpoint.getCheckpointId()); } } finally { pendingCheckpoints.remove(pendingCheckpoint.getCheckpointId()); rememberRecentCheckpointId(pendingCheckpoint.getCheckpointId()); triggerQueuedRequests(); } } } private void preCheckBeforeTriggeringCheckpoint(boolean isPeriodic, boolean forceCheckpoint) throws CheckpointException { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } if (isPeriodic && !periodicScheduling) { throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN); } if (!forceCheckpoint) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job); throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED); } checkConcurrentCheckpoints(); checkMinPauseBetweenCheckpoints(); } } }
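The checkMinPauseBetweenCheckpoints guard above reduces to simple arithmetic on a relative (monotonic) clock: the next trigger is allowed only minPauseBetweenCheckpoints milliseconds after the last completion, and if that point has not been reached the periodic trigger is rescheduled with the remaining delay. A minimal standalone sketch of that computation; the class and method names here (MinPauseGate and friends) are hypothetical, not Flink API:

// Sketch only: models the min-pause arithmetic, not Flink's coordinator.
final class MinPauseGate {
    private final long minPauseBetweenCheckpointsMillis;
    private long lastCompletionRelativeTimeMillis;

    MinPauseGate(long minPauseBetweenCheckpointsMillis) {
        this.minPauseBetweenCheckpointsMillis = minPauseBetweenCheckpointsMillis;
    }

    // Record a checkpoint completion at the given relative time.
    void onCheckpointCompleted(long nowRelativeMillis) {
        lastCompletionRelativeTimeMillis = nowRelativeMillis;
    }

    // Returns 0 if a trigger is allowed now, otherwise the millis to wait
    // (the coordinator would reschedule its periodic trigger with this delay).
    long millisUntilTriggerAllowed(long nowRelativeMillis) {
        long earliestNextTrigger = lastCompletionRelativeTimeMillis + minPauseBetweenCheckpointsMillis;
        return Math.max(0L, earliestNextTrigger - nowRelativeMillis);
    }
}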
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** Map from checkpoint ID to the pending checkpoint. */ private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. */ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time(in ms) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpoints; /** The maximum number of checkpoints that may be in progress at the same time. */ private final int maxConcurrentCheckpointAttempts; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. * It must be single-threaded. Eventually it will be replaced by main thread executor. */ private final ScheduledExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link Clock * completed. */ private long lastCheckpointCompletionRelativeTime; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. 
* Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** * Flag whether periodic triggering is suspended (too many concurrent pending checkpoints). * Non-volatile, because only accessed in synchronized scope */ private boolean periodicTriggeringSuspended; /** * Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** * Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** * A factory for SharedStateRegistry objects. */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** * Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private boolean isPreferCheckpointForRecovery; private final CheckpointFailureManager failureManager; private final Clock clock; /** * Flag indicating that there is an in-flight trigger request. */ private boolean isTriggering = false; /** * A queue to cache those trigger requests which can't be triggered immediately. */ private final ArrayDeque<CheckpointTriggerRequest> triggerRequestQueue; public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { this( job, chkConfig, tasksToTrigger, tasksToWaitFor, tasksToCommitTo, checkpointIDCounter, completedCheckpointStore, checkpointStateBackend, executor, timer, sharedStateRegistryFactory, failureManager, SystemClock.getInstance()); } @VisibleForTesting public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager, Clock clock) { checkNotNull(checkpointStateBackend); long minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints(); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } long baseInterval = chkConfig.getCheckpointInterval(); if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = chkConfig.getCheckpointTimeout(); this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; this.maxConcurrentCheckpointAttempts = chkConfig.getMaxConcurrentCheckpoints(); this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); 
this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery(); this.failureManager = checkNotNull(failureManager); this.clock = checkNotNull(clock); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.triggerRequestQueue = new ArrayDeque<>(); this.timer = timer; this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy()); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); checkpointStorage.initializeBaseLocations(); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently registered master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * any further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; periodicTriggeringSuspended = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); final CheckpointException reason = new CheckpointException( CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); abortPendingAndQueuedCheckpoints(reason); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint( final long timestamp, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(); return triggerSavepointInternal(timestamp, properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final long timestamp, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(); return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final long timestamp, final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); final CompletableFuture<CompletedCheckpoint> resultFuture = new CompletableFuture<>(); timer.execute(() -> triggerCheckpoint( timestamp, checkpointProperties, targetLocation, false, advanceToEndOfEventTime) .whenComplete((completedCheckpoint, throwable) -> { if (throwable == null) { resultFuture.complete(completedCheckpoint); } else { resultFuture.completeExceptionally(throwable); } })); return resultFuture; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. The return value is a future. It completes when the checkpoint triggered finishes * or an error occurred. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return a future to the completed checkpoint. 
*/ public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(long timestamp, boolean isPeriodic) { return triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false); } @VisibleForTesting public CompletableFuture<CompletedCheckpoint> triggerCheckpoint( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { return FutureUtils.completedExceptionally(new IllegalArgumentException( "Only synchronous savepoints are allowed to advance the watermark to MAX.")); } final CompletableFuture<CompletedCheckpoint> onCompletionPromise = new CompletableFuture<>(); synchronized (lock) { if (isTriggering || !triggerRequestQueue.isEmpty()) { triggerRequestQueue.add(new CheckpointTriggerRequest( timestamp, props, externalSavepointLocation, isPeriodic, advanceToEndOfTime, onCompletionPromise)); return onCompletionPromise; } } startTriggeringCheckpoint( timestamp, props, externalSavepointLocation, isPeriodic, advanceToEndOfTime, onCompletionPromise); return onCompletionPromise; } private void startTriggeringCheckpoint( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime, CompletableFuture<CompletedCheckpoint> onCompletionPromise) { try { synchronized (lock) { preCheckBeforeTriggeringCheckpoint(isPeriodic, props.forceCheckpoint()); } final Execution[] executions = getTriggerExecutions(); final Map<ExecutionAttemptID, ExecutionVertex> ackTasks = getAckTasks(); Preconditions.checkState(!isTriggering); isTriggering = true; final CompletableFuture<PendingCheckpoint> pendingCheckpointCompletableFuture = initializeCheckpoint(props, externalSavepointLocation) .thenApplyAsync( (checkpointIdAndStorageLocation) -> createPendingCheckpoint( timestamp, props, ackTasks, isPeriodic, checkpointIdAndStorageLocation.checkpointId, checkpointIdAndStorageLocation.checkpointStorageLocation, onCompletionPromise), timer); pendingCheckpointCompletableFuture .thenCompose(this::snapshotMasterState) .whenCompleteAsync( (ignored, throwable) -> { final PendingCheckpoint checkpoint = FutureUtils.getWithoutException(pendingCheckpointCompletableFuture); if (throwable == null && checkpoint != null && !checkpoint.isDiscarded()) { snapshotTaskState( timestamp, checkpoint.getCheckpointId(), checkpoint.getCheckpointStorageLocation(), props, executions, advanceToEndOfTime); onTriggerSuccess(); } else { if (checkpoint == null) { onTriggerFailure(onCompletionPromise, throwable); } else { onTriggerFailure(checkpoint, throwable); } } }, timer); } catch (Throwable throwable) { onTriggerFailure(onCompletionPromise, throwable); } } /** * Initialize the checkpoint trigger asynchronously. It will be executed in io thread due to * it might be time-consuming. * * @param props checkpoint properties * @param externalSavepointLocation the external savepoint location, it might be null * @return the future of initialized result, checkpoint id and checkpoint location */ private CompletableFuture<CheckpointIdAndStorageLocation> initializeCheckpoint( CheckpointProperties props, @Nullable String externalSavepointLocation) { return CompletableFuture.supplyAsync(() -> { try { long checkpointID = checkpointIdCounter.getAndIncrement(); CheckpointStorageLocation checkpointStorageLocation = props.isSavepoint() ? 
checkpointStorage .initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); return new CheckpointIdAndStorageLocation(checkpointID, checkpointStorageLocation); } catch (Throwable throwable) { throw new CompletionException(throwable); } }, executor); } private PendingCheckpoint createPendingCheckpoint( long timestamp, CheckpointProperties props, Map<ExecutionAttemptID, ExecutionVertex> ackTasks, boolean isPeriodic, long checkpointID, CheckpointStorageLocation checkpointStorageLocation, CompletableFuture<CompletedCheckpoint> onCompletionPromise) { synchronized (lock) { try { preCheckGlobalState(isPeriodic); } catch (Throwable t) { throw new CompletionException(t); } } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, masterHooks.keySet(), props, checkpointStorageLocation, executor, onCompletionPromise); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } synchronized (lock) { pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( new CheckpointCanceller(checkpoint), checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } } LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job); return checkpoint; } /** * Snapshot master hook states asynchronously. * * @param checkpoint the pending checkpoint * @return a future that completes once all master hook states have been acknowledged */ private CompletableFuture<Void> snapshotMasterState(PendingCheckpoint checkpoint) { if (masterHooks.isEmpty()) { return CompletableFuture.completedFuture(null); } final long checkpointID = checkpoint.getCheckpointId(); final long timestamp = checkpoint.getCheckpointTimestamp(); final CompletableFuture<Void> masterStateCompletableFuture = new CompletableFuture<>(); for (MasterTriggerRestoreHook<?> masterHook : masterHooks.values()) { MasterHooks .triggerHook(masterHook, checkpointID, timestamp, executor) .whenCompleteAsync( (masterState, throwable) -> { try { synchronized (lock) { if (masterStateCompletableFuture.isDone()) { return; } if (checkpoint.isDiscarded()) { throw new IllegalStateException( "Checkpoint " + checkpointID + " has been discarded"); } if (throwable == null) { checkpoint.acknowledgeMasterState( masterHook.getIdentifier(), masterState); if (checkpoint.areMasterStatesFullyAcknowledged()) { masterStateCompletableFuture.complete(null); } } else { masterStateCompletableFuture.completeExceptionally(throwable); } } } catch (Throwable t) { masterStateCompletableFuture.completeExceptionally(t); } }, timer); } return masterStateCompletableFuture; } /** * Snapshot task state. * * @param timestamp the timestamp of this checkpoint request * @param checkpointID the checkpoint id * @param checkpointStorageLocation the checkpoint location * @param props the checkpoint properties * @param executions the executions which should be triggered * @param advanceToEndOfTime Flag indicating if the source should inject a {@code MAX_WATERMARK} * in the pipeline to fire any registered event-time timers. 
*/ private void snapshotTaskState( long timestamp, long checkpointID, CheckpointStorageLocation checkpointStorageLocation, CheckpointProperties props, Execution[] executions, boolean advanceToEndOfTime) { final CheckpointOptions checkpointOptions = new CheckpointOptions( props.getCheckpointType(), checkpointStorageLocation.getLocationReference()); for (Execution execution: executions) { if (props.isSynchronous()) { execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime); } else { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } } } /** * Trigger request is successful. * NOTE, it must be invoked if trigger request is successful. */ private void onTriggerSuccess() { isTriggering = false; numUnsuccessfulCheckpointsTriggers.set(0); checkQueuedCheckpointTriggerRequest(); } /** * The trigger request is failed prematurely without a proper initialization. * There is no resource to release, but the completion promise needs to fail manually here. * * @param onCompletionPromise the completion promise of the checkpoint/savepoint * @param throwable the reason of trigger failure */ private void onTriggerFailure( CompletableFuture<CompletedCheckpoint> onCompletionPromise, Throwable throwable) { final CheckpointException checkpointException = getCheckpointException(CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable); onCompletionPromise.completeExceptionally(checkpointException); onTriggerFailure((PendingCheckpoint) null, checkpointException); } /** * The trigger request is failed. * NOTE, it must be invoked if trigger request is failed. * * @param checkpoint the pending checkpoint which is failed. It could be null if it's failed * prematurely without a proper initialization. * @param throwable the reason of trigger failure */ private void onTriggerFailure(@Nullable PendingCheckpoint checkpoint, Throwable throwable) { try { if (checkpoint != null && !checkpoint.isDiscarded()) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn( "Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)", checkpoint.getCheckpointId(), job, numUnsuccessful, throwable); final CheckpointException cause = getCheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable); synchronized (lock) { abortPendingCheckpoint(checkpoint, cause); } } } finally { isTriggering = false; checkQueuedCheckpointTriggerRequest(); } } /** * Checks whether there is a trigger request queued. Consumes it if there is one. * NOTE: this must be called after each triggering */ private void checkQueuedCheckpointTriggerRequest() { synchronized (lock) { if (triggerRequestQueue.isEmpty()) { return; } } final CheckpointTriggerRequest request; synchronized (lock) { request = triggerRequestQueue.poll(); } if (request != null) { startTriggeringCheckpoint( request.timestamp, request.props, request.externalSavepointLocation, request.isPeriodic, request.advanceToEndOfTime, request.onCompletionPromise); } } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. 
* * @param message Checkpoint decline from the task manager * @param taskManagerLocationInfo The location info of the decline checkpoint message's sender */ public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null) { Preconditions.checkState( !checkpoint.isDiscarded(), "Received message for discarded but non-removed checkpoint " + checkpointId); LOG.info("Decline checkpoint {} by task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo); final CheckpointException checkpointException; if (message.getReason() == null) { checkpointException = new CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED); } else { checkpointException = getCheckpointException( CheckpointFailureReason.JOB_FAILURE, message.getReason()); } abortPendingCheckpoint( checkpoint, checkpointException, message.getTaskExecutionId()); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } else { LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. 
*/ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); if (checkpoint.areTasksFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the task's execution attempt id was unknown. Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the pending checkpoint had been discarded. Discarding the " + "state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from task " + "{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); } else { LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. * * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. 
* * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { abortPendingCheckpoints( checkpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId), new CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause)); } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { abortPendingCheckpoints( checkpoint -> checkpoint.getCheckpointId() < checkpointId && checkpoint.canBeSubsumed(), new CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED)); } /** * Resumes suspended periodic triggering. * * <p>NOTE: The caller of this method must hold the lock when invoking the method! */ private void resumePeriodicTriggering() { assert(Thread.holdsLock(lock)); if (shutdown || !periodicScheduling) { return; } if (periodicTriggeringSuspended) { periodicTriggeringSuspended = false; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); } currentPeriodicTrigger = scheduleTriggerWithDelay(0L); } } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ @Deprecated public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedState(new HashSet<>(tasks.values()), errorIfNoCheckpoint, allowNonRestoredState); } /** * Restores the latest checkpointed state. * * @param tasks Set of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. 
* @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedState( final Set<ExecutionJobVertex> tasks, final boolean errorIfNoCheckpoint, final boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with given savepoint. * * @param savepointPointer The pointer to the savepoint. * @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? 
"allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedState(new HashSet<>(tasks.values()), true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { return this.pendingCheckpoints.size(); } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public long getCheckpointTimeout() { return checkpointTimeout; } public ArrayDeque<CheckpointTriggerRequest> getTriggerRequestQueue() { return triggerRequestQueue; } public boolean isTriggering() { return isTriggering; } @VisibleForTesting boolean isCurrentPeriodicTriggerAvailable() { return currentPeriodicTrigger != null; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. */ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; currentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay()); } } public void stopCheckpointScheduler() { synchronized (lock) { periodicTriggeringSuspended = false; periodicScheduling = false; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } final CheckpointException reason = new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND); abortPendingAndQueuedCheckpoints(reason); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to en exception. * @param exception The exception. */ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { abortPendingCheckpoints(ignored -> true, exception); } } private void abortPendingCheckpoints( Predicate<PendingCheckpoint> checkpointToFailPredicate, CheckpointException exception) { assert Thread.holdsLock(lock); final PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints .values() .stream() .filter(checkpointToFailPredicate) .toArray(PendingCheckpoint[]::new); for (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) { abortPendingCheckpoint(pendingCheckpoint, exception); } } /** * If too many checkpoints are currently in progress, we need to mark that a request is queued. * * @throws CheckpointException If too many checkpoints are currently in progress. 
*/ private void checkConcurrentCheckpoints() throws CheckpointException { if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { periodicTriggeringSuspended = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } } /** * Make sure the minimum interval between checkpoints has passed. * * @throws CheckpointException If the minimum interval between checkpoints has not passed. */ private void checkMinPauseBetweenCheckpoints() throws CheckpointException { final long nextCheckpointTriggerRelativeTime = lastCheckpointCompletionRelativeTime + minPauseBetweenCheckpoints; final long durationTillNextMillis = nextCheckpointTriggerRelativeTime - clock.relativeTimeMillis(); if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } currentPeriodicTrigger = scheduleTriggerWithDelay(durationTillNextMillis); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } private long getRandomInitDelay() { return ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L); } private ScheduledFuture<?> scheduleTriggerWithDelay(long initDelay) { return timer.scheduleAtFixedRate( new ScheduledTrigger(), initDelay, baseInterval, TimeUnit.MILLISECONDS); } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(System.currentTimeMillis(), true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. 
* * @param jobId identifying the job to which the state object belongs * @param executionAttemptID identifying the task to which the state object belongs * @param checkpointId of the state object * @param subtaskState to discard asynchronously */ private void discardSubtaskState( final JobID jobId, final ExecutionAttemptID executionAttemptID, final long checkpointId, final TaskStateSnapshot subtaskState) { if (subtaskState != null) { executor.execute(new Runnable() { @Override public void run() { try { subtaskState.discardState(); } catch (Throwable t2) { LOG.warn("Could not properly discard state object of checkpoint {} " + "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2); } } }); } } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception) { abortPendingCheckpoint(pendingCheckpoint, exception, null); } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception, @Nullable final ExecutionAttemptID executionAttemptID) { assert(Thread.holdsLock(lock)); if (!pendingCheckpoint.isDiscarded()) { try { pendingCheckpoint.abort( exception.getCheckpointFailureReason(), exception.getCause()); if (pendingCheckpoint.getProps().isSavepoint() && pendingCheckpoint.getProps().isSynchronous()) { failureManager.handleSynchronousSavepointFailure(exception); } else if (executionAttemptID != null) { failureManager.handleTaskLevelCheckpointException( exception, pendingCheckpoint.getCheckpointId(), executionAttemptID); } else { failureManager.handleJobLevelCheckpointException( exception, pendingCheckpoint.getCheckpointId()); } } finally { pendingCheckpoints.remove(pendingCheckpoint.getCheckpointId()); rememberRecentCheckpointId(pendingCheckpoint.getCheckpointId()); resumePeriodicTriggering(); } } } private void preCheckBeforeTriggeringCheckpoint(boolean isPeriodic, boolean forceCheckpoint) throws CheckpointException { preCheckGlobalState(isPeriodic); if (!forceCheckpoint) { checkConcurrentCheckpoints(); checkMinPauseBetweenCheckpoints(); } } private void preCheckGlobalState(boolean isPeriodic) throws CheckpointException { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } if (isPeriodic && !periodicScheduling) { throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN); } } /** * Check if all tasks that we need to trigger are running. If not, abort the checkpoint. * * @return the executions need to be triggered. * @throws CheckpointException the exception fails checking */ private Execution[] getTriggerExecutions() throws CheckpointException { Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee == null) { LOG.info( "Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } else if (ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info( "Checkpoint triggering task {} of job {} is not in state {} but {} instead. 
Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job, ExecutionState.RUNNING, ee.getState()); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } return executions; } /** * Check if all tasks that need to acknowledge the checkpoint are running. * If not, abort the checkpoint * * @return the execution vertices which should give an ack response * @throws CheckpointException the exception fails checking */ private Map<ExecutionAttemptID, ExecutionVertex> getAckTasks() throws CheckpointException { Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info( "Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } return ackTasks; } private void abortPendingAndQueuedCheckpoints(CheckpointException exception) { assert(Thread.holdsLock(lock)); CheckpointTriggerRequest request; while ((request = triggerRequestQueue.poll()) != null) { request.onCompletionPromise.completeExceptionally(exception); } abortPendingCheckpoints(exception); } /** * The canceller of checkpoint. The checkpoint might be cancelled if it doesn't finish in a * configured period. */ private class CheckpointCanceller implements Runnable { private final PendingCheckpoint pendingCheckpoint; private CheckpointCanceller(PendingCheckpoint pendingCheckpoint) { this.pendingCheckpoint = checkNotNull(pendingCheckpoint); } @Override public void run() { synchronized (lock) { if (!pendingCheckpoint.isDiscarded()) { LOG.info("Checkpoint {} of job {} expired before completing.", pendingCheckpoint.getCheckpointId(), job); abortPendingCheckpoint( pendingCheckpoint, new CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED)); } } } } private static CheckpointException getCheckpointException( CheckpointFailureReason defaultReason, Throwable throwable) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(throwable, CheckpointException.class); return checkpointExceptionOptional .orElseGet(() -> new CheckpointException(defaultReason, throwable)); } private static class CheckpointIdAndStorageLocation { private final long checkpointId; private final CheckpointStorageLocation checkpointStorageLocation; CheckpointIdAndStorageLocation( long checkpointId, CheckpointStorageLocation checkpointStorageLocation) { this.checkpointId = checkpointId; this.checkpointStorageLocation = checkNotNull(checkpointStorageLocation); } } private static class CheckpointTriggerRequest { private final long timestamp; private final CheckpointProperties props; private final @Nullable String externalSavepointLocation; private final boolean isPeriodic; private final boolean advanceToEndOfTime; private final CompletableFuture<CompletedCheckpoint> onCompletionPromise; CheckpointTriggerRequest( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime, CompletableFuture<CompletedCheckpoint> onCompletionPromise) { this.timestamp = timestamp; this.props = checkNotNull(props); this.externalSavepointLocation = externalSavepointLocation; this.isPeriodic = isPeriodic; this.advanceToEndOfTime = advanceToEndOfTime; 
this.onCompletionPromise = checkNotNull(onCompletionPromise); } } }
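The revised coordinator above replaces the old triggerRequestQueued flag with an explicit isTriggering flag plus a triggerRequestQueue, so that at most one trigger request is initialized at a time and later requests are replayed in FIFO order once the in-flight one finishes. A minimal sketch of that serialization pattern; the class and method names here are hypothetical, not Flink API:

import java.util.ArrayDeque;

// Sketch only: serializes requests the way the coordinator does, with one
// in-flight request and a FIFO queue for the rest.
final class TriggerSerializer {
    private final Object lock = new Object();
    private final ArrayDeque<Runnable> queue = new ArrayDeque<>();
    private boolean triggering;

    void trigger(Runnable request) {
        synchronized (lock) {
            if (triggering || !queue.isEmpty()) {
                queue.add(request);      // cache it; preserves request order
                return;
            }
            triggering = true;
        }
        request.run();                   // run outside the lock
    }

    // Must be called exactly once per started request, on success or failure
    // (mirrors onTriggerSuccess/onTriggerFailure draining the queue).
    void onRequestFinished() {
        Runnable next;
        synchronized (lock) {
            triggering = false;
            next = queue.poll();
            if (next != null) {
                triggering = true;
            }
        }
        if (next != null) {
            next.run();
        }
    }
}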
Moved this logic and fixed other issues in 3954b14
public void visit(BLangBinaryExpr binaryExpr) { if (OperatorKind.AND.equals(binaryExpr.opKind)) { visitAndExpression(binaryExpr); } else if (OperatorKind.OR.equals(binaryExpr.opKind)) { visitOrExpression(binaryExpr); } else if (binaryExpr.opSymbol.opcode == InstructionCodes.REQ_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.RNE_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.SEQ_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.SNE_NULL) { BLangExpression expr = (binaryExpr.lhsExpr.type.tag == TypeTags.NIL) ? binaryExpr.rhsExpr : binaryExpr.lhsExpr; genNode(expr, this.env); emit(binaryExpr.opSymbol.opcode, expr.regIndex, calcAndGetExprRegIndex(binaryExpr)); } else { genNode(binaryExpr.lhsExpr, this.env); genNode(binaryExpr.rhsExpr, this.env); RegIndex regIndex = calcAndGetExprRegIndex(binaryExpr); int opCode = binaryExpr.opSymbol.opcode; if (opCode == InstructionCodes.INT_RANGE) { RegIndex endValRegIndex = binaryExpr.rhsExpr.regIndex; if (OperatorKind.HALF_OPEN_RANGE.equals(binaryExpr.opKind)) { endValRegIndex = getRegIndex(TypeTags.INT); RegIndex constOneRegIndex = getRegIndex(TypeTags.INT); emit(InstructionCodes.ICONST_1, constOneRegIndex); emit(InstructionCodes.ISUB, binaryExpr.rhsExpr.regIndex, constOneRegIndex, endValRegIndex); } if (binaryExpr.parent instanceof BLangForeach) { emit(InstructionCodes.NEW_INT_RANGE, binaryExpr.lhsExpr.regIndex, endValRegIndex, regIndex); } else { emit(opCode, binaryExpr.lhsExpr.regIndex, endValRegIndex, regIndex); } } else { emit(opCode, binaryExpr.lhsExpr.regIndex, binaryExpr.rhsExpr.regIndex, regIndex); } } }
if (OperatorKind.HALF_OPEN_RANGE.equals(binaryExpr.opKind)) {
public void visit(BLangBinaryExpr binaryExpr) { if (OperatorKind.AND.equals(binaryExpr.opKind)) { visitAndExpression(binaryExpr); } else if (OperatorKind.OR.equals(binaryExpr.opKind)) { visitOrExpression(binaryExpr); } else if (binaryExpr.opSymbol.opcode == InstructionCodes.REQ_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.RNE_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.SEQ_NULL || binaryExpr.opSymbol.opcode == InstructionCodes.SNE_NULL) { BLangExpression expr = (binaryExpr.lhsExpr.type.tag == TypeTags.NIL) ? binaryExpr.rhsExpr : binaryExpr.lhsExpr; genNode(expr, this.env); emit(binaryExpr.opSymbol.opcode, expr.regIndex, calcAndGetExprRegIndex(binaryExpr)); } else { genNode(binaryExpr.lhsExpr, this.env); genNode(binaryExpr.rhsExpr, this.env); RegIndex regIndex = calcAndGetExprRegIndex(binaryExpr); int opCode = binaryExpr.opSymbol.opcode; if (opCode == InstructionCodes.INT_RANGE) { if (binaryExpr.parent instanceof BLangForeach) { emit(InstructionCodes.NEW_INT_RANGE, binaryExpr.lhsExpr.regIndex, binaryExpr.rhsExpr.regIndex, regIndex); } else { emit(opCode, binaryExpr.lhsExpr.regIndex, binaryExpr.rhsExpr.regIndex, regIndex); } } else { emit(opCode, binaryExpr.lhsExpr.regIndex, binaryExpr.rhsExpr.regIndex, regIndex); } } }
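For context on this record: the original method body lowered a half-open integer range (lhs ..< rhs) by materializing rhs - 1 with ICONST_1 followed by ISUB and then reusing the range instructions, while the revised body drops that adjustment here (per the comment, it was moved elsewhere in 3954b14). A small sketch of the endpoint arithmetic the removed code performed; the helper name is hypothetical:

// Sketch only: a ..< b covers the same integers as the closed range a .. (b - 1);
// the removed bytecode computed the new end value with ICONST_1 + ISUB.
static long[] lowerHalfOpenRange(long start, long endExclusive) {
    long endInclusive = endExclusive - 1;
    return new long[] { start, endInclusive };
}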
class CodeGenerator extends BLangNodeVisitor { private static final CompilerContext.Key<CodeGenerator> CODE_GENERATOR_KEY = new CompilerContext.Key<>(); /** * This structure holds current package-level variable indexes. */ private VariableIndex pvIndexes = new VariableIndex(PACKAGE); /** * This structure holds current local variable indexes. */ private VariableIndex lvIndexes = new VariableIndex(LOCAL); /** * This structure holds current field indexes. */ private VariableIndex fieldIndexes = new VariableIndex(FIELD); /** * This structure holds current register indexes. */ private VariableIndex regIndexes = new VariableIndex(REG); /** * This structure holds the maximum register count per type. * This structure is updated for every statement. */ private VariableIndex maxRegIndexes = new VariableIndex(REG); private List<RegIndex> regIndexList = new ArrayList<>(); private SymbolEnv env; private final SymbolTable symTable; private final PackageCache packageCache; private PackageInfo currentPkgInfo; private PackageID currentPkgID; private int currentPackageRefCPIndex; private LineNumberTableAttributeInfo lineNoAttrInfo; private CallableUnitInfo currentCallableUnitInfo; private LocalVariableAttributeInfo localVarAttrInfo; private WorkerInfo currentWorkerInfo; private ServiceInfo currentServiceInfo; private boolean varAssignment = false; private boolean regIndexResetDisabled = false; private int transactionIndex = 0; private Stack<Instruction> loopResetInstructionStack = new Stack<>(); private Stack<Instruction> loopExitInstructionStack = new Stack<>(); private Stack<Instruction> abortInstructions = new Stack<>(); private Stack<Instruction> failInstructions = new Stack<>(); private int workerChannelCount = 0; private int forkJoinCount = 0; public static CodeGenerator getInstance(CompilerContext context) { CodeGenerator codeGenerator = context.get(CODE_GENERATOR_KEY); if (codeGenerator == null) { codeGenerator = new CodeGenerator(context); } return codeGenerator; } public CodeGenerator(CompilerContext context) { context.put(CODE_GENERATOR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.packageCache = PackageCache.getInstance(context); } public ProgramFile generateBALX(BLangPackage pkgNode) { ProgramFile programFile = new ProgramFile(); addPackageInfo(pkgNode.symbol, programFile); programFile.entryPkgCPIndex = addPackageRefCPEntry(programFile, pkgNode.symbol.pkgID); setEntryPoints(programFile, pkgNode); return programFile; } public BLangPackage generateBALO(BLangPackage pkgNode) { this.pvIndexes = new VariableIndex(VariableIndex.Kind.PACKAGE); this.currentPkgInfo = new PackageInfo(); genNode(pkgNode, this.symTable.pkgEnvMap.get(pkgNode.symbol)); prepareIndexes(this.pvIndexes); addVarCountAttrInfo(this.currentPkgInfo, this.currentPkgInfo, pvIndexes); pkgNode.symbol.packageFile = new PackageFile(getPackageBinaryContent(pkgNode)); setEntryPoints(pkgNode.symbol.packageFile, pkgNode); this.currentPkgInfo = null; return pkgNode; } private void setEntryPoints(CompiledBinaryFile compiledBinaryFile, BLangPackage pkgNode) { BLangFunction mainFunc = getMainFunction(pkgNode); if (mainFunc != null) { compiledBinaryFile.setMainEPAvailable(true); pkgNode.symbol.entryPointExists = true; } if (pkgNode.services.size() != 0) { compiledBinaryFile.setServiceEPAvailable(true); pkgNode.symbol.entryPointExists = true; } } private BLangFunction getMainFunction(BLangPackage pkgNode) { for (BLangFunction funcNode : pkgNode.functions) { if (CompilerUtils.isMainFunction(funcNode)) { return funcNode; } } 
return null; } public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.CODE_GEN)) { return; } pkgNode.imports.forEach(impPkgNode -> { int impPkgNameCPIndex = addUTF8CPEntry(this.currentPkgInfo, impPkgNode.symbol.pkgID.bvmAlias()); int impPkgVersionCPIndex = addUTF8CPEntry(this.currentPkgInfo, impPkgNode.symbol.pkgID.version.value); ImportPackageInfo importPkgInfo = new ImportPackageInfo(impPkgNameCPIndex, impPkgVersionCPIndex); this.currentPkgInfo.importPkgInfoSet.add(importPkgInfo); }); BPackageSymbol pkgSymbol = pkgNode.symbol; currentPkgID = pkgSymbol.pkgID; currentPkgInfo.nameCPIndex = addUTF8CPEntry(currentPkgInfo, currentPkgID.bvmAlias()); currentPkgInfo.versionCPIndex = addUTF8CPEntry(currentPkgInfo, currentPkgID.version.value); currentPackageRefCPIndex = addPackageRefCPEntry(currentPkgInfo, currentPkgID); int lineNoAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LINE_NUMBER_TABLE_ATTRIBUTE.value()); lineNoAttrInfo = new LineNumberTableAttributeInfo(lineNoAttrNameIndex); int pkgVarAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); currentPkgInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, new LocalVariableAttributeInfo(pkgVarAttrNameIndex)); pkgNode.globalVars.forEach(this::createPackageVarInfo); pkgNode.typeDefinitions.forEach(this::createTypeDefinitionInfoEntry); pkgNode.annotations.forEach(this::createAnnotationInfoEntry); pkgNode.functions.forEach(this::createFunctionInfoEntry); pkgNode.services.forEach(this::createServiceInfoEntry); pkgNode.functions.forEach(this::createFunctionInfoEntry); visitBuiltinFunctions(pkgNode.initFunction); visitBuiltinFunctions(pkgNode.startFunction); visitBuiltinFunctions(pkgNode.stopFunction); pkgNode.topLevelNodes.stream() .filter(pkgLevelNode -> pkgLevelNode.getKind() != NodeKind.VARIABLE && pkgLevelNode.getKind() != NodeKind.XMLNS) .forEach(pkgLevelNode -> genNode((BLangNode) pkgLevelNode, this.env)); pkgNode.functions.forEach(funcNode -> { funcNode.symbol = funcNode.originalFuncSymbol; }); currentPkgInfo.addAttributeInfo(AttributeInfo.Kind.LINE_NUMBER_TABLE_ATTRIBUTE, lineNoAttrInfo); currentPackageRefCPIndex = -1; currentPkgID = null; pkgNode.completedPhases.add(CompilerPhase.CODE_GEN); } private void visitBuiltinFunctions(BLangFunction function) { createFunctionInfoEntry(function); genNode(function, this.env); } public void visit(BLangService serviceNode) { BLangFunction initFunction = (BLangFunction) serviceNode.getInitFunction(); visit(initFunction); currentServiceInfo = currentPkgInfo.getServiceInfo(serviceNode.getName().getValue()); SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, this.env); serviceNode.resources.forEach(resource -> genNode(resource, serviceEnv)); } public void visit(BLangResource resourceNode) { ResourceInfo resourceInfo = currentServiceInfo.resourceInfoMap.get(resourceNode.name.getValue()); currentCallableUnitInfo = resourceInfo; SymbolEnv resourceEnv = SymbolEnv .createResourceActionSymbolEnv(resourceNode, resourceNode.symbol.scope, this.env); visitInvokableNode(resourceNode, currentCallableUnitInfo, resourceEnv); } public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, this.env); currentCallableUnitInfo = currentPkgInfo.functionInfoMap.get(funcNode.symbol.name.value); visitInvokableNode(funcNode, currentCallableUnitInfo, funcEnv); } public void visit(BLangBlockStmt blockNode) { SymbolEnv 
blockEnv = SymbolEnv.createBlockEnv(blockNode, this.env); for (BLangStatement stmt : blockNode.stmts) { if (stmt.getKind() != NodeKind.TRY && stmt.getKind() != NodeKind.CATCH && stmt.getKind() != NodeKind.IF) { addLineNumberInfo(stmt.pos); } genNode(stmt, blockEnv); if (regIndexResetDisabled) { continue; } setMaxRegIndexes(regIndexes, maxRegIndexes); regIndexes = new VariableIndex(REG); } } public void visit(BLangEnum enumNode) { } public void visit(BLangVariable varNode) { BVarSymbol varSymbol = varNode.symbol; int ownerSymTag = env.scope.owner.tag; if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) { varSymbol.varIndex = getLVIndex(varSymbol.type.tag); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); localVarAttrInfo.localVars.add(localVarInfo); } else { throw new IllegalStateException(""); } BLangExpression rhsExpr = varNode.expr; if (rhsExpr != null) { rhsExpr.regIndex = varSymbol.varIndex; genNode(rhsExpr, this.env); } } public void visit(BLangTransformer transformerNode) { SymbolEnv transformerEnv = SymbolEnv.createTransformerEnv(transformerNode, transformerNode.symbol.scope, this.env); currentCallableUnitInfo = currentPkgInfo.transformerInfoMap.get(transformerNode.symbol.name.value); visitInvokableNode(transformerNode, currentCallableUnitInfo, transformerEnv); } public void visit(BLangVariableDef varDefNode) { genNode(varDefNode.var, this.env); } @Override public void visit(BLangMatch matchStmt) { } public void visit(BLangReturn returnNode) { if (returnNode.expr.type != symTable.nilType) { BLangExpression expr = returnNode.expr; this.genNode(expr, this.env); emit(this.typeTagToInstr(expr.type.tag), getOperand(0), expr.regIndex); } generateFinallyInstructions(returnNode); emit(InstructionCodes.RET); } private int typeTagToInstr(int typeTag) { switch (typeTag) { case TypeTags.INT: return InstructionCodes.IRET; case TypeTags.FLOAT: return InstructionCodes.FRET; case TypeTags.STRING: return InstructionCodes.SRET; case TypeTags.BOOLEAN: return InstructionCodes.BRET; case TypeTags.BLOB: return InstructionCodes.LRET; default: return InstructionCodes.RRET; } } @Override public void visit(BLangLiteral literalExpr) { int opcode; Operand regIndex = calcAndGetExprRegIndex(literalExpr); int typeTag = literalExpr.type.tag; switch (typeTag) { case TypeTags.INT: long longVal = (Long) literalExpr.value; if (longVal >= 0 && longVal <= 5) { opcode = InstructionCodes.ICONST_0 + (int) longVal; emit(opcode, regIndex); } else { int intCPEntryIndex = currentPkgInfo.addCPEntry(new IntegerCPEntry(longVal)); emit(InstructionCodes.ICONST, getOperand(intCPEntryIndex), regIndex); } break; case TypeTags.FLOAT: double doubleVal = (Double) literalExpr.value; if (doubleVal == 0 || doubleVal == 1 || doubleVal == 2 || doubleVal == 3 || doubleVal == 4 || doubleVal == 5) { opcode = InstructionCodes.FCONST_0 + (int) doubleVal; emit(opcode, regIndex); } else { int floatCPEntryIndex = currentPkgInfo.addCPEntry(new FloatCPEntry(doubleVal)); emit(InstructionCodes.FCONST, getOperand(floatCPEntryIndex), regIndex); } break; case TypeTags.STRING: String strValue = (String) literalExpr.value; StringCPEntry stringCPEntry = new StringCPEntry(addUTF8CPEntry(currentPkgInfo, strValue), strValue); int strCPIndex = currentPkgInfo.addCPEntry(stringCPEntry); emit(InstructionCodes.SCONST, getOperand(strCPIndex), regIndex); break; case TypeTags.BOOLEAN: boolean booleanVal = (Boolean) literalExpr.value; if (!booleanVal) { opcode = InstructionCodes.BCONST_0; } else { opcode = InstructionCodes.BCONST_1; } 
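// Booleans use the dedicated BCONST_0/BCONST_1 opcodes, so no constant-pool entry is needed.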
emit(opcode, regIndex); break; case TypeTags.BLOB: byte[] blobValue = (byte[]) literalExpr.value; BlobCPEntry blobCPEntry = new BlobCPEntry(blobValue); int blobCPIndex = currentPkgInfo.addCPEntry(blobCPEntry); emit(InstructionCodes.LCONST, getOperand(blobCPIndex), regIndex); break; case TypeTags.NIL: emit(InstructionCodes.RCONST_NULL, regIndex); } } @Override public void visit(BLangArrayLiteral arrayLiteral) { BType etype; if (arrayLiteral.type.tag == TypeTags.ANY) { etype = arrayLiteral.type; } else { etype = ((BArrayType) arrayLiteral.type).eType; } int opcode = getOpcode(etype.tag, InstructionCodes.INEWARRAY); Operand arrayVarRegIndex = calcAndGetExprRegIndex(arrayLiteral); Operand typeCPIndex = getTypeCPIndex(arrayLiteral.type); emit(opcode, arrayVarRegIndex, typeCPIndex); for (int i = 0; i < arrayLiteral.exprs.size(); i++) { BLangExpression argExpr = arrayLiteral.exprs.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = arrayLiteral.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); opcode = getOpcode(argExpr.type.tag, InstructionCodes.IASTORE); emit(opcode, arrayVarRegIndex, indexLiteral.regIndex, argExpr.regIndex); } } @Override public void visit(BLangJSONArrayLiteral arrayLiteral) { arrayLiteral.regIndex = calcAndGetExprRegIndex(arrayLiteral); List<BLangExpression> argExprs = arrayLiteral.exprs; BLangLiteral arraySizeLiteral = new BLangLiteral(); arraySizeLiteral.pos = arrayLiteral.pos; arraySizeLiteral.value = (long) argExprs.size(); arraySizeLiteral.type = symTable.intType; genNode(arraySizeLiteral, this.env); emit(InstructionCodes.JSONNEWARRAY, arrayLiteral.regIndex, arraySizeLiteral.regIndex); for (int i = 0; i < argExprs.size(); i++) { BLangExpression argExpr = argExprs.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = arrayLiteral.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); emit(InstructionCodes.JSONASTORE, arrayLiteral.regIndex, indexLiteral.regIndex, argExpr.regIndex); } } @Override public void visit(BLangJSONLiteral jsonLiteral) { jsonLiteral.regIndex = calcAndGetExprRegIndex(jsonLiteral); Operand typeCPIndex = getTypeCPIndex(jsonLiteral.type); emit(InstructionCodes.NEWJSON, jsonLiteral.regIndex, typeCPIndex); for (BLangRecordKeyValue keyValue : jsonLiteral.keyValuePairs) { BLangExpression keyExpr = keyValue.key.expr; genNode(keyExpr, this.env); BLangExpression valueExpr = keyValue.valueExpr; genNode(valueExpr, this.env); emit(InstructionCodes.JSONSTORE, jsonLiteral.regIndex, keyExpr.regIndex, valueExpr.regIndex); } } @Override public void visit(BLangMapLiteral mapLiteral) { Operand mapVarRegIndex = calcAndGetExprRegIndex(mapLiteral); Operand typeCPIndex = getTypeCPIndex(mapLiteral.type); emit(InstructionCodes.NEWMAP, mapVarRegIndex, typeCPIndex); for (BLangRecordKeyValue keyValue : mapLiteral.keyValuePairs) { BLangExpression keyExpr = keyValue.key.expr; genNode(keyExpr, this.env); BLangExpression valueExpr = keyValue.valueExpr; genNode(valueExpr, this.env); BMapType mapType = (BMapType) mapLiteral.type; int opcode = getValueToRefTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPSTORE, mapVarRegIndex, keyExpr.regIndex, valueExpr.regIndex); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(opcode, valueExpr.regIndex, refRegMapValue); emit(InstructionCodes.MAPSTORE, mapVarRegIndex, 
keyExpr.regIndex, refRegMapValue); } } } @Override public void visit(BLangStructLiteral structLiteral) { BRecordTypeSymbol structSymbol = (BRecordTypeSymbol) structLiteral.type.tsymbol; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, structSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); Operand structCPIndex = getOperand(currentPkgInfo.addCPEntry(structureRefCPEntry)); RegIndex structRegIndex = calcAndGetExprRegIndex(structLiteral); emit(InstructionCodes.NEWSTRUCT, structCPIndex, structRegIndex); if (structSymbol.defaultsValuesInitFunc != null) { int funcRefCPIndex = getFuncRefCPIndex(structSymbol.defaultsValuesInitFunc.symbol); Operand[] operands = new Operand[5]; operands[0] = getOperand(funcRefCPIndex); operands[1] = getOperand(false); operands[2] = getOperand(1); operands[3] = structRegIndex; operands[4] = getOperand(0); emit(InstructionCodes.CALL, operands); } if (structLiteral.initializer != null) { int funcRefCPIndex = getFuncRefCPIndex(structLiteral.initializer.symbol); Operand[] operands = new Operand[5]; operands[0] = getOperand(funcRefCPIndex); operands[1] = getOperand(false); operands[2] = getOperand(1); operands[3] = structRegIndex; operands[4] = getOperand(0); emit(InstructionCodes.CALL, operands); } for (BLangRecordKeyValue keyValue : structLiteral.keyValuePairs) { BLangRecordKey key = keyValue.key; Operand fieldIndex = key.fieldSymbol.varIndex; genNode(keyValue.valueExpr, this.env); int opcode = getOpcode(key.fieldSymbol.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, structRegIndex, fieldIndex, keyValue.valueExpr.regIndex); } } @Override public void visit(BLangTableLiteral tableLiteral) { genNode(tableLiteral.configurationExpr, this.env); Operand varRefRegIndex = tableLiteral.configurationExpr.regIndex; tableLiteral.regIndex = calcAndGetExprRegIndex(tableLiteral); Operand typeCPIndex = getTypeCPIndex(tableLiteral.type); emit(InstructionCodes.NEWTABLE, tableLiteral.regIndex, typeCPIndex, varRefRegIndex); } @Override public void visit(BLangStreamLiteral streamLiteral) { streamLiteral.regIndex = calcAndGetExprRegIndex(streamLiteral); Operand typeCPIndex = getTypeCPIndex(streamLiteral.type); StringCPEntry nameCPEntry = new StringCPEntry(addUTF8CPEntry(currentPkgInfo, streamLiteral.name.value), streamLiteral.name.value); Operand nameCPIndex = getOperand(currentPkgInfo.addCPEntry(nameCPEntry)); emit(InstructionCodes.NEWSTREAM, streamLiteral.regIndex, typeCPIndex, nameCPIndex); } @Override public void visit(BLangLocalVarRef localVarRef) { if (localVarRef.regIndex != null && (localVarRef.regIndex.isLHSIndex || localVarRef.regIndex.isVarIndex)) { emit(getOpcode(localVarRef.type.tag, InstructionCodes.IMOVE), localVarRef.varSymbol.varIndex, localVarRef.regIndex); return; } localVarRef.regIndex = localVarRef.varSymbol.varIndex; } @Override public void visit(BLangFieldVarRef fieldVarRef) { RegIndex fieldIndex = fieldVarRef.varSymbol.varIndex; Operand varRegIndex = getOperand(0); if (varAssignment) { int opcode = getOpcode(fieldVarRef.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, varRegIndex, fieldIndex, fieldVarRef.regIndex); return; } int opcode = getOpcode(fieldVarRef.type.tag, InstructionCodes.IFIELDLOAD); RegIndex exprRegIndex = calcAndGetExprRegIndex(fieldVarRef); emit(opcode, varRegIndex, fieldIndex, exprRegIndex); } @Override public void visit(BLangPackageVarRef packageVarRef) { BPackageSymbol pkgSymbol; BSymbol 
ownerSymbol = packageVarRef.symbol.owner; if (ownerSymbol.tag == SymTag.SERVICE) { pkgSymbol = (BPackageSymbol) ownerSymbol.owner; } else { pkgSymbol = (BPackageSymbol) ownerSymbol; } Operand gvIndex = packageVarRef.varSymbol.varIndex; int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, pkgSymbol.pkgID); if (varAssignment) { int opcode = getOpcode(packageVarRef.type.tag, InstructionCodes.IGSTORE); emit(opcode, getOperand(pkgRefCPIndex), packageVarRef.regIndex, gvIndex); } else { int opcode = getOpcode(packageVarRef.type.tag, InstructionCodes.IGLOAD); packageVarRef.regIndex = calcAndGetExprRegIndex(packageVarRef); emit(opcode, getOperand(pkgRefCPIndex), gvIndex, packageVarRef.regIndex); } } @Override public void visit(BLangFunctionVarRef functionVarRef) { visitFunctionPointerLoad(functionVarRef, (BInvokableSymbol) functionVarRef.symbol); } @Override public void visit(BLangSimpleVarRef.BLangTypeLoad typeLoad) { Operand typeCPIndex = getTypeCPIndex(typeLoad.symbol.type); emit(InstructionCodes.TYPELOAD, typeCPIndex, calcAndGetExprRegIndex(typeLoad)); } @Override public void visit(BLangStructFieldAccessExpr fieldAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(fieldAccessExpr.expr, this.env); Operand varRefRegIndex = fieldAccessExpr.expr.regIndex; int opcode; Operand fieldIndex = fieldAccessExpr.varSymbol.varIndex; if (variableStore) { opcode = getOpcode(fieldAccessExpr.symbol.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, varRefRegIndex, fieldIndex, fieldAccessExpr.regIndex); } else { opcode = getOpcode(fieldAccessExpr.symbol.type.tag, InstructionCodes.IFIELDLOAD); emit(opcode, varRefRegIndex, fieldIndex, calcAndGetExprRegIndex(fieldAccessExpr)); } this.varAssignment = variableStore; } @Override public void visit(BLangStructFunctionVarRef functionVarRef) { visitFunctionPointerLoad(functionVarRef, (BInvokableSymbol) functionVarRef.symbol); } @Override public void visit(BLangMapAccessExpr mapKeyAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(mapKeyAccessExpr.expr, this.env); Operand varRefRegIndex = mapKeyAccessExpr.expr.regIndex; genNode(mapKeyAccessExpr.indexExpr, this.env); Operand keyRegIndex = mapKeyAccessExpr.indexExpr.regIndex; BMapType mapType = (BMapType) mapKeyAccessExpr.expr.type; if (variableStore) { int opcode = getValueToRefTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPSTORE, varRefRegIndex, keyRegIndex, mapKeyAccessExpr.regIndex); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(opcode, mapKeyAccessExpr.regIndex, refRegMapValue); emit(InstructionCodes.MAPSTORE, varRefRegIndex, keyRegIndex, refRegMapValue); } } else { IntegerCPEntry exceptCPEntry = new IntegerCPEntry(mapKeyAccessExpr.except ? 
1 : 0); Operand except = getOperand(currentPkgInfo.addCPEntry(exceptCPEntry)); int opcode = getRefToValueTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPLOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(mapKeyAccessExpr), except); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(InstructionCodes.MAPLOAD, varRefRegIndex, keyRegIndex, refRegMapValue, except); emit(opcode, refRegMapValue, calcAndGetExprRegIndex(mapKeyAccessExpr)); } } this.varAssignment = variableStore; } @Override public void visit(BLangJSONAccessExpr jsonAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(jsonAccessExpr.expr, this.env); Operand varRefRegIndex = jsonAccessExpr.expr.regIndex; genNode(jsonAccessExpr.indexExpr, this.env); Operand keyRegIndex = jsonAccessExpr.indexExpr.regIndex; if (jsonAccessExpr.indexExpr.type.tag == TypeTags.INT) { if (variableStore) { emit(InstructionCodes.JSONASTORE, varRefRegIndex, keyRegIndex, jsonAccessExpr.regIndex); } else { emit(InstructionCodes.JSONALOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(jsonAccessExpr)); } } else { if (variableStore) { emit(InstructionCodes.JSONSTORE, varRefRegIndex, keyRegIndex, jsonAccessExpr.regIndex); } else { emit(InstructionCodes.JSONLOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(jsonAccessExpr)); } } this.varAssignment = variableStore; } @Override public void visit(BLangXMLAccessExpr xmlIndexAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(xmlIndexAccessExpr.expr, this.env); RegIndex varRefRegIndex = xmlIndexAccessExpr.expr.regIndex; genNode(xmlIndexAccessExpr.indexExpr, this.env); RegIndex indexRegIndex = xmlIndexAccessExpr.indexExpr.regIndex; RegIndex elementRegIndex = calcAndGetExprRegIndex(xmlIndexAccessExpr); if (xmlIndexAccessExpr.fieldType == FieldKind.ALL) { emit(InstructionCodes.XMLLOADALL, varRefRegIndex, elementRegIndex); } else if (xmlIndexAccessExpr.indexExpr.type.tag == TypeTags.STRING) { emit(InstructionCodes.XMLLOAD, varRefRegIndex, indexRegIndex, elementRegIndex); } else { emit(InstructionCodes.XMLSEQLOAD, varRefRegIndex, indexRegIndex, elementRegIndex); } this.varAssignment = variableStore; } @Override public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(arrayIndexAccessExpr.expr, this.env); Operand varRefRegIndex = arrayIndexAccessExpr.expr.regIndex; genNode(arrayIndexAccessExpr.indexExpr, this.env); Operand indexRegIndex = arrayIndexAccessExpr.indexExpr.regIndex; BArrayType arrayType = (BArrayType) arrayIndexAccessExpr.expr.type; if (variableStore) { int opcode = getOpcode(arrayType.eType.tag, InstructionCodes.IASTORE); emit(opcode, varRefRegIndex, indexRegIndex, arrayIndexAccessExpr.regIndex); } else { int opcode = getOpcode(arrayType.eType.tag, InstructionCodes.IALOAD); emit(opcode, varRefRegIndex, indexRegIndex, calcAndGetExprRegIndex(arrayIndexAccessExpr)); } this.varAssignment = variableStore; } @Override public void visit(BLangEnumeratorAccessExpr enumeratorAccessExpr) { } @Override public void visit(BLangElvisExpr elvisExpr) { } @Override public void visit(BLangIsAssignableExpr assignableExpr) { genNode(assignableExpr.lhsExpr, this.env); RegIndex regIndex = calcAndGetExprRegIndex(assignableExpr); Operand typeCPIndex = getTypeCPIndex(assignableExpr.targetType); emit(assignableExpr.opSymbol.opcode, assignableExpr.lhsExpr.regIndex, typeCPIndex, 
regIndex); } @Override public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) { RegIndex exprRegIndex = calcAndGetExprRegIndex(bracedOrTupleExpr); Operand typeCPIndex = getTypeCPIndex(bracedOrTupleExpr.type); emit(InstructionCodes.RNEWARRAY, exprRegIndex, typeCPIndex); for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) { BLangExpression argExpr = bracedOrTupleExpr.expressions.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = argExpr.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); emit(InstructionCodes.RASTORE, exprRegIndex, indexLiteral.regIndex, argExpr.regIndex); } } private void visitAndExpression(BLangBinaryExpr binaryExpr) { Operand falseJumpAddr = getOperand(-1); genNode(binaryExpr.lhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.lhsExpr.regIndex, falseJumpAddr); genNode(binaryExpr.rhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.rhsExpr.regIndex, falseJumpAddr); calcAndGetExprRegIndex(binaryExpr); emit(InstructionCodes.BCONST_1, binaryExpr.regIndex); Operand gotoAddr = getOperand(-1); emit(InstructionCodes.GOTO, gotoAddr); falseJumpAddr.value = nextIP(); emit(InstructionCodes.BCONST_0, binaryExpr.regIndex); gotoAddr.value = nextIP(); } private void visitOrExpression(BLangBinaryExpr binaryExpr) { Operand lExprTrueJumpAddr = getOperand(-1); Operand rExprFalseJumpAddr = getOperand(-1); genNode(binaryExpr.lhsExpr, this.env); emit(InstructionCodes.BR_TRUE, binaryExpr.lhsExpr.regIndex, lExprTrueJumpAddr); genNode(binaryExpr.rhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.rhsExpr.regIndex, rExprFalseJumpAddr); lExprTrueJumpAddr.value = nextIP(); RegIndex exprRegIndex = calcAndGetExprRegIndex(binaryExpr); emit(InstructionCodes.BCONST_1, exprRegIndex); Operand gotoAddr = getOperand(-1); emit(InstructionCodes.GOTO, gotoAddr); rExprFalseJumpAddr.value = nextIP(); emit(InstructionCodes.BCONST_0, exprRegIndex); gotoAddr.value = nextIP(); } public void visit(BLangInvocation iExpr) { if (iExpr.expr != null) { return; } Operand[] operands = getFuncOperands(iExpr); emit(InstructionCodes.CALL, operands); } public void visit(BLangActionInvocation aIExpr) { } public void visit(BLangTypeInit cIExpr) { BSymbol structSymbol = cIExpr.type.tsymbol; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, structSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); Operand structCPIndex = getOperand(currentPkgInfo.addCPEntry(structureRefCPEntry)); RegIndex structRegIndex = calcAndGetExprRegIndex(cIExpr); emit(InstructionCodes.NEWSTRUCT, structCPIndex, structRegIndex); Operand[] operands = getFuncOperands(cIExpr.objectInitInvocation); Operand[] callOperands = new Operand[operands.length + 1]; callOperands[0] = operands[0]; callOperands[1] = operands[1]; callOperands[2] = getOperand(operands[2].value + 1); callOperands[3] = structRegIndex; System.arraycopy(operands, 3, callOperands, 4, operands.length - 3); emit(InstructionCodes.CALL, callOperands); } public void visit(BLangAttachedFunctionInvocation iExpr) { Operand[] operands = getFuncOperands(iExpr); if (iExpr.expr.type.tag == TypeTags.OBJECT) { Operand[] vCallOperands = new Operand[operands.length + 1]; vCallOperands[0] = iExpr.expr.regIndex; System.arraycopy(operands, 0, vCallOperands, 1, operands.length); emit(InstructionCodes.VCALL, vCallOperands); } 
else { emit(InstructionCodes.CALL, operands); } } public void visit(BLangTransformerInvocation iExpr) { BInvokableSymbol transformerSymbol = (BInvokableSymbol) iExpr.symbol; int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, transformerSymbol.pkgID); int transformerNameCPIndex = addUTF8CPEntry(currentPkgInfo, transformerSymbol.name.value); TransformerRefCPEntry transformerRefCPEntry = new TransformerRefCPEntry(pkgRefCPIndex, transformerNameCPIndex); int transformerRefCPIndex = currentPkgInfo.addCPEntry(transformerRefCPEntry); Operand[] operands = getFuncOperands(iExpr, transformerRefCPIndex); emit(InstructionCodes.TCALL, operands); } public void visit(BFunctionPointerInvocation iExpr) { Operand[] operands = getFuncOperands(iExpr, -1); genNode(iExpr.expr, env); operands[0] = iExpr.expr.regIndex; emit(InstructionCodes.FPCALL, operands); } public void visit(BLangTypeConversionExpr convExpr) { int opcode = convExpr.conversionSymbol.opcode; BType castExprType = convExpr.type; RegIndex convExprRegIndex = calcAndGetExprRegIndex(convExpr.regIndex, castExprType.tag); convExpr.regIndex = convExprRegIndex; if (opcode == InstructionCodes.NOP) { convExpr.expr.regIndex = createLHSRegIndex(convExprRegIndex); genNode(convExpr.expr, this.env); return; } genNode(convExpr.expr, this.env); if (opcode == InstructionCodes.MAP2T || opcode == InstructionCodes.JSON2T || opcode == InstructionCodes.ANY2T || opcode == InstructionCodes.ANY2C || opcode == InstructionCodes.ANY2E || opcode == InstructionCodes.ANY2M || opcode == InstructionCodes.T2JSON || opcode == InstructionCodes.MAP2JSON || opcode == InstructionCodes.JSON2MAP || opcode == InstructionCodes.JSON2ARRAY || opcode == InstructionCodes.CHECKCAST) { Operand typeCPIndex = getTypeCPIndex(convExpr.targetType); emit(opcode, convExpr.expr.regIndex, typeCPIndex, convExprRegIndex); } else { emit(opcode, convExpr.expr.regIndex, convExprRegIndex); } } public void visit(BLangRecordLiteral recordLiteral) { /* ignore */ } public void visit(BLangTernaryExpr ternaryExpr) { RegIndex ternaryExprRegIndex = calcAndGetExprRegIndex(ternaryExpr); this.genNode(ternaryExpr.expr, this.env); Operand ifFalseJumpAddr = getOperand(-1); this.emit(InstructionCodes.BR_FALSE, ternaryExpr.expr.regIndex, ifFalseJumpAddr); ternaryExpr.thenExpr.regIndex = createLHSRegIndex(ternaryExprRegIndex); this.genNode(ternaryExpr.thenExpr, this.env); Operand endJumpAddr = getOperand(-1); this.emit(InstructionCodes.GOTO, endJumpAddr); ifFalseJumpAddr.value = nextIP(); ternaryExpr.elseExpr.regIndex = createLHSRegIndex(ternaryExprRegIndex); this.genNode(ternaryExpr.elseExpr, this.env); endJumpAddr.value = nextIP(); } public void visit(BLangAwaitExpr awaitExpr) { Operand valueRegIndex; if (awaitExpr.type != null) { valueRegIndex = calcAndGetExprRegIndex(awaitExpr); } else { valueRegIndex = this.getOperand(-1); } genNode(awaitExpr.expr, this.env); Operand futureRegIndex = awaitExpr.expr.regIndex; this.emit(InstructionCodes.AWAIT, futureRegIndex, valueRegIndex); } public void visit(BLangTypedescExpr accessExpr) { Operand typeCPIndex = getTypeCPIndex(accessExpr.resolvedType); emit(InstructionCodes.TYPELOAD, typeCPIndex, calcAndGetExprRegIndex(accessExpr)); } public void visit(BLangUnaryExpr unaryExpr) { RegIndex exprIndex = calcAndGetExprRegIndex(unaryExpr); if (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.UNTAINT.equals(unaryExpr.operator)) { unaryExpr.expr.regIndex = createLHSRegIndex(unaryExpr.regIndex); genNode(unaryExpr.expr, this.env); return; } int opcode; genNode(unaryExpr.expr, 
this.env); if (OperatorKind.LENGTHOF.equals(unaryExpr.operator)) { Operand typeCPIndex = getTypeCPIndex(unaryExpr.expr.type); opcode = unaryExpr.opSymbol.opcode; emit(opcode, unaryExpr.expr.regIndex, typeCPIndex, exprIndex); } else { opcode = unaryExpr.opSymbol.opcode; emit(opcode, unaryExpr.expr.regIndex, exprIndex); } } public void visit(BLangLambdaFunction bLangLambdaFunction) { visitFunctionPointerLoad(bLangLambdaFunction, ((BLangFunction) bLangLambdaFunction.getFunctionNode()).symbol); } public void visit(BLangStatementExpression bLangStatementExpression) { bLangStatementExpression.regIndex = calcAndGetExprRegIndex(bLangStatementExpression); boolean prevRegIndexResetDisabledState = this.regIndexResetDisabled; this.regIndexResetDisabled = true; genNode(bLangStatementExpression.stmt, this.env); this.regIndexResetDisabled = prevRegIndexResetDisabledState; genNode(bLangStatementExpression.expr, this.env); emit(getOpcode(bLangStatementExpression.expr.type.tag, InstructionCodes.IMOVE), bLangStatementExpression.expr.regIndex, bLangStatementExpression.regIndex); } private <T extends BLangNode, U extends SymbolEnv> T genNode(T t, U u) { SymbolEnv prevEnv = this.env; this.env = u; t.accept(this); this.env = prevEnv; return t; } private String generateSig(BType[] types) { StringBuilder builder = new StringBuilder(); Arrays.stream(types).forEach(e -> builder.append(e.getDesc())); return builder.toString(); } private String generateFunctionSig(BType[] paramTypes, BType retType) { return "(" + generateSig(paramTypes) + ")(" + retType.getDesc() + ")"; } private String generateFunctionSig(BType[] paramTypes) { return "(" + generateSig(paramTypes) + ")()"; } private int getNextIndex(int typeTag, VariableIndex indexes) { int index; switch (typeTag) { case TypeTags.INT: index = ++indexes.tInt; break; case TypeTags.FLOAT: index = ++indexes.tFloat; break; case TypeTags.STRING: index = ++indexes.tString; break; case TypeTags.BOOLEAN: index = ++indexes.tBoolean; break; case TypeTags.BLOB: index = ++indexes.tBlob; break; default: index = ++indexes.tRef; break; } return index; } private int getOpcode(int typeTag, int baseOpcode) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = baseOpcode; break; case TypeTags.FLOAT: opcode = baseOpcode + FLOAT_OFFSET; break; case TypeTags.STRING: opcode = baseOpcode + STRING_OFFSET; break; case TypeTags.BOOLEAN: opcode = baseOpcode + BOOL_OFFSET; break; case TypeTags.BLOB: opcode = baseOpcode + BLOB_OFFSET; break; default: opcode = baseOpcode + REF_OFFSET; break; } return opcode; } private Operand getOperand(int value) { return new Operand(value); } private Operand getOperand(boolean value) { return new Operand(value ? 
1 : 0); } private RegIndex getLVIndex(int typeTag) { return getRegIndexInternal(typeTag, LOCAL); } private RegIndex getPVIndex(int typeTag) { return getRegIndexInternal(typeTag, PACKAGE); } private RegIndex getFieldIndex(int typeTag) { return getRegIndexInternal(typeTag, FIELD); } private RegIndex getRegIndex(int typeTag) { RegIndex regIndex = getRegIndexInternal(typeTag, REG); addToRegIndexList(regIndex); return regIndex; } private RegIndex getRegIndexInternal(int typeTag, VariableIndex.Kind varIndexKind) { int index; switch (varIndexKind) { case REG: return new RegIndex(getNextIndex(typeTag, regIndexes), typeTag); case PACKAGE: index = getNextIndex(typeTag, pvIndexes); break; case FIELD: index = getNextIndex(typeTag, fieldIndexes); break; default: index = getNextIndex(typeTag, lvIndexes); break; } RegIndex regIndex = new RegIndex(index, typeTag); regIndex.isVarIndex = true; return regIndex; } private RegIndex calcAndGetExprRegIndex(BLangExpression expr) { expr.regIndex = calcAndGetExprRegIndex(expr.regIndex, expr.type.tag); return expr.regIndex; } private RegIndex calcAndGetExprRegIndex(RegIndex regIndex, int typeTag) { if (regIndex != null && (regIndex.isVarIndex || regIndex.isLHSIndex)) { return regIndex; } return getRegIndex(typeTag); } private RegIndex createLHSRegIndex(RegIndex regIndex) { if (regIndex.isVarIndex || regIndex.isLHSIndex) { return regIndex; } RegIndex lhsRegIndex = new RegIndex(regIndex.value, regIndex.typeTag, true); addToRegIndexList(lhsRegIndex); return lhsRegIndex; } private void addToRegIndexList(RegIndex regIndex) { if (regIndex.isVarIndex) { throw new IllegalStateException(""); } regIndexList.add(regIndex); } private LocalVariableInfo getLocalVarAttributeInfo(BVarSymbol varSymbol) { int varNameCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.name.value); int varIndex = varSymbol.varIndex.value; int sigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.type.getDesc()); return new LocalVariableInfo(varNameCPIndex, sigCPIndex, varIndex); } private void visitInvokableNode(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo, SymbolEnv invokableSymbolEnv) { int localVarAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); LocalVariableAttributeInfo localVarAttributeInfo = new LocalVariableAttributeInfo(localVarAttrNameIndex); visitInvokableNodeParams(invokableNode.symbol, callableUnitInfo, localVarAttributeInfo); if (Symbols.isNative(invokableNode.symbol)) { this.processWorker(callableUnitInfo.defaultWorkerInfo, null, localVarAttributeInfo, invokableSymbolEnv, null); } else { VariableIndex lvIndexCopy = this.copyVarIndex(lvIndexes); this.processWorker(callableUnitInfo.defaultWorkerInfo, invokableNode.body, localVarAttributeInfo, invokableSymbolEnv, lvIndexCopy); for (BLangWorker worker : invokableNode.getWorkers()) { this.processWorker(callableUnitInfo.getWorkerInfo(worker.name.value), worker.body, localVarAttributeInfo, invokableSymbolEnv, this.copyVarIndex(lvIndexCopy)); } } if (invokableNode.symbol.taintTable != null) { int taintTableAttributeNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.TAINT_TABLE.value()); TaintTableAttributeInfo taintTableAttributeInfo = new TaintTableAttributeInfo(taintTableAttributeNameIndex); visitTaintTable(invokableNode.symbol.taintTable, taintTableAttributeInfo); callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.TAINT_TABLE, taintTableAttributeInfo); } } private void visitTaintTable(Map<Integer, TaintRecord> taintTable, TaintTableAttributeInfo 
taintTableAttributeInfo) { int rowCount = 0; for (Integer paramIndex : taintTable.keySet()) { TaintRecord taintRecord = taintTable.get(paramIndex); boolean added = addTaintTableEntry(taintTableAttributeInfo, paramIndex, taintRecord); if (added) { taintTableAttributeInfo.columnCount = taintRecord.retParamTaintedStatus.size(); rowCount++; } } taintTableAttributeInfo.rowCount = rowCount; } private boolean addTaintTableEntry(TaintTableAttributeInfo taintTableAttributeInfo, int index, TaintRecord taintRecord) { if (taintRecord.taintError == null || taintRecord.taintError.isEmpty()) { taintTableAttributeInfo.taintTable.put(index, taintRecord.retParamTaintedStatus); return true; } return false; } private void processWorker(WorkerInfo workerInfo, BLangBlockStmt body, LocalVariableAttributeInfo localVarAttributeInfo, SymbolEnv invokableSymbolEnv, VariableIndex lvIndexCopy) { workerInfo.codeAttributeInfo.attributeNameIndex = this.addUTF8CPEntry( this.currentPkgInfo, AttributeInfo.Kind.CODE_ATTRIBUTE.value()); workerInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttributeInfo); if (body != null) { localVarAttrInfo = new LocalVariableAttributeInfo(localVarAttributeInfo.attributeNameIndex); localVarAttrInfo.localVars = new ArrayList<>(localVarAttributeInfo.localVars); workerInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttrInfo); workerInfo.codeAttributeInfo.codeAddrs = nextIP(); this.lvIndexes = lvIndexCopy; this.currentWorkerInfo = workerInfo; this.genNode(body, invokableSymbolEnv); } this.endWorkerInfoUnit(workerInfo.codeAttributeInfo); this.emit(InstructionCodes.HALT); } private void visitInvokableNodeParams(BInvokableSymbol invokableSymbol, CallableUnitInfo callableUnitInfo, LocalVariableAttributeInfo localVarAttrInfo) { invokableSymbol.params.forEach(param -> visitVarSymbol(param, lvIndexes, localVarAttrInfo)); invokableSymbol.defaultableParams.forEach(param -> visitVarSymbol(param, lvIndexes, localVarAttrInfo)); if (invokableSymbol.restParam != null) { visitVarSymbol(invokableSymbol.restParam, lvIndexes, localVarAttrInfo); } callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttrInfo); } private void visitVarSymbol(BVarSymbol varSymbol, VariableIndex variableIndex, LocalVariableAttributeInfo localVarAttrInfo) { varSymbol.varIndex = getRegIndexInternal(varSymbol.type.tag, variableIndex.kind); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); localVarAttrInfo.localVars.add(localVarInfo); } private VariableIndex copyVarIndex(VariableIndex that) { VariableIndex vIndexes = new VariableIndex(that.kind); vIndexes.tInt = that.tInt; vIndexes.tFloat = that.tFloat; vIndexes.tString = that.tString; vIndexes.tBoolean = that.tBoolean; vIndexes.tBlob = that.tBlob; vIndexes.tRef = that.tRef; return vIndexes; } private int nextIP() { return currentPkgInfo.instructionList.size(); } private void endWorkerInfoUnit(CodeAttributeInfo codeAttributeInfo) { codeAttributeInfo.maxLongLocalVars = lvIndexes.tInt + 1; codeAttributeInfo.maxDoubleLocalVars = lvIndexes.tFloat + 1; codeAttributeInfo.maxStringLocalVars = lvIndexes.tString + 1; codeAttributeInfo.maxIntLocalVars = lvIndexes.tBoolean + 1; codeAttributeInfo.maxByteLocalVars = lvIndexes.tBlob + 1; codeAttributeInfo.maxRefLocalVars = lvIndexes.tRef + 1; codeAttributeInfo.maxLongRegs = codeAttributeInfo.maxLongLocalVars + maxRegIndexes.tInt + 1; codeAttributeInfo.maxDoubleRegs = codeAttributeInfo.maxDoubleLocalVars + maxRegIndexes.tFloat + 1; 
codeAttributeInfo.maxStringRegs = codeAttributeInfo.maxStringLocalVars + maxRegIndexes.tString + 1; codeAttributeInfo.maxIntRegs = codeAttributeInfo.maxIntLocalVars + maxRegIndexes.tBoolean + 1; codeAttributeInfo.maxByteRegs = codeAttributeInfo.maxByteLocalVars + maxRegIndexes.tBlob + 1; codeAttributeInfo.maxRefRegs = codeAttributeInfo.maxRefLocalVars + maxRegIndexes.tRef + 1; for (RegIndex regIndex : regIndexList) { switch (regIndex.typeTag) { case TypeTags.INT: regIndex.value = regIndex.value + codeAttributeInfo.maxLongLocalVars; break; case TypeTags.FLOAT: regIndex.value = regIndex.value + codeAttributeInfo.maxDoubleLocalVars; break; case TypeTags.STRING: regIndex.value = regIndex.value + codeAttributeInfo.maxStringLocalVars; break; case TypeTags.BOOLEAN: regIndex.value = regIndex.value + codeAttributeInfo.maxIntLocalVars; break; case TypeTags.BLOB: regIndex.value = regIndex.value + codeAttributeInfo.maxByteLocalVars; break; default: regIndex.value = regIndex.value + codeAttributeInfo.maxRefLocalVars; break; } } regIndexList = new ArrayList<>(); lvIndexes = new VariableIndex(LOCAL); regIndexes = new VariableIndex(REG); maxRegIndexes = new VariableIndex(REG); } private void setMaxRegIndexes(VariableIndex current, VariableIndex max) { max.tInt = (max.tInt > current.tInt) ? max.tInt : current.tInt; max.tFloat = (max.tFloat > current.tFloat) ? max.tFloat : current.tFloat; max.tString = (max.tString > current.tString) ? max.tString : current.tString; max.tBoolean = (max.tBoolean > current.tBoolean) ? max.tBoolean : current.tBoolean; max.tBlob = (max.tBlob > current.tBlob) ? max.tBlob : current.tBlob; max.tRef = (max.tRef > current.tRef) ? max.tRef : current.tRef; } private void prepareIndexes(VariableIndex indexes) { indexes.tInt++; indexes.tFloat++; indexes.tString++; indexes.tBoolean++; indexes.tBlob++; indexes.tRef++; } private int emit(int opcode) { currentPkgInfo.instructionList.add(InstructionFactory.get(opcode)); return currentPkgInfo.instructionList.size(); } private int emit(int opcode, Operand... 
operands) { currentPkgInfo.instructionList.add(InstructionFactory.get(opcode, operands)); return currentPkgInfo.instructionList.size(); } private int emit(Instruction instr) { currentPkgInfo.instructionList.add(instr); return currentPkgInfo.instructionList.size(); } private void addVarCountAttrInfo(ConstantPool constantPool, AttributeInfoPool attributeInfoPool, VariableIndex fieldCount) { int attrNameCPIndex = addUTF8CPEntry(constantPool, AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE.value()); VarTypeCountAttributeInfo varCountAttribInfo = new VarTypeCountAttributeInfo(attrNameCPIndex); varCountAttribInfo.setMaxLongVars(fieldCount.tInt); varCountAttribInfo.setMaxDoubleVars(fieldCount.tFloat); varCountAttribInfo.setMaxStringVars(fieldCount.tString); varCountAttribInfo.setMaxIntVars(fieldCount.tBoolean); varCountAttribInfo.setMaxByteVars(fieldCount.tBlob); varCountAttribInfo.setMaxRefVars(fieldCount.tRef); attributeInfoPool.addAttributeInfo(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE, varCountAttribInfo); } private Operand[] getFuncOperands(BLangInvocation iExpr) { int funcRefCPIndex = getFuncRefCPIndex((BInvokableSymbol) iExpr.symbol); return getFuncOperands(iExpr, funcRefCPIndex); } private int getFuncRefCPIndex(BInvokableSymbol invokableSymbol) { int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, invokableSymbol.pkgID); int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, invokableSymbol.name.value); FunctionRefCPEntry funcRefCPEntry = new FunctionRefCPEntry(pkgRefCPIndex, funcNameCPIndex); return currentPkgInfo.addCPEntry(funcRefCPEntry); } private Operand[] getFuncOperands(BLangInvocation iExpr, int funcRefCPIndex) { int i = 0; int nArgRegs = iExpr.requiredArgs.size() + iExpr.namedArgs.size() + iExpr.restArgs.size(); int nRetRegs = 1; int flags = FunctionFlags.NOTHING; Operand[] operands = new Operand[nArgRegs + nRetRegs + 4]; operands[i++] = getOperand(funcRefCPIndex); if (iExpr.async) { flags = FunctionFlags.markAsync(flags); } if (iExpr.actionInvocation) { flags = FunctionFlags.markObserved(flags); } operands[i++] = getOperand(flags); operands[i++] = getOperand(nArgRegs); for (BLangExpression argExpr : iExpr.requiredArgs) { operands[i++] = genNode(argExpr, this.env).regIndex; } i = generateNamedArgs(iExpr, operands, i); for (BLangExpression argExpr : iExpr.restArgs) { operands[i++] = genNode(argExpr, this.env).regIndex; } operands[i++] = getOperand(nRetRegs); iExpr.regIndex = calcAndGetExprRegIndex(iExpr.regIndex, iExpr.type.tag); operands[i] = iExpr.regIndex; return operands; } private int generateNamedArgs(BLangInvocation iExpr, Operand[] operands, int currentIndex) { if (iExpr.namedArgs.isEmpty()) { return currentIndex; } if (iExpr.symbol.kind != SymbolKind.FUNCTION) { throw new IllegalStateException("Unsupported callable unit"); } for (BLangExpression argExpr : iExpr.namedArgs) { operands[currentIndex++] = genNode(argExpr, this.env).regIndex; } return currentIndex; } private void addVariableCountAttributeInfo(ConstantPool constantPool, AttributeInfoPool attributeInfoPool, int[] fieldCount) { UTF8CPEntry attribNameCPEntry = new UTF8CPEntry(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE.toString()); int attribNameCPIndex = constantPool.addCPEntry(attribNameCPEntry); VarTypeCountAttributeInfo varCountAttribInfo = new VarTypeCountAttributeInfo(attribNameCPIndex); varCountAttribInfo.setMaxLongVars(fieldCount[INT_OFFSET]); varCountAttribInfo.setMaxDoubleVars(fieldCount[FLOAT_OFFSET]); varCountAttribInfo.setMaxStringVars(fieldCount[STRING_OFFSET]); 
varCountAttribInfo.setMaxIntVars(fieldCount[BOOL_OFFSET]); varCountAttribInfo.setMaxByteVars(fieldCount[BLOB_OFFSET]); varCountAttribInfo.setMaxRefVars(fieldCount[REF_OFFSET]); attributeInfoPool.addAttributeInfo(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE, varCountAttribInfo); } private DefaultValue getDefaultValue(BLangLiteral literalExpr) { String desc = literalExpr.type.getDesc(); int typeDescCPIndex = addUTF8CPEntry(currentPkgInfo, desc); DefaultValue defaultValue = new DefaultValue(typeDescCPIndex, desc); int typeTag = literalExpr.type.tag; switch (typeTag) { case TypeTags.INT: defaultValue.intValue = (Long) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new IntegerCPEntry(defaultValue.intValue)); break; case TypeTags.FLOAT: defaultValue.floatValue = (Double) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new FloatCPEntry(defaultValue.floatValue)); break; case TypeTags.STRING: defaultValue.stringValue = (String) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new UTF8CPEntry(defaultValue.stringValue)); break; case TypeTags.BOOLEAN: defaultValue.booleanValue = (Boolean) literalExpr.value; break; case TypeTags.BLOB: defaultValue.blobValue = (byte[]) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new BlobCPEntry(defaultValue.blobValue)); break; case TypeTags.NIL: break; default: defaultValue = null; } return defaultValue; } private DefaultValueAttributeInfo getDefaultValueAttributeInfo(BLangLiteral literalExpr) { DefaultValue defaultValue = getDefaultValue(literalExpr); UTF8CPEntry defaultValueAttribUTF8CPEntry = new UTF8CPEntry(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE.toString()); int defaultValueAttribNameIndex = currentPkgInfo.addCPEntry(defaultValueAttribUTF8CPEntry); return new DefaultValueAttributeInfo(defaultValueAttribNameIndex, defaultValue); } private void createPackageVarInfo(BLangVariable varNode) { BVarSymbol varSymbol = varNode.symbol; varSymbol.varIndex = getPVIndex(varSymbol.type.tag); int varNameCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.name.value); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.type.getDesc()); PackageVarInfo pkgVarInfo = new PackageVarInfo(varNameCPIndex, typeSigCPIndex, varSymbol.flags, varSymbol.varIndex.value); currentPkgInfo.pkgVarInfoMap.put(varSymbol.name.value, pkgVarInfo); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); LocalVariableAttributeInfo pkgVarAttrInfo = (LocalVariableAttributeInfo) currentPkgInfo.getAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE); pkgVarAttrInfo.localVars.add(localVarInfo); addDocumentAttachmentAttrInfo(varNode.docAttachments, pkgVarInfo); } public void visit(BLangTypeDefinition typeDefinition) { } private void createAnnotationInfoEntry(BLangAnnotation annotation) { int nameCPIndex = addUTF8CPEntry(currentPkgInfo, annotation.name.value); int typeSigCPIndex = -1; if (annotation.typeNode != null) { typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, annotation.typeNode.type.getDesc()); } int[] attachPointCPIndexes = new int[annotation.attachmentPoints.size()]; List<BLangAnnotationAttachmentPoint> attachmentPoints = annotation.attachmentPoints; for (int i = 0; i < attachmentPoints.size(); i++) { String pointName = attachmentPoints.get(i).attachmentPoint.getValue(); attachPointCPIndexes[i] = addUTF8CPEntry(currentPkgInfo, pointName); } AnnotationInfo annotationInfo = new AnnotationInfo(nameCPIndex, typeSigCPIndex, annotation.symbol.flags, attachPointCPIndexes); 
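// Register the annotation info in the package, keyed by annotation name.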
currentPkgInfo.annotationInfoMap.put(annotation.name.value, annotationInfo); } private void createTypeDefinitionInfoEntry(BLangTypeDefinition typeDefinition) { if (typeDefinition.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) { return; } BTypeSymbol typeDefSymbol = typeDefinition.symbol; int typeDefNameCPIndex = addUTF8CPEntry(currentPkgInfo, typeDefSymbol.name.value); TypeDefInfo typeDefInfo = new TypeDefInfo(currentPackageRefCPIndex, typeDefNameCPIndex, typeDefSymbol.flags); typeDefInfo.typeTag = typeDefSymbol.type.tag; if (typeDefinition.symbol.tag == SymTag.OBJECT) { ObjectTypeInfo objInfo = new ObjectTypeInfo(); BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) typeDefSymbol; objInfo.objectType = (BObjectType) objectSymbol.type; BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDefinition.typeNode; List<BLangVariable> objFields = objectTypeNode.fields; for (BLangVariable objField : objFields) { int fieldNameCPIndex = addUTF8CPEntry(currentPkgInfo, objField.name.value); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, objField.type.getDesc()); objField.symbol.varIndex = getFieldIndex(objField.symbol.type.tag); StructFieldInfo objFieldInfo = new StructFieldInfo(fieldNameCPIndex, sigCPIndex, objField.symbol.flags, objField.symbol.varIndex.value); objFieldInfo.fieldType = objField.type; if (objField.expr != null && objField.expr.getKind() == NodeKind.LITERAL) { DefaultValueAttributeInfo defaultVal = getDefaultValueAttributeInfo((BLangLiteral) objField.expr); objFieldInfo.addAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE, defaultVal); } objInfo.fieldInfoEntries.add(objFieldInfo); addDocumentAttachmentAttrInfo(objField.docAttachments, objFieldInfo); } prepareIndexes(fieldIndexes); int[] fieldCount = new int[]{fieldIndexes.tInt, fieldIndexes.tFloat, fieldIndexes.tString, fieldIndexes.tBoolean, fieldIndexes.tBlob, fieldIndexes.tRef}; addVariableCountAttributeInfo(currentPkgInfo, objInfo, fieldCount); fieldIndexes = new VariableIndex(FIELD); for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) { int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, attachedFunc.funcName.value); BType[] paramTypes = attachedFunc.type.paramTypes.toArray(new BType[0]); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(paramTypes, attachedFunc.type.retType)); int flags = attachedFunc.symbol.flags; objInfo.attachedFuncInfoEntries.add(new AttachedFunctionInfo(funcNameCPIndex, sigCPIndex, flags)); } typeDefInfo.typeInfo = objInfo; addDocumentAttachmentAttrInfo(typeDefinition.docAttachments, objInfo); currentPkgInfo.addTypeDefInfo(typeDefSymbol.name.value, typeDefInfo); } else if (typeDefinition.symbol.tag == SymTag.RECORD) { RecordTypeInfo recordInfo = new RecordTypeInfo(); BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) typeDefSymbol; recordInfo.recordType = (BRecordType) recordSymbol.type; BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDefinition.typeNode; List<BLangVariable> recordFields = recordTypeNode.fields; for (BLangVariable recordField : recordFields) { int fieldNameCPIndex = addUTF8CPEntry(currentPkgInfo, recordField.name.value); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, recordField.type.getDesc()); recordField.symbol.varIndex = getFieldIndex(recordField.symbol.type.tag); StructFieldInfo recordFieldInfo = new StructFieldInfo(fieldNameCPIndex, sigCPIndex, recordField.symbol.flags, recordField.symbol.varIndex.value); recordFieldInfo.fieldType = recordField.type; if (recordField.expr != null && recordField.expr.getKind() == 
NodeKind.LITERAL) { DefaultValueAttributeInfo defaultVal = getDefaultValueAttributeInfo((BLangLiteral) recordField.expr); recordFieldInfo.addAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE, defaultVal); } recordInfo.fieldInfoEntries.add(recordFieldInfo); addDocumentAttachmentAttrInfo(recordField.docAttachments, recordFieldInfo); } prepareIndexes(fieldIndexes); int[] fieldCount = new int[]{fieldIndexes.tInt, fieldIndexes.tFloat, fieldIndexes.tString, fieldIndexes.tBoolean, fieldIndexes.tBlob, fieldIndexes.tRef}; addVariableCountAttributeInfo(currentPkgInfo, recordInfo, fieldCount); fieldIndexes = new VariableIndex(FIELD); BAttachedFunction attachedFunc = recordSymbol.initializerFunc; int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, attachedFunc.funcName.value); BType[] paramTypes = attachedFunc.type.paramTypes.toArray(new BType[0]); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(paramTypes, attachedFunc.type.retType)); int flags = attachedFunc.symbol.flags; recordInfo.attachedFuncInfoEntries.add(new AttachedFunctionInfo(funcNameCPIndex, sigCPIndex, flags)); typeDefInfo.typeInfo = recordInfo; addDocumentAttachmentAttrInfo(typeDefinition.docAttachments, typeDefInfo); currentPkgInfo.addTypeDefInfo(typeDefSymbol.name.value, typeDefInfo); } else if (typeDefinition.symbol.tag == SymTag.FINITE_TYPE) { BLangFiniteTypeNode typeNode = (BLangFiniteTypeNode) typeDefinition.typeNode; FiniteTypeInfo typeInfo = new FiniteTypeInfo(); Iterator<BLangExpression> valueSpaceIterator = typeNode.valueSpace.iterator(); while (valueSpaceIterator.hasNext()) { BLangExpression literal = valueSpaceIterator.next(); typeInfo.valueSpaceItemInfos.add(new ValueSpaceItemInfo(getDefaultValue((BLangLiteral) literal))); } typeDefInfo.typeInfo = typeInfo; addDocumentAttachmentAttrInfo(typeDefinition.docAttachments, typeDefInfo); currentPkgInfo.addTypeDefInfo(typeDefSymbol.name.value, typeDefInfo); } } /** * Creates a {@code FunctionInfo} from the given function node in AST. 
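* The created entry records the invokable's signature, flags, worker infos, and parameter and documentation attributes.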
* * @param funcNode function node in AST */ private void createFunctionInfoEntry(BLangFunction funcNode) { BInvokableSymbol funcSymbol = funcNode.symbol; BInvokableType funcType = (BInvokableType) funcSymbol.type; int funcNameCPIndex = this.addUTF8CPEntry(currentPkgInfo, funcNode.name.value); FunctionInfo funcInfo = new FunctionInfo(currentPackageRefCPIndex, funcNameCPIndex); funcInfo.paramTypes = funcType.paramTypes.toArray(new BType[0]); populateInvokableSignature(funcType, funcInfo); funcInfo.flags = funcSymbol.flags; if (funcNode.receiver != null) { funcInfo.attachedToTypeCPIndex = getTypeCPIndex(funcNode.receiver.type).value; } this.addWorkerInfoEntries(funcInfo, funcNode.getWorkers()); addParameterAttributeInfo(funcNode, funcInfo); addDocumentAttachmentAttrInfo(funcNode.docAttachments, funcInfo); this.currentPkgInfo.functionInfoMap.put(funcSymbol.name.value, funcInfo); } private void createTransformerInfoEntry(BLangInvokableNode invokable) { BInvokableSymbol transformerSymbol = invokable.symbol; BInvokableType transformerType = (BInvokableType) transformerSymbol.type; int transformerNameCPIndex = this.addUTF8CPEntry(currentPkgInfo, transformerSymbol.name.value); TransformerInfo transformerInfo = new TransformerInfo(currentPackageRefCPIndex, transformerNameCPIndex); transformerInfo.paramTypes = transformerType.paramTypes.toArray(new BType[0]); populateInvokableSignature(transformerType, transformerInfo); transformerInfo.retParamTypes = new BType[1]; transformerInfo.retParamTypes[0] = transformerType.retType; transformerInfo.flags = transformerSymbol.flags; this.addWorkerInfoEntries(transformerInfo, invokable.getWorkers()); addParameterAttributeInfo(invokable, transformerInfo); this.currentPkgInfo.transformerInfoMap.put(transformerSymbol.name.value, transformerInfo); addDocumentAttachmentAttrInfo(invokable.docAttachments, transformerInfo); } private void populateInvokableSignature(BInvokableType bInvokableType, CallableUnitInfo callableUnitInfo) { if (bInvokableType.retType == symTable.nilType) { callableUnitInfo.retParamTypes = new BType[0]; callableUnitInfo.signatureCPIndex = addUTF8CPEntry(this.currentPkgInfo, generateFunctionSig(callableUnitInfo.paramTypes)); } else { callableUnitInfo.retParamTypes = new BType[1]; callableUnitInfo.retParamTypes[0] = bInvokableType.retType; callableUnitInfo.signatureCPIndex = addUTF8CPEntry(this.currentPkgInfo, generateFunctionSig(callableUnitInfo.paramTypes, bInvokableType.retType)); } } private void addWorkerInfoEntries(CallableUnitInfo callableUnitInfo, List<BLangWorker> workers) { UTF8CPEntry workerNameCPEntry = new UTF8CPEntry("default"); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); WorkerInfo defaultWorkerInfo = new WorkerInfo(workerNameCPIndex, "default"); callableUnitInfo.defaultWorkerInfo = defaultWorkerInfo; for (BLangWorker worker : workers) { workerNameCPEntry = new UTF8CPEntry(worker.name.value); workerNameCPIndex = currentPkgInfo.addCPEntry(workerNameCPEntry); WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.getName().value); callableUnitInfo.addWorkerInfo(worker.getName().value, workerInfo); } } @Override public void visit(BLangEndpoint endpointNode) { } private void createServiceInfoEntry(BLangService serviceNode) { int serviceNameCPIndex = addUTF8CPEntry(currentPkgInfo, serviceNode.name.value); if (serviceNode.endpointType != null) { String endPointQName = serviceNode.endpointType.tsymbol.toString(); int epNameCPIndex = addUTF8CPEntry(currentPkgInfo, endPointQName); ServiceInfo 
serviceInfo = new ServiceInfo(currentPackageRefCPIndex, serviceNameCPIndex, serviceNode.symbol.flags, epNameCPIndex); int localVarAttNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); LocalVariableAttributeInfo localVarAttributeInfo = new LocalVariableAttributeInfo(localVarAttNameIndex); serviceNode.vars.forEach(var -> visitVarSymbol(var.var.symbol, pvIndexes, localVarAttributeInfo)); serviceInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttributeInfo); BLangFunction serviceInitFunction = (BLangFunction) serviceNode.getInitFunction(); createFunctionInfoEntry(serviceInitFunction); serviceInfo.initFuncInfo = currentPkgInfo.functionInfoMap.get(serviceInitFunction.name.toString()); currentPkgInfo.addServiceInfo(serviceNode.name.value, serviceInfo); serviceNode.resources.forEach(res -> createResourceInfoEntry(res, serviceInfo)); addDocumentAttachmentAttrInfo(serviceNode.docAttachments, serviceInfo); } } private void createResourceInfoEntry(BLangResource resourceNode, ServiceInfo serviceInfo) { BInvokableType resourceType = (BInvokableType) resourceNode.symbol.type; int serviceNameCPIndex = addUTF8CPEntry(currentPkgInfo, resourceNode.name.value); ResourceInfo resourceInfo = new ResourceInfo(currentPackageRefCPIndex, serviceNameCPIndex); resourceInfo.paramTypes = resourceType.paramTypes.toArray(new BType[0]); setParameterNames(resourceNode, resourceInfo); resourceInfo.retParamTypes = new BType[0]; resourceInfo.signatureCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(resourceInfo.paramTypes)); int workerNameCPIndex = addUTF8CPEntry(currentPkgInfo, "default"); resourceInfo.defaultWorkerInfo = new WorkerInfo(workerNameCPIndex, "default"); resourceNode.workers.forEach(worker -> addWorkerInfoEntry(worker, resourceInfo)); serviceInfo.resourceInfoMap.put(resourceNode.name.getValue(), resourceInfo); addDocumentAttachmentAttrInfo(resourceNode.docAttachments, resourceInfo); } private void addWorkerInfoEntry(BLangWorker worker, CallableUnitInfo callableUnitInfo) { int workerNameCPIndex = addUTF8CPEntry(currentPkgInfo, worker.name.value); WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.name.value); callableUnitInfo.addWorkerInfo(worker.name.value, workerInfo); } private ErrorTableAttributeInfo createErrorTableIfAbsent(PackageInfo packageInfo) { ErrorTableAttributeInfo errorTable = (ErrorTableAttributeInfo) packageInfo.getAttributeInfo(AttributeInfo.Kind.ERROR_TABLE); if (errorTable == null) { UTF8CPEntry attribNameCPEntry = new UTF8CPEntry(AttributeInfo.Kind.ERROR_TABLE.toString()); int attribNameCPIndex = packageInfo.addCPEntry(attribNameCPEntry); errorTable = new ErrorTableAttributeInfo(attribNameCPIndex); packageInfo.addAttributeInfo(AttributeInfo.Kind.ERROR_TABLE, errorTable); } return errorTable; } private void addLineNumberInfo(DiagnosticPos pos) { LineNumberInfo lineNumInfo = createLineNumberInfo(pos, currentPkgInfo, currentPkgInfo.instructionList.size()); lineNoAttrInfo.addLineNumberInfo(lineNumInfo); } private LineNumberInfo createLineNumberInfo(DiagnosticPos pos, PackageInfo packageInfo, int ip) { UTF8CPEntry fileNameUTF8CPEntry = new UTF8CPEntry(pos.src.cUnitName); int fileNameCPEntryIndex = packageInfo.addCPEntry(fileNameUTF8CPEntry); LineNumberInfo lineNumberInfo = new LineNumberInfo(pos.sLine, fileNameCPEntryIndex, pos.src.cUnitName, ip); lineNumberInfo.setPackageInfo(packageInfo); lineNumberInfo.setIp(ip); return lineNumberInfo; } private void setParameterNames(BLangResource resourceNode, 
ResourceInfo resourceInfo) { int paramCount = resourceNode.requiredParams.size(); resourceInfo.paramNameCPIndexes = new int[paramCount]; for (int i = 0; i < paramCount; i++) { BLangVariable paramVar = resourceNode.requiredParams.get(i); String paramName = null; boolean isAnnotated = false; for (BLangAnnotationAttachment annotationAttachment : paramVar.annAttachments) { String attachmentName = annotationAttachment.getAnnotationName().getValue(); if ("PathParam".equalsIgnoreCase(attachmentName) || "QueryParam".equalsIgnoreCase(attachmentName)) { isAnnotated = true; break; } } if (!isAnnotated) { paramName = paramVar.name.getValue(); } int paramNameCPIndex = addUTF8CPEntry(currentPkgInfo, paramName); resourceInfo.paramNameCPIndexes[i] = paramNameCPIndex; } } private WorkerDataChannelInfo getWorkerDataChannelInfo(CallableUnitInfo callableUnit, String source, String target) { WorkerDataChannelInfo workerDataChannelInfo = callableUnit.getWorkerDataChannelInfo( WorkerDataChannelInfo.generateChannelName(source, target)); if (workerDataChannelInfo == null) { UTF8CPEntry sourceCPEntry = new UTF8CPEntry(source); int sourceCPIndex = this.currentPkgInfo.addCPEntry(sourceCPEntry); UTF8CPEntry targetCPEntry = new UTF8CPEntry(target); int targetCPIndex = this.currentPkgInfo.addCPEntry(targetCPEntry); workerDataChannelInfo = new WorkerDataChannelInfo(sourceCPIndex, source, targetCPIndex, target); workerDataChannelInfo.setUniqueName(workerDataChannelInfo.getChannelName() + this.workerChannelCount); String uniqueName = workerDataChannelInfo.getUniqueName(); UTF8CPEntry uniqueNameCPEntry = new UTF8CPEntry(uniqueName); int uniqueNameCPIndex = this.currentPkgInfo.addCPEntry(uniqueNameCPEntry); workerDataChannelInfo.setUniqueNameCPIndex(uniqueNameCPIndex); callableUnit.addWorkerDataChannelInfo(workerDataChannelInfo); this.workerChannelCount++; } return workerDataChannelInfo; } private int addUTF8CPEntry(ConstantPool pool, String value) { UTF8CPEntry pkgPathCPEntry = new UTF8CPEntry(value); return pool.addCPEntry(pkgPathCPEntry); } private int addPackageRefCPEntry(ConstantPool pool, PackageID pkgID) { int nameCPIndex = addUTF8CPEntry(pool, pkgID.bvmAlias()); int versionCPIndex = addUTF8CPEntry(pool, pkgID.version.value); PackageRefCPEntry packageRefCPEntry = new PackageRefCPEntry(nameCPIndex, versionCPIndex); return pool.addCPEntry(packageRefCPEntry); } /** * Holds the variable index per type. 
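* A separate counter is kept per BVM value kind (tInt, tFloat, tString, tBoolean, tBlob, tRef) because each kind is addressed in its own register/variable file; {@code Kind} records whether the indexes are LOCAL, FIELD, PACKAGE, or REG scoped.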
* * @since 0.94 */ static class VariableIndex { public enum Kind { LOCAL, FIELD, PACKAGE, REG } int tInt = -1; int tFloat = -1; int tString = -1; int tBoolean = -1; int tBlob = -1; int tRef = -1; Kind kind; VariableIndex(Kind kind) { this.kind = kind; } public int[] toArray() { int[] result = new int[6]; result[0] = this.tInt; result[1] = this.tFloat; result[2] = this.tString; result[3] = this.tBoolean; result[4] = this.tBlob; result[5] = this.tRef; return result; } } public void visit(BLangWorker workerNode) { this.genNode(workerNode.body, this.env); } /* visit the workers within fork-join block */ private void processJoinWorkers(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo, SymbolEnv forkJoinEnv) { UTF8CPEntry codeUTF8CPEntry = new UTF8CPEntry(AttributeInfo.Kind.CODE_ATTRIBUTE.toString()); int codeAttribNameIndex = this.currentPkgInfo.addCPEntry(codeUTF8CPEntry); for (BLangWorker worker : forkJoin.workers) { VariableIndex lvIndexesCopy = copyVarIndex(this.lvIndexes); this.regIndexes = new VariableIndex(REG); VariableIndex regIndexesCopy = this.regIndexes; this.regIndexes = new VariableIndex(REG); VariableIndex maxRegIndexesCopy = this.maxRegIndexes; this.maxRegIndexes = new VariableIndex(REG); List<RegIndex> regIndexListCopy = this.regIndexList; this.regIndexList = new ArrayList<>(); WorkerInfo workerInfo = forkjoinInfo.getWorkerInfo(worker.name.value); workerInfo.codeAttributeInfo.attributeNameIndex = codeAttribNameIndex; workerInfo.codeAttributeInfo.codeAddrs = this.nextIP(); this.currentWorkerInfo = workerInfo; this.genNode(worker.body, forkJoinEnv); this.endWorkerInfoUnit(workerInfo.codeAttributeInfo); this.emit(InstructionCodes.HALT); this.lvIndexes = lvIndexesCopy; this.regIndexes = regIndexesCopy; this.maxRegIndexes = maxRegIndexesCopy; this.regIndexList = regIndexListCopy; } } private void populateForkJoinWorkerInfo(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo) { for (BLangWorker worker : forkJoin.workers) { UTF8CPEntry workerNameCPEntry = new UTF8CPEntry(worker.name.value); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.name.value); forkjoinInfo.addWorkerInfo(worker.name.value, workerInfo); } } /* generate code for Join block */ private void processJoinBlock(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo, SymbolEnv forkJoinEnv, RegIndex joinVarRegIndex, Operand joinBlockAddr) { UTF8CPEntry joinType = new UTF8CPEntry(forkJoin.joinType.name()); int joinTypeCPIndex = this.currentPkgInfo.addCPEntry(joinType); forkjoinInfo.setJoinType(forkJoin.joinType.name()); forkjoinInfo.setJoinTypeCPIndex(joinTypeCPIndex); joinBlockAddr.value = nextIP(); if (forkJoin.joinResultVar != null) { visitForkJoinParameterDefs(forkJoin.joinResultVar, forkJoinEnv); joinVarRegIndex.value = forkJoin.joinResultVar.symbol.varIndex.value; } if (forkJoin.joinedBody != null) { this.genNode(forkJoin.joinedBody, forkJoinEnv); } } /* generate code for timeout block */ private void processTimeoutBlock(BLangForkJoin forkJoin, SymbolEnv forkJoinEnv, RegIndex timeoutVarRegIndex, Operand timeoutBlockAddr) { /* emit a GOTO instruction to jump out of the timeout block */ Operand gotoAddr = getOperand(-1); this.emit(InstructionCodes.GOTO, gotoAddr); timeoutBlockAddr.value = nextIP(); if (forkJoin.timeoutVariable != null) { visitForkJoinParameterDefs(forkJoin.timeoutVariable, forkJoinEnv); timeoutVarRegIndex.value = forkJoin.timeoutVariable.symbol.varIndex.value; } if (forkJoin.timeoutBody != null) { 
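// The timeout handler body starts at timeoutBlockAddr (patched to nextIP() above); the GOTO emitted before it lets the normal join path fall through past this block, and gotoAddr is back-patched afterwards to the first instruction following the handler.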
this.genNode(forkJoin.timeoutBody, forkJoinEnv); } gotoAddr.value = nextIP(); } public void visit(BLangForkJoin forkJoin) { SymbolEnv forkJoinEnv = SymbolEnv.createForkJoinSymbolEnv(forkJoin, this.env); ForkjoinInfo forkjoinInfo = new ForkjoinInfo(this.lvIndexes.toArray()); this.populateForkJoinWorkerInfo(forkJoin, forkjoinInfo); int forkJoinInfoIndex = this.forkJoinCount++; /* was I already inside a fork/join */ if (this.env.forkJoin != null) { this.currentWorkerInfo.addForkJoinInfo(forkjoinInfo); } else { this.currentCallableUnitInfo.defaultWorkerInfo.addForkJoinInfo(forkjoinInfo); } ForkJoinCPEntry forkJoinCPEntry = new ForkJoinCPEntry(forkJoinInfoIndex); Operand forkJoinCPIndex = getOperand(this.currentPkgInfo.addCPEntry(forkJoinCPEntry)); forkjoinInfo.setIndexCPIndex(forkJoinCPIndex.value); RegIndex timeoutRegIndex = new RegIndex(-1, TypeTags.INT); addToRegIndexList(timeoutRegIndex); if (forkJoin.timeoutExpression != null) { forkjoinInfo.setTimeoutAvailable(true); this.genNode(forkJoin.timeoutExpression, forkJoinEnv); timeoutRegIndex.value = forkJoin.timeoutExpression.regIndex.value; } RegIndex joinVarRegIndex = new RegIndex(-1, TypeTags.MAP); Operand joinBlockAddr = getOperand(-1); RegIndex timeoutVarRegIndex = new RegIndex(-1, TypeTags.MAP); Operand timeoutBlockAddr = getOperand(-1); this.emit(InstructionCodes.FORKJOIN, forkJoinCPIndex, timeoutRegIndex, joinVarRegIndex, joinBlockAddr, timeoutVarRegIndex, timeoutBlockAddr); VariableIndex lvIndexesCopy = copyVarIndex(this.lvIndexes); VariableIndex regIndexesCopy = this.regIndexes; VariableIndex maxRegIndexesCopy = this.maxRegIndexes; List<RegIndex> regIndexListCopy = this.regIndexList; this.processJoinWorkers(forkJoin, forkjoinInfo, forkJoinEnv); this.lvIndexes = lvIndexesCopy; this.regIndexes = regIndexesCopy; this.maxRegIndexes = maxRegIndexesCopy; this.regIndexList = regIndexListCopy; int i = 0; int[] joinWrkrNameCPIndexes = new int[forkJoin.joinedWorkers.size()]; String[] joinWrkrNames = new String[joinWrkrNameCPIndexes.length]; for (BLangIdentifier workerName : forkJoin.joinedWorkers) { UTF8CPEntry workerNameCPEntry = new UTF8CPEntry(workerName.value); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); joinWrkrNameCPIndexes[i] = workerNameCPIndex; joinWrkrNames[i] = workerName.value; i++; } forkjoinInfo.setJoinWrkrNameIndexes(joinWrkrNameCPIndexes); forkjoinInfo.setJoinWorkerNames(joinWrkrNames); forkjoinInfo.setWorkerCount(forkJoin.joinedWorkerCount); this.processJoinBlock(forkJoin, forkjoinInfo, forkJoinEnv, joinVarRegIndex, joinBlockAddr); this.processTimeoutBlock(forkJoin, forkJoinEnv, timeoutVarRegIndex, timeoutBlockAddr); } private void visitForkJoinParameterDefs(BLangVariable parameterDef, SymbolEnv forkJoinEnv) { LocalVariableAttributeInfo localVariableAttributeInfo = new LocalVariableAttributeInfo(1); parameterDef.symbol.varIndex = getLVIndex(parameterDef.type.tag); this.genNode(parameterDef, forkJoinEnv); LocalVariableInfo localVariableDetails = this.getLocalVarAttributeInfo(parameterDef.symbol); localVariableAttributeInfo.localVars.add(localVariableDetails); } public void visit(BLangWorkerSend workerSendStmt) { WorkerDataChannelInfo workerDataChannelInfo = this.getWorkerDataChannelInfo(this.currentCallableUnitInfo, this.currentWorkerInfo.getWorkerName(), workerSendStmt.workerIdentifier.value); WorkerDataChannelRefCPEntry wrkrInvRefCPEntry = new WorkerDataChannelRefCPEntry(workerDataChannelInfo .getUniqueNameCPIndex(), workerDataChannelInfo.getUniqueName()); 
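// A worker send lowers to a single WRKSEND instruction whose three operands are: the data-channel CP reference (one channel interned per source/target worker pair via getWorkerDataChannelInfo), the UTF-8 CP index of the sent value's type signature, and the register holding the evaluated expression.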
wrkrInvRefCPEntry.setWorkerDataChannelInfo(workerDataChannelInfo); Operand wrkrInvRefCPIndex = getOperand(currentPkgInfo.addCPEntry(wrkrInvRefCPEntry)); if (workerSendStmt.isForkJoinSend) { this.currentWorkerInfo.setWrkrDtChnlRefCPIndex(wrkrInvRefCPIndex.value); this.currentWorkerInfo.setWorkerDataChannelInfoForForkJoin(workerDataChannelInfo); } workerDataChannelInfo.setDataChannelRefIndex(wrkrInvRefCPIndex.value); genNode(workerSendStmt.expr, this.env); RegIndex argReg = workerSendStmt.expr.regIndex; BType bType = workerSendStmt.expr.type; UTF8CPEntry sigCPEntry = new UTF8CPEntry(this.generateSig(new BType[] { bType })); Operand sigCPIndex = getOperand(this.currentPkgInfo.addCPEntry(sigCPEntry)); Operand[] wrkSendArgRegs = new Operand[3]; wrkSendArgRegs[0] = wrkrInvRefCPIndex; wrkSendArgRegs[1] = sigCPIndex; wrkSendArgRegs[2] = argReg; this.emit(InstructionCodes.WRKSEND, wrkSendArgRegs); } public void visit(BLangWorkerReceive workerReceiveStmt) { WorkerDataChannelInfo workerDataChannelInfo = this.getWorkerDataChannelInfo(this.currentCallableUnitInfo, workerReceiveStmt.workerIdentifier.value, this.currentWorkerInfo.getWorkerName()); WorkerDataChannelRefCPEntry wrkrChnlRefCPEntry = new WorkerDataChannelRefCPEntry(workerDataChannelInfo .getUniqueNameCPIndex(), workerDataChannelInfo.getUniqueName()); wrkrChnlRefCPEntry.setWorkerDataChannelInfo(workerDataChannelInfo); Operand wrkrRplyRefCPIndex = getOperand(currentPkgInfo.addCPEntry(wrkrChnlRefCPEntry)); workerDataChannelInfo.setDataChannelRefIndex(wrkrRplyRefCPIndex.value); BLangExpression lExpr = workerReceiveStmt.expr; RegIndex regIndex; BType bType; if (lExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lExpr instanceof BLangLocalVarRef) { lExpr.regIndex = ((BLangLocalVarRef) lExpr).varSymbol.varIndex; regIndex = lExpr.regIndex; } else { lExpr.regIndex = getRegIndex(lExpr.type.tag); lExpr.regIndex.isLHSIndex = true; regIndex = lExpr.regIndex; } bType = lExpr.type; UTF8CPEntry sigCPEntry = new UTF8CPEntry(this.generateSig(new BType[] { bType })); Operand sigCPIndex = getOperand(currentPkgInfo.addCPEntry(sigCPEntry)); Operand[] wrkReceiveArgRegs = new Operand[3]; wrkReceiveArgRegs[0] = wrkrRplyRefCPIndex; wrkReceiveArgRegs[1] = sigCPIndex; wrkReceiveArgRegs[2] = regIndex; emit(InstructionCodes.WRKRECEIVE, wrkReceiveArgRegs); if (!(lExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lExpr instanceof BLangLocalVarRef)) { this.varAssignment = true; this.genNode(lExpr, this.env); this.varAssignment = false; } } public void visit(BLangConnector connectorNode) { } public void visit(BLangAction actionNode) { } public void visit(BLangForever foreverStatement) { /* ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { /* ignore */ } public void visit(BLangIdentifier identifierNode) { /* ignore */ } public void visit(BLangAnnotation annotationNode) { /* ignore */ } public void visit(BLangAnnotAttribute annotationAttribute) { /* ignore */ } public void visit(BLangAnnotationAttachment annAttachmentNode) { /* ignore */ } public void visit(BLangAnnotAttachmentAttributeValue annotAttributeValue) { /* ignore */ } public void visit(BLangAnnotAttachmentAttribute annotAttachmentAttribute) { /* ignore */ } public void visit(BLangAssignment assignNode) { BLangExpression lhrExpr = assignNode.varRef; if (assignNode.declaredWithVar) { BLangVariableReference varRef = (BLangVariableReference) lhrExpr; visitVarSymbol((BVarSymbol) varRef.symbol, lvIndexes, localVarAttrInfo); } BLangExpression rhsExpr = assignNode.expr; if (lhrExpr.type.tag != TypeTags.NONE && 
lhrExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lhrExpr instanceof BLangLocalVarRef) { lhrExpr.regIndex = ((BVarSymbol) ((BLangVariableReference) lhrExpr).symbol).varIndex; rhsExpr.regIndex = lhrExpr.regIndex; } genNode(rhsExpr, this.env); if (lhrExpr.type.tag == TypeTags.NONE || (lhrExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lhrExpr instanceof BLangLocalVarRef)) { return; } varAssignment = true; lhrExpr.regIndex = rhsExpr.regIndex; genNode(lhrExpr, this.env); varAssignment = false; } public void visit(BLangContinue continueNode) { generateFinallyInstructions(continueNode, NodeKind.WHILE, NodeKind.FOREACH); this.emit(this.loopResetInstructionStack.peek()); } public void visit(BLangBreak breakNode) { generateFinallyInstructions(breakNode, NodeKind.WHILE, NodeKind.FOREACH); this.emit(this.loopExitInstructionStack.peek()); } public void visit(BLangThrow throwNode) { genNode(throwNode.expr, env); emit(InstructionFactory.get(InstructionCodes.THROW, throwNode.expr.regIndex)); } public void visit(BLangIf ifNode) { addLineNumberInfo(ifNode.pos); genNode(ifNode.expr, this.env); Operand ifCondJumpAddr = getOperand(-1); emit(InstructionCodes.BR_FALSE, ifNode.expr.regIndex, ifCondJumpAddr); genNode(ifNode.body, this.env); Operand endJumpAddr = getOperand(-1); emit(InstructionCodes.GOTO, endJumpAddr); ifCondJumpAddr.value = nextIP(); if (ifNode.elseStmt != null) { genNode(ifNode.elseStmt, this.env); } endJumpAddr.value = nextIP(); } public void visit(BLangForeach foreach) { Operand iteratorVar = getLVIndex(TypeTags.ITERATOR); Operand conditionVar = getLVIndex(TypeTags.BOOLEAN); this.genNode(foreach.collection, env); this.emit(InstructionCodes.ITR_NEW, foreach.collection.regIndex, iteratorVar); Operand foreachStartAddress = new Operand(nextIP()); Operand foreachEndAddress = new Operand(-1); Instruction gotoStartInstruction = InstructionFactory.get(InstructionCodes.GOTO, foreachStartAddress); Instruction gotoEndInstruction = InstructionFactory.get(InstructionCodes.GOTO, foreachEndAddress); this.emit(InstructionCodes.ITR_HAS_NEXT, iteratorVar, conditionVar); this.emit(InstructionCodes.BR_FALSE, conditionVar, foreachEndAddress); generateForeachVarAssignment(foreach, iteratorVar); this.loopResetInstructionStack.push(gotoStartInstruction); this.loopExitInstructionStack.push(gotoEndInstruction); this.genNode(foreach.body, env); this.loopResetInstructionStack.pop(); this.loopExitInstructionStack.pop(); this.emit(gotoStartInstruction); foreachEndAddress.value = this.nextIP(); } public void visit(BLangWhile whileNode) { Instruction gotoTopJumpInstr = InstructionFactory.get(InstructionCodes.GOTO, getOperand(this.nextIP())); this.genNode(whileNode.expr, this.env); Operand exitLoopJumpAddr = getOperand(-1); Instruction exitLoopJumpInstr = InstructionFactory.get(InstructionCodes.GOTO, exitLoopJumpAddr); emit(InstructionCodes.BR_FALSE, whileNode.expr.regIndex, exitLoopJumpAddr); this.loopResetInstructionStack.push(gotoTopJumpInstr); this.loopExitInstructionStack.push(exitLoopJumpInstr); this.genNode(whileNode.body, this.env); this.loopResetInstructionStack.pop(); this.loopExitInstructionStack.pop(); this.emit(gotoTopJumpInstr); exitLoopJumpAddr.value = nextIP(); } public void visit(BLangLock lockNode) { if (lockNode.lockVariables.isEmpty()) { this.genNode(lockNode.body, this.env); return; } Operand gotoLockEndAddr = getOperand(-1); Instruction instructGotoLockEnd = InstructionFactory.get(InstructionCodes.GOTO, gotoLockEndAddr); Operand[] operands = getOperands(lockNode); ErrorTableAttributeInfo errorTable 
= createErrorTableIfAbsent(currentPkgInfo); int fromIP = nextIP(); emit((InstructionCodes.LOCK), operands); this.genNode(lockNode.body, this.env); int toIP = nextIP() - 1; emit((InstructionCodes.UNLOCK), operands); emit(instructGotoLockEnd); ErrorTableEntry errorTableEntry = new ErrorTableEntry(fromIP, toIP, nextIP(), 0, -1); errorTable.addErrorTableEntry(errorTableEntry); emit((InstructionCodes.UNLOCK), operands); emit(InstructionFactory.get(InstructionCodes.THROW, getOperand(-1))); gotoLockEndAddr.value = nextIP(); } private Operand[] getOperands(BLangLock lockNode) { Operand[] operands = new Operand[(lockNode.lockVariables.size() * 3) + 1]; int i = 0; operands[i++] = new Operand(lockNode.lockVariables.size()); for (BVarSymbol varSymbol : lockNode.lockVariables) { BPackageSymbol pkgSymbol; BSymbol ownerSymbol = varSymbol.owner; if (ownerSymbol.tag == SymTag.SERVICE) { pkgSymbol = (BPackageSymbol) ownerSymbol.owner; } else { pkgSymbol = (BPackageSymbol) ownerSymbol; } int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, pkgSymbol.pkgID); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.getType().getDesc()); TypeRefCPEntry typeRefCPEntry = new TypeRefCPEntry(typeSigCPIndex); operands[i++] = getOperand(currentPkgInfo.addCPEntry(typeRefCPEntry)); operands[i++] = getOperand(pkgRefCPIndex); operands[i++] = varSymbol.varIndex; } return operands; } public void visit(BLangTransaction transactionNode) { ++transactionIndex; Operand transactionIndexOperand = getOperand(transactionIndex); Operand retryCountRegIndex = new RegIndex(-1, TypeTags.INT); if (transactionNode.retryCount != null) { this.genNode(transactionNode.retryCount, this.env); retryCountRegIndex = transactionNode.retryCount.regIndex; } Operand committedFuncRegIndex = new RegIndex(-1, TypeTags.INVOKABLE); if (transactionNode.onCommitFunction != null) { committedFuncRegIndex.value = getFuncRefCPIndex( (BInvokableSymbol) ((BLangFunctionVarRef) transactionNode.onCommitFunction).symbol); } Operand abortedFuncRegIndex = new RegIndex(-1, TypeTags.INVOKABLE); if (transactionNode.onAbortFunction != null) { abortedFuncRegIndex.value = getFuncRefCPIndex( (BInvokableSymbol) ((BLangFunctionVarRef) transactionNode.onAbortFunction).symbol); } ErrorTableAttributeInfo errorTable = createErrorTableIfAbsent(currentPkgInfo); Operand transStmtEndAddr = getOperand(-1); Operand transStmtAbortEndAddr = getOperand(-1); Operand transStmtFailEndAddr = getOperand(-1); Instruction gotoAbortTransBlockEnd = InstructionFactory.get(InstructionCodes.GOTO, transStmtAbortEndAddr); Instruction gotoFailTransBlockEnd = InstructionFactory.get(InstructionCodes.GOTO, transStmtFailEndAddr); abortInstructions.push(gotoAbortTransBlockEnd); failInstructions.push(gotoFailTransBlockEnd); this.emit(InstructionCodes.TR_BEGIN, transactionIndexOperand, retryCountRegIndex, committedFuncRegIndex, abortedFuncRegIndex); Operand transBlockStartAddr = getOperand(nextIP()); Operand retryEndWithThrowAddr = getOperand(-1); Operand retryEndWithNoThrowAddr = getOperand(-1); this.emit(InstructionCodes.TR_RETRY, transactionIndexOperand, retryEndWithThrowAddr, retryEndWithNoThrowAddr); this.genNode(transactionNode.transactionBody, this.env); int transBlockEndAddr = nextIP(); this.emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.SUCCESS.value())); abortInstructions.pop(); failInstructions.pop(); emit(InstructionCodes.GOTO, transStmtEndAddr); int errorTargetIP = nextIP(); transStmtFailEndAddr.value = errorTargetIP; emit(InstructionCodes.TR_END, 
transactionIndexOperand, getOperand(TransactionStatus.FAILED.value())); if (transactionNode.onRetryBody != null) { this.genNode(transactionNode.onRetryBody, this.env); } emit(InstructionCodes.GOTO, transBlockStartAddr); retryEndWithThrowAddr.value = nextIP(); emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.END.value())); emit(InstructionCodes.THROW, getOperand(-1)); ErrorTableEntry errorTableEntry = new ErrorTableEntry(transBlockStartAddr.value, transBlockEndAddr, errorTargetIP, 0, -1); errorTable.addErrorTableEntry(errorTableEntry); transStmtAbortEndAddr.value = nextIP(); emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.ABORTED.value())); int transactionEndIp = nextIP(); transStmtEndAddr.value = transactionEndIp; retryEndWithNoThrowAddr.value = transactionEndIp; emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.END.value())); } public void visit(BLangAbort abortNode) { generateFinallyInstructions(abortNode, NodeKind.TRANSACTION); this.emit(abortInstructions.peek()); } public void visit(BLangDone doneNode) { generateFinallyInstructions(doneNode, NodeKind.DONE); this.emit(InstructionCodes.HALT); } public void visit(BLangRetry retryNode) { generateFinallyInstructions(retryNode, NodeKind.TRANSACTION); this.emit(failInstructions.peek()); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { xmlnsStmtNode.xmlnsDecl.accept(this); } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangLocalXMLNS xmlnsNode) { RegIndex lvIndex = getLVIndex(TypeTags.STRING); BLangExpression nsURIExpr = xmlnsNode.namespaceURI; nsURIExpr.regIndex = createLHSRegIndex(lvIndex); genNode(nsURIExpr, env); BXMLNSSymbol nsSymbol = (BXMLNSSymbol) xmlnsNode.symbol; nsSymbol.nsURIIndex = lvIndex; } @Override public void visit(BLangPackageXMLNS xmlnsNode) { BLangExpression nsURIExpr = xmlnsNode.namespaceURI; Operand pvIndex = getPVIndex(TypeTags.STRING); BXMLNSSymbol nsSymbol = (BXMLNSSymbol) xmlnsNode.symbol; genNode(nsURIExpr, env); nsSymbol.nsURIIndex = pvIndex; int pkgIndex = addPackageRefCPEntry(this.currentPkgInfo, this.currentPkgID); emit(InstructionCodes.SGSTORE, getOperand(pkgIndex), nsURIExpr.regIndex, pvIndex); } @Override public void visit(BLangXMLQName xmlQName) { if (!xmlQName.isUsedInXML) { xmlQName.regIndex = calcAndGetExprRegIndex(xmlQName); String qName = xmlQName.namespaceURI == null ? 
xmlQName.localname.value : ("{" + xmlQName.namespaceURI + "}" + xmlQName.localname); xmlQName.regIndex = createStringLiteral(qName, xmlQName.regIndex, env); return; } RegIndex nsURIIndex = getNamespaceURIIndex(xmlQName.nsSymbol, env); RegIndex localnameIndex = createStringLiteral(xmlQName.localname.value, null, env); RegIndex prefixIndex = createStringLiteral(xmlQName.prefix.value, null, env); xmlQName.regIndex = calcAndGetExprRegIndex(xmlQName.regIndex, TypeTags.XML); emit(InstructionCodes.NEWQNAME, localnameIndex, nsURIIndex, prefixIndex, xmlQName.regIndex); } @Override public void visit(BLangXMLAttribute xmlAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(xmlAttribute, env); BLangExpression attrNameExpr = xmlAttribute.name; attrNameExpr.regIndex = calcAndGetExprRegIndex(attrNameExpr); genNode(attrNameExpr, xmlAttributeEnv); RegIndex attrQNameRegIndex = attrNameExpr.regIndex; if (attrNameExpr.getKind() != NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, attrQNameRegIndex, localNameRegIndex, uriRegIndex); attrQNameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(((BLangXMLElementLiteral) env.node).namespacesInScope, localNameRegIndex, uriRegIndex, attrQNameRegIndex, xmlAttribute.pos, xmlAttributeEnv); attrNameExpr.regIndex = attrQNameRegIndex; } BLangExpression attrValueExpr = xmlAttribute.value; genNode(attrValueExpr, env); if (xmlAttribute.isNamespaceDeclr) { ((BXMLNSSymbol) xmlAttribute.symbol).nsURIIndex = attrValueExpr.regIndex; } } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(xmlElementLiteral, env); xmlElementLiteral.regIndex = calcAndGetExprRegIndex(xmlElementLiteral); xmlElementLiteral.inlineNamespaces.forEach(xmlns -> { genNode(xmlns, xmlElementEnv); }); BLangExpression startTagName = (BLangExpression) xmlElementLiteral.getStartTagName(); RegIndex startTagNameRegIndex = visitXMLTagName(startTagName, xmlElementEnv, xmlElementLiteral); BLangExpression endTagName = (BLangExpression) xmlElementLiteral.getEndTagName(); RegIndex endTagNameRegIndex = endTagName == null ? 
startTagNameRegIndex : visitXMLTagName(endTagName, xmlElementEnv, xmlElementLiteral); RegIndex defaultNsURIIndex = getNamespaceURIIndex(xmlElementLiteral.defaultNsSymbol, xmlElementEnv); emit(InstructionCodes.NEWXMLELEMENT, xmlElementLiteral.regIndex, startTagNameRegIndex, endTagNameRegIndex, defaultNsURIIndex); xmlElementLiteral.namespacesInScope.forEach((name, symbol) -> { BLangXMLQName nsQName = new BLangXMLQName(name.getValue(), XMLConstants.XMLNS_ATTRIBUTE); genNode(nsQName, xmlElementEnv); RegIndex uriIndex = getNamespaceURIIndex(symbol, xmlElementEnv); emit(InstructionCodes.XMLATTRSTORE, xmlElementLiteral.regIndex, nsQName.regIndex, uriIndex); }); xmlElementLiteral.attributes.forEach(attribute -> { genNode(attribute, xmlElementEnv); emit(InstructionCodes.XMLATTRSTORE, xmlElementLiteral.regIndex, attribute.name.regIndex, attribute.value.regIndex); }); xmlElementLiteral.modifiedChildren.forEach(child -> { genNode(child, xmlElementEnv); emit(InstructionCodes.XMLSEQSTORE, xmlElementLiteral.regIndex, child.regIndex); }); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { if (xmlTextLiteral.type == null) { xmlTextLiteral.regIndex = calcAndGetExprRegIndex(xmlTextLiteral.regIndex, TypeTags.XML); } else { xmlTextLiteral.regIndex = calcAndGetExprRegIndex(xmlTextLiteral); } genNode(xmlTextLiteral.concatExpr, env); emit(InstructionCodes.NEWXMLTEXT, xmlTextLiteral.regIndex, xmlTextLiteral.concatExpr.regIndex); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.regIndex = calcAndGetExprRegIndex(xmlCommentLiteral); genNode(xmlCommentLiteral.concatExpr, env); emit(InstructionCodes.NEWXMLCOMMENT, xmlCommentLiteral.regIndex, xmlCommentLiteral.concatExpr.regIndex); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.regIndex = calcAndGetExprRegIndex(xmlProcInsLiteral); genNode(xmlProcInsLiteral.dataConcatExpr, env); genNode(xmlProcInsLiteral.target, env); emit(InstructionCodes.NEWXMLPI, xmlProcInsLiteral.regIndex, xmlProcInsLiteral.target.regIndex, xmlProcInsLiteral.dataConcatExpr.regIndex); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.concatExpr.regIndex = calcAndGetExprRegIndex(xmlQuotedString); genNode(xmlQuotedString.concatExpr, env); xmlQuotedString.regIndex = xmlQuotedString.concatExpr.regIndex; } @Override public void visit(BLangXMLSequenceLiteral xmlSeqLiteral) { xmlSeqLiteral.regIndex = calcAndGetExprRegIndex(xmlSeqLiteral); emit(InstructionCodes.NEWXMLSEQ, xmlSeqLiteral.regIndex); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.concatExpr.regIndex = calcAndGetExprRegIndex(stringTemplateLiteral); genNode(stringTemplateLiteral.concatExpr, env); stringTemplateLiteral.regIndex = stringTemplateLiteral.concatExpr.regIndex; } @Override public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(xmlAttributeAccessExpr.expr, this.env); RegIndex varRefRegIndex = xmlAttributeAccessExpr.expr.regIndex; if (xmlAttributeAccessExpr.indexExpr == null) { RegIndex xmlValueRegIndex = calcAndGetExprRegIndex(xmlAttributeAccessExpr); emit(InstructionCodes.XML2XMLATTRS, varRefRegIndex, xmlValueRegIndex); return; } BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr; genNode(xmlAttributeAccessExpr.indexExpr, this.env); RegIndex qnameRegIndex = xmlAttributeAccessExpr.indexExpr.regIndex; if (indexExpr.getKind() != 
NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, qnameRegIndex, localNameRegIndex, uriRegIndex); qnameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(xmlAttributeAccessExpr.namespaces, localNameRegIndex, uriRegIndex, qnameRegIndex, indexExpr.pos, env); } if (variableStore) { emit(InstructionCodes.XMLATTRSTORE, varRefRegIndex, qnameRegIndex, xmlAttributeAccessExpr.regIndex); } else { RegIndex xmlValueRegIndex = calcAndGetExprRegIndex(xmlAttributeAccessExpr); emit(InstructionCodes.XMLATTRLOAD, varRefRegIndex, qnameRegIndex, xmlValueRegIndex); } } public void visit(BLangTryCatchFinally tryNode) { Operand gotoTryCatchEndAddr = getOperand(-1); Instruction instructGotoTryCatchEnd = InstructionFactory.get(InstructionCodes.GOTO, gotoTryCatchEndAddr); List<int[]> unhandledErrorRangeList = new ArrayList<>(); ErrorTableAttributeInfo errorTable = createErrorTableIfAbsent(currentPkgInfo); int fromIP = nextIP(); genNode(tryNode.tryBody, env); int toIP = nextIP() - 1; if (tryNode.finallyBody != null) { genNode(tryNode.finallyBody, env); } emit(instructGotoTryCatchEnd); unhandledErrorRangeList.add(new int[]{fromIP, toIP}); int order = 0; for (BLangCatch bLangCatch : tryNode.getCatchBlocks()) { addLineNumberInfo(bLangCatch.pos); int targetIP = nextIP(); genNode(bLangCatch, env); unhandledErrorRangeList.add(new int[]{targetIP, nextIP() - 1}); if (tryNode.finallyBody != null) { genNode(tryNode.finallyBody, env); } emit(instructGotoTryCatchEnd); BTypeSymbol structSymbol = bLangCatch.param.symbol.type.tsymbol; BPackageSymbol packageSymbol = (BPackageSymbol) bLangCatch.param.symbol.type.tsymbol.owner; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, packageSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); int structCPEntryIndex = currentPkgInfo.addCPEntry(structureRefCPEntry); ErrorTableEntry errorTableEntry = new ErrorTableEntry(fromIP, toIP, targetIP, order++, structCPEntryIndex); errorTable.addErrorTableEntry(errorTableEntry); } if (tryNode.finallyBody != null) { for (int[] range : unhandledErrorRangeList) { ErrorTableEntry errorTableEntry = new ErrorTableEntry(range[0], range[1], nextIP(), order++, -1); errorTable.addErrorTableEntry(errorTableEntry); } genNode(tryNode.finallyBody, env); emit(InstructionFactory.get(InstructionCodes.THROW, getOperand(-1))); } gotoTryCatchEndAddr.value = nextIP(); } public void visit(BLangCatch bLangCatch) { BLangVariable variable = bLangCatch.param; RegIndex lvIndex = getLVIndex(variable.symbol.type.tag); variable.symbol.varIndex = lvIndex; emit(InstructionFactory.get(InstructionCodes.ERRSTORE, lvIndex)); genNode(bLangCatch.body, env); } public void visit(BLangExpressionStmt exprStmtNode) { genNode(exprStmtNode.expr, this.env); } @Override public void visit(BLangIntRangeExpression rangeExpr) { BLangExpression startExpr = rangeExpr.startExpr; BLangExpression endExpr = rangeExpr.endExpr; genNode(startExpr, env); genNode(endExpr, env); rangeExpr.regIndex = calcAndGetExprRegIndex(rangeExpr); RegIndex startExprRegIndex = startExpr.regIndex; RegIndex endExprRegIndex = endExpr.regIndex; if (!rangeExpr.includeStart || !rangeExpr.includeEnd) { RegIndex const1RegIndex = getRegIndex(TypeTags.INT); emit(InstructionCodes.ICONST_1, const1RegIndex); if (!rangeExpr.includeStart) { startExprRegIndex = 
getRegIndex(TypeTags.INT); emit(InstructionCodes.IADD, startExpr.regIndex, const1RegIndex, startExprRegIndex); } if (!rangeExpr.includeEnd) { endExprRegIndex = getRegIndex(TypeTags.INT); emit(InstructionCodes.ISUB, endExpr.regIndex, const1RegIndex, endExprRegIndex); } } emit(InstructionCodes.NEW_INT_RANGE, startExprRegIndex, endExprRegIndex, rangeExpr.regIndex); } private void generateForeachVarAssignment(BLangForeach foreach, Operand iteratorIndex) { List<BLangVariableReference> variables = foreach.varRefs.stream() .map(expr -> (BLangVariableReference) expr) .collect(Collectors.toList()); variables.stream() .filter(v -> v.type.tag != TypeTags.NONE) .forEach(varRef -> visitVarSymbol((BVarSymbol) varRef.symbol, lvIndexes, localVarAttrInfo)); List<Operand> nextOperands = new ArrayList<>(); nextOperands.add(iteratorIndex); nextOperands.add(new Operand(variables.size())); foreach.varTypes.forEach(v -> nextOperands.add(new Operand(v.tag))); nextOperands.add(new Operand(variables.size())); for (int i = 0; i < variables.size(); i++) { BLangVariableReference varRef = variables.get(i); nextOperands.add(Optional.ofNullable(((BVarSymbol) varRef.symbol).varIndex) .orElse(getRegIndex(foreach.varTypes.get(i).tag))); } this.emit(InstructionCodes.ITR_NEXT, nextOperands.toArray(new Operand[0])); } private void visitFunctionPointerLoad(BLangExpression fpExpr, BInvokableSymbol funcSymbol) { int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, funcSymbol.pkgID); int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, funcSymbol.name.value); FunctionRefCPEntry funcRefCPEntry = new FunctionRefCPEntry(pkgRefCPIndex, funcNameCPIndex); Operand typeCPIndex = getTypeCPIndex(funcSymbol.type); int funcRefCPIndex = currentPkgInfo.addCPEntry(funcRefCPEntry); RegIndex nextIndex = calcAndGetExprRegIndex(fpExpr); Operand[] operands; if (!(fpExpr instanceof BLangLambdaFunction)) { operands = new Operand[4]; operands[0] = getOperand(funcRefCPIndex); operands[1] = nextIndex; operands[2] = typeCPIndex; operands[3] = new Operand(0); } else { Operand[] closureIndexes = calcAndGetClosureIndexes(((BLangLambdaFunction) fpExpr).function); operands = new Operand[3 + closureIndexes.length]; operands[0] = getOperand(funcRefCPIndex); operands[1] = nextIndex; operands[2] = typeCPIndex; System.arraycopy(closureIndexes, 0, operands, 3, closureIndexes.length); } emit(InstructionCodes.FPLOAD, operands); } private Operand[] calcAndGetClosureIndexes(BLangFunction function) { List<Operand> operands = new ArrayList<>(); int closureOperandPairs = 0; for (BVarSymbol symbol : function.symbol.params) { if (!symbol.closure || function.requiredParams.stream().anyMatch(var -> var.symbol.equals(symbol))) { continue; } Operand type = new Operand(symbol.type.tag); Operand index = new Operand(symbol.varIndex.value); operands.add(type); operands.add(index); closureOperandPairs++; } operands.add(0, new Operand(closureOperandPairs)); return operands.toArray(new Operand[]{}); } private void generateFinallyInstructions(BLangStatement statement) { generateFinallyInstructions(statement, new NodeKind[0]); } private void generateFinallyInstructions(BLangStatement statement, NodeKind... 
expectedParentKinds) { BLangStatement current = statement; while (current != null && current.statementLink.parent != null) { BLangStatement parent = current.statementLink.parent.statement; for (NodeKind expected : expectedParentKinds) { if (expected == parent.getKind()) { return; } } if (NodeKind.TRY == parent.getKind()) { BLangTryCatchFinally tryCatchFinally = (BLangTryCatchFinally) parent; if (tryCatchFinally.finallyBody != null && current != tryCatchFinally.finallyBody) { genNode(tryCatchFinally.finallyBody, env); } } else if (NodeKind.LOCK == parent.getKind()) { BLangLock lockNode = (BLangLock) parent; if (!lockNode.lockVariables.isEmpty()) { Operand[] operands = getOperands(lockNode); emit((InstructionCodes.UNLOCK), operands); } } current = parent; } } private RegIndex getNamespaceURIIndex(BXMLNSSymbol namespaceSymbol, SymbolEnv env) { if (namespaceSymbol == null && env.node.getKind() == NodeKind.XML_ATTRIBUTE) { return createStringLiteral(XMLConstants.NULL_NS_URI, null, env); } if (namespaceSymbol == null) { return createStringLiteral(null, null, env); } if ((namespaceSymbol.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (namespaceSymbol.owner.tag & SymTag.SERVICE) == SymTag.SERVICE) { return (RegIndex) namespaceSymbol.nsURIIndex; } int pkgIndex = addPackageRefCPEntry(this.currentPkgInfo, namespaceSymbol.owner.pkgID); RegIndex index = getRegIndex(TypeTags.STRING); emit(InstructionCodes.SGLOAD, getOperand(pkgIndex), namespaceSymbol.nsURIIndex, index); return index; } private void generateURILookupInstructions(Map<Name, BXMLNSSymbol> namespaces, RegIndex localNameRegIndex, RegIndex uriRegIndex, RegIndex targetQNameRegIndex, DiagnosticPos pos, SymbolEnv symbolEnv) { if (namespaces.isEmpty()) { createQNameWithoutPrefix(localNameRegIndex, uriRegIndex, targetQNameRegIndex); return; } Stack<Operand> endJumpInstrStack = new Stack<>(); String prefix; for (Entry<Name, BXMLNSSymbol> keyValues : namespaces.entrySet()) { prefix = keyValues.getKey().getValue(); if (prefix.equals(XMLConstants.DEFAULT_NS_PREFIX)) { continue; } BXMLNSSymbol nsSymbol = keyValues.getValue(); int opcode = getOpcode(TypeTags.STRING, InstructionCodes.IEQ); RegIndex conditionExprIndex = getRegIndex(TypeTags.BOOLEAN); emit(opcode, uriRegIndex, getNamespaceURIIndex(nsSymbol, symbolEnv), conditionExprIndex); Operand ifCondJumpAddr = getOperand(-1); emit(InstructionCodes.BR_FALSE, conditionExprIndex, ifCondJumpAddr); RegIndex prefixIndex = createStringLiteral(prefix, null, env); emit(InstructionCodes.NEWQNAME, localNameRegIndex, uriRegIndex, prefixIndex, targetQNameRegIndex); Operand endJumpAddr = getOperand(-1); emit(InstructionCodes.GOTO, endJumpAddr); endJumpInstrStack.add(endJumpAddr); ifCondJumpAddr.value = nextIP(); } createQNameWithoutPrefix(localNameRegIndex, uriRegIndex, targetQNameRegIndex); while (!endJumpInstrStack.isEmpty()) { endJumpInstrStack.pop().value = nextIP(); } } private void createQNameWithoutPrefix(RegIndex localNameRegIndex, RegIndex uriRegIndex, RegIndex targetQNameRegIndex) { RegIndex prefixIndex = createStringLiteral(null, null, env); emit(InstructionCodes.NEWQNAME, localNameRegIndex, uriRegIndex, prefixIndex, targetQNameRegIndex); } /** * Creates a string literal expression, generate the code and returns the registry index. 
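* Used to materialize synthetic strings (XML namespace URIs, prefixes, qualified-name parts) that have no source-level literal of their own.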
* * @param value String value to generate the string literal * @param regIndex String literal expression's reg index * @param env Environment * @return String registry index of the generated string */ private RegIndex createStringLiteral(String value, RegIndex regIndex, SymbolEnv env) { BLangLiteral prefixLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); prefixLiteral.value = value; prefixLiteral.typeTag = TypeTags.STRING; prefixLiteral.type = symTable.stringType; prefixLiteral.regIndex = regIndex; genNode(prefixLiteral, env); return prefixLiteral.regIndex; } /** * Visit XML tag name and return the index of the tag name in the reference registry. * * @param tagName Tag name expression * @param xmlElementEnv Environment of the XML element of the tag * @param xmlElementLiteral XML element literal to which the tag name belongs to * @return Index of the tag name, in the reference registry */ private RegIndex visitXMLTagName(BLangExpression tagName, SymbolEnv xmlElementEnv, BLangXMLElementLiteral xmlElementLiteral) { genNode(tagName, xmlElementEnv); RegIndex startTagNameRegIndex = tagName.regIndex; if (tagName.getKind() != NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, startTagNameRegIndex, localNameRegIndex, uriRegIndex); startTagNameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(xmlElementLiteral.namespacesInScope, localNameRegIndex, uriRegIndex, startTagNameRegIndex, xmlElementLiteral.pos, xmlElementEnv); tagName.regIndex = startTagNameRegIndex; } return startTagNameRegIndex; } /** * Get the constant pool entry index of a given type. * * @param type Type to get the constant pool entry index * @return constant pool entry index of the type */ private Operand getTypeCPIndex(BType type) { int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, type.getDesc()); TypeRefCPEntry typeRefCPEntry = new TypeRefCPEntry(typeSigCPIndex); return getOperand(currentPkgInfo.addCPEntry(typeRefCPEntry)); } private void addDocumentAttachmentAttrInfo(List<BLangDocumentation> docNodeList, AttributeInfoPool attrInfoPool) { docNodeList.forEach(docNode -> addDocumentAttachmentAttrInfo(docNode, attrInfoPool)); } private void addDocumentAttachmentAttrInfo(BLangDocumentation docNode, AttributeInfoPool attrInfoPool) { int docAttrIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.DOCUMENT_ATTACHMENT_ATTRIBUTE.value()); int descCPIndex = addUTF8CPEntry(currentPkgInfo, docNode.documentationText); DocumentationAttributeInfo docAttributeInfo = new DocumentationAttributeInfo(docAttrIndex, descCPIndex); for (BLangDocumentationAttribute paramDocNode : docNode.attributes) { int nameCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.documentationField.value); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.type.getDesc()); int paramKindCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.docTag.getValue()); int descriptionCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.documentationText); ParameterDocumentInfo paramDocInfo = new ParameterDocumentInfo( nameCPIndex, typeSigCPIndex, paramKindCPIndex, descriptionCPIndex); docAttributeInfo.paramDocInfoList.add(paramDocInfo); } attrInfoPool.addAttributeInfo(AttributeInfo.Kind.DOCUMENT_ATTACHMENT_ATTRIBUTE, docAttributeInfo); } private void addParameterAttributeInfo(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo) { int paramAttrIndex = addUTF8CPEntry(currentPkgInfo, 
AttributeInfo.Kind.PARAMETERS_ATTRIBUTE.value()); ParameterAttributeInfo paramAttrInfo = new ParameterAttributeInfo(paramAttrIndex); paramAttrInfo.requiredParamsCount = invokableNode.requiredParams.size(); paramAttrInfo.defaultableParamsCount = invokableNode.defaultableParams.size(); paramAttrInfo.restParamCount = invokableNode.restParam != null ? 1 : 0; callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.PARAMETERS_ATTRIBUTE, paramAttrInfo); addParameterDefaultValues(invokableNode, callableUnitInfo); } private void addParameterDefaultValues(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo) { int paramDefaultsAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.PARAMETER_DEFAULTS_ATTRIBUTE.value()); ParamDefaultValueAttributeInfo paramDefaulValAttrInfo = new ParamDefaultValueAttributeInfo(paramDefaultsAttrNameIndex); for (BLangVariableDef param : invokableNode.defaultableParams) { DefaultValue defaultVal = getDefaultValue((BLangLiteral) param.var.expr); paramDefaulValAttrInfo.addParamDefaultValueInfo(defaultVal); } callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.PARAMETER_DEFAULTS_ATTRIBUTE, paramDefaulValAttrInfo); } private int getValueToRefTypeCastOpcode(int typeTag) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = InstructionCodes.I2ANY; break; case TypeTags.FLOAT: opcode = InstructionCodes.F2ANY; break; case TypeTags.STRING: opcode = InstructionCodes.S2ANY; break; case TypeTags.BOOLEAN: opcode = InstructionCodes.B2ANY; break; case TypeTags.BLOB: opcode = InstructionCodes.L2ANY; break; default: opcode = InstructionCodes.NOP; break; } return opcode; } private int getRefToValueTypeCastOpcode(int typeTag) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = InstructionCodes.ANY2I; break; case TypeTags.FLOAT: opcode = InstructionCodes.ANY2F; break; case TypeTags.STRING: opcode = InstructionCodes.ANY2S; break; case TypeTags.BOOLEAN: opcode = InstructionCodes.ANY2B; break; case TypeTags.BLOB: opcode = InstructionCodes.ANY2L; break; default: opcode = InstructionCodes.NOP; break; } return opcode; } private void addPackageInfo(BPackageSymbol packageSymbol, ProgramFile programFile) { BLangPackage pkgNode = this.packageCache.get(packageSymbol.pkgID); if (pkgNode == null) { packageSymbol.imports.forEach(importPkdSymbol -> addPackageInfo(importPkdSymbol, programFile)); if (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.bvmAlias())) { programFile.packageFileMap.put(packageSymbol.pkgID.bvmAlias(), packageSymbol.packageFile); } return; } pkgNode.imports.forEach(importPkdNode -> addPackageInfo(importPkdNode.symbol, programFile)); if (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.bvmAlias())) { programFile.packageFileMap.put(packageSymbol.pkgID.bvmAlias(), packageSymbol.packageFile); } } private byte[] getPackageBinaryContent(BLangPackage pkgNode) { try { return PackageInfoWriter.getPackageBinary(this.currentPkgInfo); } catch (IOException e) { throw new BLangCompilerException("failed to generate bytecode for package '" + pkgNode.packageID + "': " + e.getMessage(), e); } } }
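// CodeGenerator walks the type-checked AST of a BLangPackage and emits BVM bytecode into a PackageInfo, interning names, types, and literal values in the package constant pool while tracking separate index spaces for package-level variables, locals, fields, and registers.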
class CodeGenerator extends BLangNodeVisitor { private static final CompilerContext.Key<CodeGenerator> CODE_GENERATOR_KEY = new CompilerContext.Key<>(); /** * This structure holds current package-level variable indexes. */ private VariableIndex pvIndexes = new VariableIndex(PACKAGE); /** * This structure holds current local variable indexes. */ private VariableIndex lvIndexes = new VariableIndex(LOCAL); /** * This structure holds current field indexes. */ private VariableIndex fieldIndexes = new VariableIndex(FIELD); /** * This structure holds current register indexes. */ private VariableIndex regIndexes = new VariableIndex(REG); /** * This structure holds the maximum register count per type. * This structure is updated for every statement. */ private VariableIndex maxRegIndexes = new VariableIndex(REG); private List<RegIndex> regIndexList = new ArrayList<>(); private SymbolEnv env; private final SymbolTable symTable; private final PackageCache packageCache; private PackageInfo currentPkgInfo; private PackageID currentPkgID; private int currentPackageRefCPIndex; private LineNumberTableAttributeInfo lineNoAttrInfo; private CallableUnitInfo currentCallableUnitInfo; private LocalVariableAttributeInfo localVarAttrInfo; private WorkerInfo currentWorkerInfo; private ServiceInfo currentServiceInfo; private boolean varAssignment = false; private boolean regIndexResetDisabled = false; private int transactionIndex = 0; private Stack<Instruction> loopResetInstructionStack = new Stack<>(); private Stack<Instruction> loopExitInstructionStack = new Stack<>(); private Stack<Instruction> abortInstructions = new Stack<>(); private Stack<Instruction> failInstructions = new Stack<>(); private int workerChannelCount = 0; private int forkJoinCount = 0; public static CodeGenerator getInstance(CompilerContext context) { CodeGenerator codeGenerator = context.get(CODE_GENERATOR_KEY); if (codeGenerator == null) { codeGenerator = new CodeGenerator(context); } return codeGenerator; } public CodeGenerator(CompilerContext context) { context.put(CODE_GENERATOR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.packageCache = PackageCache.getInstance(context); } public ProgramFile generateBALX(BLangPackage pkgNode) { ProgramFile programFile = new ProgramFile(); addPackageInfo(pkgNode.symbol, programFile); programFile.entryPkgCPIndex = addPackageRefCPEntry(programFile, pkgNode.symbol.pkgID); setEntryPoints(programFile, pkgNode); return programFile; } public BLangPackage generateBALO(BLangPackage pkgNode) { this.pvIndexes = new VariableIndex(VariableIndex.Kind.PACKAGE); this.currentPkgInfo = new PackageInfo(); genNode(pkgNode, this.symTable.pkgEnvMap.get(pkgNode.symbol)); prepareIndexes(this.pvIndexes); addVarCountAttrInfo(this.currentPkgInfo, this.currentPkgInfo, pvIndexes); pkgNode.symbol.packageFile = new PackageFile(getPackageBinaryContent(pkgNode)); setEntryPoints(pkgNode.symbol.packageFile, pkgNode); this.currentPkgInfo = null; return pkgNode; } private void setEntryPoints(CompiledBinaryFile compiledBinaryFile, BLangPackage pkgNode) { BLangFunction mainFunc = getMainFunction(pkgNode); if (mainFunc != null) { compiledBinaryFile.setMainEPAvailable(true); pkgNode.symbol.entryPointExists = true; } if (pkgNode.services.size() != 0) { compiledBinaryFile.setServiceEPAvailable(true); pkgNode.symbol.entryPointExists = true; } } private BLangFunction getMainFunction(BLangPackage pkgNode) { for (BLangFunction funcNode : pkgNode.functions) { if (CompilerUtils.isMainFunction(funcNode)) { return funcNode; } } 
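// No function in this package qualifies as a main entry point.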
return null; } public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.CODE_GEN)) { return; } pkgNode.imports.forEach(impPkgNode -> { int impPkgNameCPIndex = addUTF8CPEntry(this.currentPkgInfo, impPkgNode.symbol.pkgID.bvmAlias()); int impPkgVersionCPIndex = addUTF8CPEntry(this.currentPkgInfo, impPkgNode.symbol.pkgID.version.value); ImportPackageInfo importPkgInfo = new ImportPackageInfo(impPkgNameCPIndex, impPkgVersionCPIndex); this.currentPkgInfo.importPkgInfoSet.add(importPkgInfo); }); BPackageSymbol pkgSymbol = pkgNode.symbol; currentPkgID = pkgSymbol.pkgID; currentPkgInfo.nameCPIndex = addUTF8CPEntry(currentPkgInfo, currentPkgID.bvmAlias()); currentPkgInfo.versionCPIndex = addUTF8CPEntry(currentPkgInfo, currentPkgID.version.value); currentPackageRefCPIndex = addPackageRefCPEntry(currentPkgInfo, currentPkgID); int lineNoAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LINE_NUMBER_TABLE_ATTRIBUTE.value()); lineNoAttrInfo = new LineNumberTableAttributeInfo(lineNoAttrNameIndex); int pkgVarAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); currentPkgInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, new LocalVariableAttributeInfo(pkgVarAttrNameIndex)); pkgNode.globalVars.forEach(this::createPackageVarInfo); pkgNode.typeDefinitions.forEach(this::createTypeDefinitionInfoEntry); pkgNode.annotations.forEach(this::createAnnotationInfoEntry); pkgNode.functions.forEach(this::createFunctionInfoEntry); pkgNode.services.forEach(this::createServiceInfoEntry); pkgNode.functions.forEach(this::createFunctionInfoEntry); visitBuiltinFunctions(pkgNode.initFunction); visitBuiltinFunctions(pkgNode.startFunction); visitBuiltinFunctions(pkgNode.stopFunction); pkgNode.topLevelNodes.stream() .filter(pkgLevelNode -> pkgLevelNode.getKind() != NodeKind.VARIABLE && pkgLevelNode.getKind() != NodeKind.XMLNS) .forEach(pkgLevelNode -> genNode((BLangNode) pkgLevelNode, this.env)); pkgNode.functions.forEach(funcNode -> { funcNode.symbol = funcNode.originalFuncSymbol; }); currentPkgInfo.addAttributeInfo(AttributeInfo.Kind.LINE_NUMBER_TABLE_ATTRIBUTE, lineNoAttrInfo); currentPackageRefCPIndex = -1; currentPkgID = null; pkgNode.completedPhases.add(CompilerPhase.CODE_GEN); } private void visitBuiltinFunctions(BLangFunction function) { createFunctionInfoEntry(function); genNode(function, this.env); } public void visit(BLangService serviceNode) { BLangFunction initFunction = (BLangFunction) serviceNode.getInitFunction(); visit(initFunction); currentServiceInfo = currentPkgInfo.getServiceInfo(serviceNode.getName().getValue()); SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, this.env); serviceNode.resources.forEach(resource -> genNode(resource, serviceEnv)); } public void visit(BLangResource resourceNode) { ResourceInfo resourceInfo = currentServiceInfo.resourceInfoMap.get(resourceNode.name.getValue()); currentCallableUnitInfo = resourceInfo; SymbolEnv resourceEnv = SymbolEnv .createResourceActionSymbolEnv(resourceNode, resourceNode.symbol.scope, this.env); visitInvokableNode(resourceNode, currentCallableUnitInfo, resourceEnv); } public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, this.env); currentCallableUnitInfo = currentPkgInfo.functionInfoMap.get(funcNode.symbol.name.value); visitInvokableNode(funcNode, currentCallableUnitInfo, funcEnv); } public void visit(BLangBlockStmt blockNode) { SymbolEnv 
blockEnv = SymbolEnv.createBlockEnv(blockNode, this.env); for (BLangStatement stmt : blockNode.stmts) { if (stmt.getKind() != NodeKind.TRY && stmt.getKind() != NodeKind.CATCH && stmt.getKind() != NodeKind.IF) { addLineNumberInfo(stmt.pos); } genNode(stmt, blockEnv); if (regIndexResetDisabled) { continue; } setMaxRegIndexes(regIndexes, maxRegIndexes); regIndexes = new VariableIndex(REG); } } public void visit(BLangEnum enumNode) { } public void visit(BLangVariable varNode) { BVarSymbol varSymbol = varNode.symbol; int ownerSymTag = env.scope.owner.tag; if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) { varSymbol.varIndex = getLVIndex(varSymbol.type.tag); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); localVarAttrInfo.localVars.add(localVarInfo); } else { throw new IllegalStateException(""); } BLangExpression rhsExpr = varNode.expr; if (rhsExpr != null) { rhsExpr.regIndex = varSymbol.varIndex; genNode(rhsExpr, this.env); } } public void visit(BLangVariableDef varDefNode) { genNode(varDefNode.var, this.env); } @Override public void visit(BLangMatch matchStmt) { } public void visit(BLangReturn returnNode) { if (returnNode.expr.type != symTable.nilType) { BLangExpression expr = returnNode.expr; this.genNode(expr, this.env); emit(this.typeTagToInstr(expr.type.tag), getOperand(0), expr.regIndex); } generateFinallyInstructions(returnNode); emit(InstructionCodes.RET); } private int typeTagToInstr(int typeTag) { switch (typeTag) { case TypeTags.INT: return InstructionCodes.IRET; case TypeTags.FLOAT: return InstructionCodes.FRET; case TypeTags.STRING: return InstructionCodes.SRET; case TypeTags.BOOLEAN: return InstructionCodes.BRET; case TypeTags.BLOB: return InstructionCodes.LRET; default: return InstructionCodes.RRET; } } @Override public void visit(BLangLiteral literalExpr) { int opcode; Operand regIndex = calcAndGetExprRegIndex(literalExpr); int typeTag = literalExpr.type.tag; switch (typeTag) { case TypeTags.INT: long longVal = (Long) literalExpr.value; if (longVal >= 0 && longVal <= 5) { opcode = InstructionCodes.ICONST_0 + (int) longVal; emit(opcode, regIndex); } else { int intCPEntryIndex = currentPkgInfo.addCPEntry(new IntegerCPEntry(longVal)); emit(InstructionCodes.ICONST, getOperand(intCPEntryIndex), regIndex); } break; case TypeTags.FLOAT: double doubleVal = (Double) literalExpr.value; if (doubleVal == 0 || doubleVal == 1 || doubleVal == 2 || doubleVal == 3 || doubleVal == 4 || doubleVal == 5) { opcode = InstructionCodes.FCONST_0 + (int) doubleVal; emit(opcode, regIndex); } else { int floatCPEntryIndex = currentPkgInfo.addCPEntry(new FloatCPEntry(doubleVal)); emit(InstructionCodes.FCONST, getOperand(floatCPEntryIndex), regIndex); } break; case TypeTags.STRING: String strValue = (String) literalExpr.value; StringCPEntry stringCPEntry = new StringCPEntry(addUTF8CPEntry(currentPkgInfo, strValue), strValue); int strCPIndex = currentPkgInfo.addCPEntry(stringCPEntry); emit(InstructionCodes.SCONST, getOperand(strCPIndex), regIndex); break; case TypeTags.BOOLEAN: boolean booleanVal = (Boolean) literalExpr.value; if (!booleanVal) { opcode = InstructionCodes.BCONST_0; } else { opcode = InstructionCodes.BCONST_1; } emit(opcode, regIndex); break; case TypeTags.BLOB: byte[] blobValue = (byte[]) literalExpr.value; BlobCPEntry blobCPEntry = new BlobCPEntry(blobValue); int blobCPIndex = currentPkgInfo.addCPEntry(blobCPEntry); emit(InstructionCodes.LCONST, getOperand(blobCPIndex), regIndex); break; case TypeTags.NIL: emit(InstructionCodes.RCONST_NULL, regIndex); } } 
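// Note the compact literal encodings above: integer values 0 through 5 (and float values exactly equal to 0.0 through 5.0) use the dedicated ICONST_n/FCONST_n opcodes, while every other int, float, string, boolean, and blob value is interned as a constant-pool entry and loaded by CP index; for example, a literal 3 emits ICONST_3 <reg>, whereas a literal 42 emits ICONST <cp(42)> <reg> backed by an IntegerCPEntry.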
@Override public void visit(BLangArrayLiteral arrayLiteral) { BType etype; if (arrayLiteral.type.tag == TypeTags.ANY) { etype = arrayLiteral.type; } else { etype = ((BArrayType) arrayLiteral.type).eType; } int opcode = getOpcode(etype.tag, InstructionCodes.INEWARRAY); Operand arrayVarRegIndex = calcAndGetExprRegIndex(arrayLiteral); Operand typeCPIndex = getTypeCPIndex(arrayLiteral.type); emit(opcode, arrayVarRegIndex, typeCPIndex); for (int i = 0; i < arrayLiteral.exprs.size(); i++) { BLangExpression argExpr = arrayLiteral.exprs.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = arrayLiteral.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); opcode = getOpcode(argExpr.type.tag, InstructionCodes.IASTORE); emit(opcode, arrayVarRegIndex, indexLiteral.regIndex, argExpr.regIndex); } } @Override public void visit(BLangJSONArrayLiteral arrayLiteral) { arrayLiteral.regIndex = calcAndGetExprRegIndex(arrayLiteral); List<BLangExpression> argExprs = arrayLiteral.exprs; BLangLiteral arraySizeLiteral = new BLangLiteral(); arraySizeLiteral.pos = arrayLiteral.pos; arraySizeLiteral.value = (long) argExprs.size(); arraySizeLiteral.type = symTable.intType; genNode(arraySizeLiteral, this.env); emit(InstructionCodes.JSONNEWARRAY, arrayLiteral.regIndex, arraySizeLiteral.regIndex); for (int i = 0; i < argExprs.size(); i++) { BLangExpression argExpr = argExprs.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = arrayLiteral.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); emit(InstructionCodes.JSONASTORE, arrayLiteral.regIndex, indexLiteral.regIndex, argExpr.regIndex); } } @Override public void visit(BLangJSONLiteral jsonLiteral) { jsonLiteral.regIndex = calcAndGetExprRegIndex(jsonLiteral); Operand typeCPIndex = getTypeCPIndex(jsonLiteral.type); emit(InstructionCodes.NEWJSON, jsonLiteral.regIndex, typeCPIndex); for (BLangRecordKeyValue keyValue : jsonLiteral.keyValuePairs) { BLangExpression keyExpr = keyValue.key.expr; genNode(keyExpr, this.env); BLangExpression valueExpr = keyValue.valueExpr; genNode(valueExpr, this.env); emit(InstructionCodes.JSONSTORE, jsonLiteral.regIndex, keyExpr.regIndex, valueExpr.regIndex); } } @Override public void visit(BLangMapLiteral mapLiteral) { Operand mapVarRegIndex = calcAndGetExprRegIndex(mapLiteral); Operand typeCPIndex = getTypeCPIndex(mapLiteral.type); emit(InstructionCodes.NEWMAP, mapVarRegIndex, typeCPIndex); for (BLangRecordKeyValue keyValue : mapLiteral.keyValuePairs) { BLangExpression keyExpr = keyValue.key.expr; genNode(keyExpr, this.env); BLangExpression valueExpr = keyValue.valueExpr; genNode(valueExpr, this.env); BMapType mapType = (BMapType) mapLiteral.type; int opcode = getValueToRefTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPSTORE, mapVarRegIndex, keyExpr.regIndex, valueExpr.regIndex); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(opcode, valueExpr.regIndex, refRegMapValue); emit(InstructionCodes.MAPSTORE, mapVarRegIndex, keyExpr.regIndex, refRegMapValue); } } } @Override public void visit(BLangStructLiteral structLiteral) { BRecordTypeSymbol structSymbol = (BRecordTypeSymbol) structLiteral.type.tsymbol; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, structSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); 
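// A record literal lowers to NEWSTRUCT on a StructureRefCPEntry (package ref + type name), then CALLs the generated default-values init function and any user-defined initializer against the fresh struct register, and finally emits IFIELDSTORE-family stores for each explicit key/value pair.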
StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); Operand structCPIndex = getOperand(currentPkgInfo.addCPEntry(structureRefCPEntry)); RegIndex structRegIndex = calcAndGetExprRegIndex(structLiteral); emit(InstructionCodes.NEWSTRUCT, structCPIndex, structRegIndex); if (structSymbol.defaultsValuesInitFunc != null) { int funcRefCPIndex = getFuncRefCPIndex(structSymbol.defaultsValuesInitFunc.symbol); Operand[] operands = new Operand[5]; operands[0] = getOperand(funcRefCPIndex); operands[1] = getOperand(false); operands[2] = getOperand(1); operands[3] = structRegIndex; operands[4] = getOperand(0); emit(InstructionCodes.CALL, operands); } if (structLiteral.initializer != null) { int funcRefCPIndex = getFuncRefCPIndex(structLiteral.initializer.symbol); Operand[] operands = new Operand[5]; operands[0] = getOperand(funcRefCPIndex); operands[1] = getOperand(false); operands[2] = getOperand(1); operands[3] = structRegIndex; operands[4] = getOperand(0); emit(InstructionCodes.CALL, operands); } for (BLangRecordKeyValue keyValue : structLiteral.keyValuePairs) { BLangRecordKey key = keyValue.key; Operand fieldIndex = key.fieldSymbol.varIndex; genNode(keyValue.valueExpr, this.env); int opcode = getOpcode(key.fieldSymbol.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, structRegIndex, fieldIndex, keyValue.valueExpr.regIndex); } } @Override public void visit(BLangTableLiteral tableLiteral) { genNode(tableLiteral.configurationExpr, this.env); Operand varRefRegIndex = tableLiteral.configurationExpr.regIndex; tableLiteral.regIndex = calcAndGetExprRegIndex(tableLiteral); Operand typeCPIndex = getTypeCPIndex(tableLiteral.type); emit(InstructionCodes.NEWTABLE, tableLiteral.regIndex, typeCPIndex, varRefRegIndex); } @Override public void visit(BLangStreamLiteral streamLiteral) { streamLiteral.regIndex = calcAndGetExprRegIndex(streamLiteral); Operand typeCPIndex = getTypeCPIndex(streamLiteral.type); StringCPEntry nameCPEntry = new StringCPEntry(addUTF8CPEntry(currentPkgInfo, streamLiteral.name.value), streamLiteral.name.value); Operand nameCPIndex = getOperand(currentPkgInfo.addCPEntry(nameCPEntry)); emit(InstructionCodes.NEWSTREAM, streamLiteral.regIndex, typeCPIndex, nameCPIndex); } @Override public void visit(BLangLocalVarRef localVarRef) { if (localVarRef.regIndex != null && (localVarRef.regIndex.isLHSIndex || localVarRef.regIndex.isVarIndex)) { emit(getOpcode(localVarRef.type.tag, InstructionCodes.IMOVE), localVarRef.varSymbol.varIndex, localVarRef.regIndex); return; } localVarRef.regIndex = localVarRef.varSymbol.varIndex; } @Override public void visit(BLangFieldVarRef fieldVarRef) { RegIndex fieldIndex = fieldVarRef.varSymbol.varIndex; Operand varRegIndex = getOperand(0); if (varAssignment) { int opcode = getOpcode(fieldVarRef.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, varRegIndex, fieldIndex, fieldVarRef.regIndex); return; } int opcode = getOpcode(fieldVarRef.type.tag, InstructionCodes.IFIELDLOAD); RegIndex exprRegIndex = calcAndGetExprRegIndex(fieldVarRef); emit(opcode, varRegIndex, fieldIndex, exprRegIndex); } @Override public void visit(BLangPackageVarRef packageVarRef) { BPackageSymbol pkgSymbol; BSymbol ownerSymbol = packageVarRef.symbol.owner; if (ownerSymbol.tag == SymTag.SERVICE) { pkgSymbol = (BPackageSymbol) ownerSymbol.owner; } else { pkgSymbol = (BPackageSymbol) ownerSymbol; } Operand gvIndex = packageVarRef.varSymbol.varIndex; int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, pkgSymbol.pkgID); if (varAssignment) { int opcode = 
getOpcode(packageVarRef.type.tag, InstructionCodes.IGSTORE); emit(opcode, getOperand(pkgRefCPIndex), packageVarRef.regIndex, gvIndex); } else { int opcode = getOpcode(packageVarRef.type.tag, InstructionCodes.IGLOAD); packageVarRef.regIndex = calcAndGetExprRegIndex(packageVarRef); emit(opcode, getOperand(pkgRefCPIndex), gvIndex, packageVarRef.regIndex); } } @Override public void visit(BLangFunctionVarRef functionVarRef) { visitFunctionPointerLoad(functionVarRef, (BInvokableSymbol) functionVarRef.symbol); } @Override public void visit(BLangSimpleVarRef.BLangTypeLoad typeLoad) { Operand typeCPIndex = getTypeCPIndex(typeLoad.symbol.type); emit(InstructionCodes.TYPELOAD, typeCPIndex, calcAndGetExprRegIndex(typeLoad)); } @Override public void visit(BLangStructFieldAccessExpr fieldAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(fieldAccessExpr.expr, this.env); Operand varRefRegIndex = fieldAccessExpr.expr.regIndex; int opcode; Operand fieldIndex = fieldAccessExpr.varSymbol.varIndex; if (variableStore) { opcode = getOpcode(fieldAccessExpr.symbol.type.tag, InstructionCodes.IFIELDSTORE); emit(opcode, varRefRegIndex, fieldIndex, fieldAccessExpr.regIndex); } else { opcode = getOpcode(fieldAccessExpr.symbol.type.tag, InstructionCodes.IFIELDLOAD); emit(opcode, varRefRegIndex, fieldIndex, calcAndGetExprRegIndex(fieldAccessExpr)); } this.varAssignment = variableStore; } @Override public void visit(BLangStructFunctionVarRef functionVarRef) { visitFunctionPointerLoad(functionVarRef, (BInvokableSymbol) functionVarRef.symbol); } @Override public void visit(BLangMapAccessExpr mapKeyAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(mapKeyAccessExpr.expr, this.env); Operand varRefRegIndex = mapKeyAccessExpr.expr.regIndex; genNode(mapKeyAccessExpr.indexExpr, this.env); Operand keyRegIndex = mapKeyAccessExpr.indexExpr.regIndex; BMapType mapType = (BMapType) mapKeyAccessExpr.expr.type; if (variableStore) { int opcode = getValueToRefTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPSTORE, varRefRegIndex, keyRegIndex, mapKeyAccessExpr.regIndex); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(opcode, mapKeyAccessExpr.regIndex, refRegMapValue); emit(InstructionCodes.MAPSTORE, varRefRegIndex, keyRegIndex, refRegMapValue); } } else { IntegerCPEntry exceptCPEntry = new IntegerCPEntry(mapKeyAccessExpr.except ? 
1 : 0); Operand except = getOperand(currentPkgInfo.addCPEntry(exceptCPEntry)); int opcode = getRefToValueTypeCastOpcode(mapType.constraint.tag); if (opcode == InstructionCodes.NOP) { emit(InstructionCodes.MAPLOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(mapKeyAccessExpr), except); } else { RegIndex refRegMapValue = getRegIndex(TypeTags.ANY); emit(InstructionCodes.MAPLOAD, varRefRegIndex, keyRegIndex, refRegMapValue, except); emit(opcode, refRegMapValue, calcAndGetExprRegIndex(mapKeyAccessExpr)); } } this.varAssignment = variableStore; } @Override public void visit(BLangJSONAccessExpr jsonAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(jsonAccessExpr.expr, this.env); Operand varRefRegIndex = jsonAccessExpr.expr.regIndex; genNode(jsonAccessExpr.indexExpr, this.env); Operand keyRegIndex = jsonAccessExpr.indexExpr.regIndex; if (jsonAccessExpr.indexExpr.type.tag == TypeTags.INT) { if (variableStore) { emit(InstructionCodes.JSONASTORE, varRefRegIndex, keyRegIndex, jsonAccessExpr.regIndex); } else { emit(InstructionCodes.JSONALOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(jsonAccessExpr)); } } else { if (variableStore) { emit(InstructionCodes.JSONSTORE, varRefRegIndex, keyRegIndex, jsonAccessExpr.regIndex); } else { emit(InstructionCodes.JSONLOAD, varRefRegIndex, keyRegIndex, calcAndGetExprRegIndex(jsonAccessExpr)); } } this.varAssignment = variableStore; } @Override public void visit(BLangXMLAccessExpr xmlIndexAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(xmlIndexAccessExpr.expr, this.env); RegIndex varRefRegIndex = xmlIndexAccessExpr.expr.regIndex; genNode(xmlIndexAccessExpr.indexExpr, this.env); RegIndex indexRegIndex = xmlIndexAccessExpr.indexExpr.regIndex; RegIndex elementRegIndex = calcAndGetExprRegIndex(xmlIndexAccessExpr); if (xmlIndexAccessExpr.fieldType == FieldKind.ALL) { emit(InstructionCodes.XMLLOADALL, varRefRegIndex, elementRegIndex); } else if (xmlIndexAccessExpr.indexExpr.type.tag == TypeTags.STRING) { emit(InstructionCodes.XMLLOAD, varRefRegIndex, indexRegIndex, elementRegIndex); } else { emit(InstructionCodes.XMLSEQLOAD, varRefRegIndex, indexRegIndex, elementRegIndex); } this.varAssignment = variableStore; } @Override public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(arrayIndexAccessExpr.expr, this.env); Operand varRefRegIndex = arrayIndexAccessExpr.expr.regIndex; genNode(arrayIndexAccessExpr.indexExpr, this.env); Operand indexRegIndex = arrayIndexAccessExpr.indexExpr.regIndex; BArrayType arrayType = (BArrayType) arrayIndexAccessExpr.expr.type; if (variableStore) { int opcode = getOpcode(arrayType.eType.tag, InstructionCodes.IASTORE); emit(opcode, varRefRegIndex, indexRegIndex, arrayIndexAccessExpr.regIndex); } else { int opcode = getOpcode(arrayType.eType.tag, InstructionCodes.IALOAD); emit(opcode, varRefRegIndex, indexRegIndex, calcAndGetExprRegIndex(arrayIndexAccessExpr)); } this.varAssignment = variableStore; } @Override public void visit(BLangEnumeratorAccessExpr enumeratorAccessExpr) { } @Override public void visit(BLangElvisExpr elvisExpr) { } @Override public void visit(BLangIsAssignableExpr assignableExpr) { genNode(assignableExpr.lhsExpr, this.env); RegIndex regIndex = calcAndGetExprRegIndex(assignableExpr); Operand typeCPIndex = getTypeCPIndex(assignableExpr.targetType); emit(assignableExpr.opSymbol.opcode, assignableExpr.lhsExpr.regIndex, typeCPIndex, 
regIndex); } @Override public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) { RegIndex exprRegIndex = calcAndGetExprRegIndex(bracedOrTupleExpr); Operand typeCPIndex = getTypeCPIndex(bracedOrTupleExpr.type); emit(InstructionCodes.RNEWARRAY, exprRegIndex, typeCPIndex); for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) { BLangExpression argExpr = bracedOrTupleExpr.expressions.get(i); genNode(argExpr, this.env); BLangLiteral indexLiteral = new BLangLiteral(); indexLiteral.pos = argExpr.pos; indexLiteral.value = (long) i; indexLiteral.type = symTable.intType; genNode(indexLiteral, this.env); emit(InstructionCodes.RASTORE, exprRegIndex, indexLiteral.regIndex, argExpr.regIndex); } } private void visitAndExpression(BLangBinaryExpr binaryExpr) { Operand falseJumpAddr = getOperand(-1); genNode(binaryExpr.lhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.lhsExpr.regIndex, falseJumpAddr); genNode(binaryExpr.rhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.rhsExpr.regIndex, falseJumpAddr); calcAndGetExprRegIndex(binaryExpr); emit(InstructionCodes.BCONST_1, binaryExpr.regIndex); Operand gotoAddr = getOperand(-1); emit(InstructionCodes.GOTO, gotoAddr); falseJumpAddr.value = nextIP(); emit(InstructionCodes.BCONST_0, binaryExpr.regIndex); gotoAddr.value = nextIP(); } private void visitOrExpression(BLangBinaryExpr binaryExpr) { Operand lExprTrueJumpAddr = getOperand(-1); Operand rExprFalseJumpAddr = getOperand(-1); genNode(binaryExpr.lhsExpr, this.env); emit(InstructionCodes.BR_TRUE, binaryExpr.lhsExpr.regIndex, lExprTrueJumpAddr); genNode(binaryExpr.rhsExpr, this.env); emit(InstructionCodes.BR_FALSE, binaryExpr.rhsExpr.regIndex, rExprFalseJumpAddr); lExprTrueJumpAddr.value = nextIP(); RegIndex exprRegIndex = calcAndGetExprRegIndex(binaryExpr); emit(InstructionCodes.BCONST_1, exprRegIndex); Operand gotoAddr = getOperand(-1); emit(InstructionCodes.GOTO, gotoAddr); rExprFalseJumpAddr.value = nextIP(); emit(InstructionCodes.BCONST_0, exprRegIndex); gotoAddr.value = nextIP(); } public void visit(BLangInvocation iExpr) { if (iExpr.expr != null) { return; } Operand[] operands = getFuncOperands(iExpr); emit(InstructionCodes.CALL, operands); } public void visit(BLangActionInvocation aIExpr) { } public void visit(BLangTypeInit cIExpr) { BSymbol structSymbol = cIExpr.type.tsymbol; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, structSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); Operand structCPIndex = getOperand(currentPkgInfo.addCPEntry(structureRefCPEntry)); RegIndex structRegIndex = calcAndGetExprRegIndex(cIExpr); emit(InstructionCodes.NEWSTRUCT, structCPIndex, structRegIndex); Operand[] operands = getFuncOperands(cIExpr.objectInitInvocation); Operand[] callOperands = new Operand[operands.length + 1]; callOperands[0] = operands[0]; callOperands[1] = operands[1]; callOperands[2] = getOperand(operands[2].value + 1); callOperands[3] = structRegIndex; System.arraycopy(operands, 3, callOperands, 4, operands.length - 3); emit(InstructionCodes.CALL, callOperands); } public void visit(BLangAttachedFunctionInvocation iExpr) { Operand[] operands = getFuncOperands(iExpr); if (iExpr.expr.type.tag == TypeTags.OBJECT) { Operand[] vCallOperands = new Operand[operands.length + 1]; vCallOperands[0] = iExpr.expr.regIndex; System.arraycopy(operands, 0, vCallOperands, 1, operands.length); emit(InstructionCodes.VCALL, vCallOperands); } 
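// Receivers that are not objects fall through to a plain CALL; object receivers above get VCALL
// with the receiver register prepended, so the call is dispatched against the receiver.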
else { emit(InstructionCodes.CALL, operands); } } public void visit(BFunctionPointerInvocation iExpr) { Operand[] operands = getFuncOperands(iExpr, -1); genNode(iExpr.expr, env); operands[0] = iExpr.expr.regIndex; emit(InstructionCodes.FPCALL, operands); } public void visit(BLangTypeConversionExpr convExpr) { int opcode = convExpr.conversionSymbol.opcode; BType castExprType = convExpr.type; RegIndex convExprRegIndex = calcAndGetExprRegIndex(convExpr.regIndex, castExprType.tag); convExpr.regIndex = convExprRegIndex; if (opcode == InstructionCodes.NOP) { convExpr.expr.regIndex = createLHSRegIndex(convExprRegIndex); genNode(convExpr.expr, this.env); return; } genNode(convExpr.expr, this.env); if (opcode == InstructionCodes.MAP2T || opcode == InstructionCodes.JSON2T || opcode == InstructionCodes.ANY2T || opcode == InstructionCodes.ANY2C || opcode == InstructionCodes.ANY2E || opcode == InstructionCodes.ANY2M || opcode == InstructionCodes.T2JSON || opcode == InstructionCodes.MAP2JSON || opcode == InstructionCodes.JSON2MAP || opcode == InstructionCodes.JSON2ARRAY || opcode == InstructionCodes.CHECKCAST) { Operand typeCPIndex = getTypeCPIndex(convExpr.targetType); emit(opcode, convExpr.expr.regIndex, typeCPIndex, convExprRegIndex); } else { emit(opcode, convExpr.expr.regIndex, convExprRegIndex); } } public void visit(BLangRecordLiteral recordLiteral) { /* ignore */ } public void visit(BLangTernaryExpr ternaryExpr) { RegIndex ternaryExprRegIndex = calcAndGetExprRegIndex(ternaryExpr); this.genNode(ternaryExpr.expr, this.env); Operand ifFalseJumpAddr = getOperand(-1); this.emit(InstructionCodes.BR_FALSE, ternaryExpr.expr.regIndex, ifFalseJumpAddr); ternaryExpr.thenExpr.regIndex = createLHSRegIndex(ternaryExprRegIndex); this.genNode(ternaryExpr.thenExpr, this.env); Operand endJumpAddr = getOperand(-1); this.emit(InstructionCodes.GOTO, endJumpAddr); ifFalseJumpAddr.value = nextIP(); ternaryExpr.elseExpr.regIndex = createLHSRegIndex(ternaryExprRegIndex); this.genNode(ternaryExpr.elseExpr, this.env); endJumpAddr.value = nextIP(); } public void visit(BLangAwaitExpr awaitExpr) { Operand valueRegIndex; if (awaitExpr.type != null) { valueRegIndex = calcAndGetExprRegIndex(awaitExpr); } else { valueRegIndex = this.getOperand(-1); } genNode(awaitExpr.expr, this.env); Operand futureRegIndex = awaitExpr.expr.regIndex; this.emit(InstructionCodes.AWAIT, futureRegIndex, valueRegIndex); } public void visit(BLangTypedescExpr accessExpr) { Operand typeCPIndex = getTypeCPIndex(accessExpr.resolvedType); emit(InstructionCodes.TYPELOAD, typeCPIndex, calcAndGetExprRegIndex(accessExpr)); } public void visit(BLangUnaryExpr unaryExpr) { RegIndex exprIndex = calcAndGetExprRegIndex(unaryExpr); if (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.UNTAINT.equals(unaryExpr.operator)) { unaryExpr.expr.regIndex = createLHSRegIndex(unaryExpr.regIndex); genNode(unaryExpr.expr, this.env); return; } int opcode; genNode(unaryExpr.expr, this.env); if (OperatorKind.LENGTHOF.equals(unaryExpr.operator)) { Operand typeCPIndex = getTypeCPIndex(unaryExpr.expr.type); opcode = unaryExpr.opSymbol.opcode; emit(opcode, unaryExpr.expr.regIndex, typeCPIndex, exprIndex); } else { opcode = unaryExpr.opSymbol.opcode; emit(opcode, unaryExpr.expr.regIndex, exprIndex); } } public void visit(BLangLambdaFunction bLangLambdaFunction) { visitFunctionPointerLoad(bLangLambdaFunction, ((BLangFunction) bLangLambdaFunction.getFunctionNode()).symbol); } public void visit(BLangStatementExpression bLangStatementExpression) { bLangStatementExpression.regIndex 
= calcAndGetExprRegIndex(bLangStatementExpression); boolean prevRegIndexResetDisabledState = this.regIndexResetDisabled; this.regIndexResetDisabled = true; genNode(bLangStatementExpression.stmt, this.env); this.regIndexResetDisabled = prevRegIndexResetDisabledState; genNode(bLangStatementExpression.expr, this.env); emit(getOpcode(bLangStatementExpression.expr.type.tag, InstructionCodes.IMOVE), bLangStatementExpression.expr.regIndex, bLangStatementExpression.regIndex); } private <T extends BLangNode, U extends SymbolEnv> T genNode(T t, U u) { SymbolEnv prevEnv = this.env; this.env = u; t.accept(this); this.env = prevEnv; return t; } private String generateSig(BType[] types) { StringBuilder builder = new StringBuilder(); Arrays.stream(types).forEach(e -> builder.append(e.getDesc())); return builder.toString(); } private String generateFunctionSig(BType[] paramTypes, BType retType) { return "(" + generateSig(paramTypes) + ")(" + retType.getDesc() + ")"; } private String generateFunctionSig(BType[] paramTypes) { return "(" + generateSig(paramTypes) + ")()"; } private int getNextIndex(int typeTag, VariableIndex indexes) { int index; switch (typeTag) { case TypeTags.INT: index = ++indexes.tInt; break; case TypeTags.FLOAT: index = ++indexes.tFloat; break; case TypeTags.STRING: index = ++indexes.tString; break; case TypeTags.BOOLEAN: index = ++indexes.tBoolean; break; case TypeTags.BLOB: index = ++indexes.tBlob; break; default: index = ++indexes.tRef; break; } return index; } private int getOpcode(int typeTag, int baseOpcode) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = baseOpcode; break; case TypeTags.FLOAT: opcode = baseOpcode + FLOAT_OFFSET; break; case TypeTags.STRING: opcode = baseOpcode + STRING_OFFSET; break; case TypeTags.BOOLEAN: opcode = baseOpcode + BOOL_OFFSET; break; case TypeTags.BLOB: opcode = baseOpcode + BLOB_OFFSET; break; default: opcode = baseOpcode + REF_OFFSET; break; } return opcode; } private Operand getOperand(int value) { return new Operand(value); } private Operand getOperand(boolean value) { return new Operand(value ? 
1 : 0); } private RegIndex getLVIndex(int typeTag) { return getRegIndexInternal(typeTag, LOCAL); } private RegIndex getPVIndex(int typeTag) { return getRegIndexInternal(typeTag, PACKAGE); } private RegIndex getFieldIndex(int typeTag) { return getRegIndexInternal(typeTag, FIELD); } private RegIndex getRegIndex(int typeTag) { RegIndex regIndex = getRegIndexInternal(typeTag, REG); addToRegIndexList(regIndex); return regIndex; } private RegIndex getRegIndexInternal(int typeTag, VariableIndex.Kind varIndexKind) { int index; switch (varIndexKind) { case REG: return new RegIndex(getNextIndex(typeTag, regIndexes), typeTag); case PACKAGE: index = getNextIndex(typeTag, pvIndexes); break; case FIELD: index = getNextIndex(typeTag, fieldIndexes); break; default: index = getNextIndex(typeTag, lvIndexes); break; } RegIndex regIndex = new RegIndex(index, typeTag); regIndex.isVarIndex = true; return regIndex; } private RegIndex calcAndGetExprRegIndex(BLangExpression expr) { expr.regIndex = calcAndGetExprRegIndex(expr.regIndex, expr.type.tag); return expr.regIndex; } private RegIndex calcAndGetExprRegIndex(RegIndex regIndex, int typeTag) { if (regIndex != null && (regIndex.isVarIndex || regIndex.isLHSIndex)) { return regIndex; } return getRegIndex(typeTag); } private RegIndex createLHSRegIndex(RegIndex regIndex) { if (regIndex.isVarIndex || regIndex.isLHSIndex) { return regIndex; } RegIndex lhsRegIndex = new RegIndex(regIndex.value, regIndex.typeTag, true); addToRegIndexList(lhsRegIndex); return lhsRegIndex; } private void addToRegIndexList(RegIndex regIndex) { if (regIndex.isVarIndex) { throw new IllegalStateException(""); } regIndexList.add(regIndex); } private LocalVariableInfo getLocalVarAttributeInfo(BVarSymbol varSymbol) { int varNameCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.name.value); int varIndex = varSymbol.varIndex.value; int sigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.type.getDesc()); return new LocalVariableInfo(varNameCPIndex, sigCPIndex, varIndex); } private void visitInvokableNode(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo, SymbolEnv invokableSymbolEnv) { int localVarAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); LocalVariableAttributeInfo localVarAttributeInfo = new LocalVariableAttributeInfo(localVarAttrNameIndex); visitInvokableNodeParams(invokableNode.symbol, callableUnitInfo, localVarAttributeInfo); if (Symbols.isNative(invokableNode.symbol)) { this.processWorker(callableUnitInfo.defaultWorkerInfo, null, localVarAttributeInfo, invokableSymbolEnv, null); } else { VariableIndex lvIndexCopy = this.copyVarIndex(lvIndexes); this.processWorker(callableUnitInfo.defaultWorkerInfo, invokableNode.body, localVarAttributeInfo, invokableSymbolEnv, lvIndexCopy); for (BLangWorker worker : invokableNode.getWorkers()) { this.processWorker(callableUnitInfo.getWorkerInfo(worker.name.value), worker.body, localVarAttributeInfo, invokableSymbolEnv, this.copyVarIndex(lvIndexCopy)); } } if (invokableNode.symbol.taintTable != null) { int taintTableAttributeNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.TAINT_TABLE.value()); TaintTableAttributeInfo taintTableAttributeInfo = new TaintTableAttributeInfo(taintTableAttributeNameIndex); visitTaintTable(invokableNode.symbol.taintTable, taintTableAttributeInfo); callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.TAINT_TABLE, taintTableAttributeInfo); } } private void visitTaintTable(Map<Integer, TaintRecord> taintTable, TaintTableAttributeInfo 
taintTableAttributeInfo) { int rowCount = 0; for (Integer paramIndex : taintTable.keySet()) { TaintRecord taintRecord = taintTable.get(paramIndex); boolean added = addTaintTableEntry(taintTableAttributeInfo, paramIndex, taintRecord); if (added) { taintTableAttributeInfo.columnCount = taintRecord.retParamTaintedStatus.size(); rowCount++; } } taintTableAttributeInfo.rowCount = rowCount; } private boolean addTaintTableEntry(TaintTableAttributeInfo taintTableAttributeInfo, int index, TaintRecord taintRecord) { if (taintRecord.taintError == null || taintRecord.taintError.isEmpty()) { taintTableAttributeInfo.taintTable.put(index, taintRecord.retParamTaintedStatus); return true; } return false; } private void processWorker(WorkerInfo workerInfo, BLangBlockStmt body, LocalVariableAttributeInfo localVarAttributeInfo, SymbolEnv invokableSymbolEnv, VariableIndex lvIndexCopy) { workerInfo.codeAttributeInfo.attributeNameIndex = this.addUTF8CPEntry( this.currentPkgInfo, AttributeInfo.Kind.CODE_ATTRIBUTE.value()); workerInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttributeInfo); if (body != null) { localVarAttrInfo = new LocalVariableAttributeInfo(localVarAttributeInfo.attributeNameIndex); localVarAttrInfo.localVars = new ArrayList<>(localVarAttributeInfo.localVars); workerInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttrInfo); workerInfo.codeAttributeInfo.codeAddrs = nextIP(); this.lvIndexes = lvIndexCopy; this.currentWorkerInfo = workerInfo; this.genNode(body, invokableSymbolEnv); } this.endWorkerInfoUnit(workerInfo.codeAttributeInfo); this.emit(InstructionCodes.HALT); } private void visitInvokableNodeParams(BInvokableSymbol invokableSymbol, CallableUnitInfo callableUnitInfo, LocalVariableAttributeInfo localVarAttrInfo) { invokableSymbol.params.forEach(param -> visitVarSymbol(param, lvIndexes, localVarAttrInfo)); invokableSymbol.defaultableParams.forEach(param -> visitVarSymbol(param, lvIndexes, localVarAttrInfo)); if (invokableSymbol.restParam != null) { visitVarSymbol(invokableSymbol.restParam, lvIndexes, localVarAttrInfo); } callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttrInfo); } private void visitVarSymbol(BVarSymbol varSymbol, VariableIndex variableIndex, LocalVariableAttributeInfo localVarAttrInfo) { varSymbol.varIndex = getRegIndexInternal(varSymbol.type.tag, variableIndex.kind); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); localVarAttrInfo.localVars.add(localVarInfo); } private VariableIndex copyVarIndex(VariableIndex that) { VariableIndex vIndexes = new VariableIndex(that.kind); vIndexes.tInt = that.tInt; vIndexes.tFloat = that.tFloat; vIndexes.tString = that.tString; vIndexes.tBoolean = that.tBoolean; vIndexes.tBlob = that.tBlob; vIndexes.tRef = that.tRef; return vIndexes; } private int nextIP() { return currentPkgInfo.instructionList.size(); } private void endWorkerInfoUnit(CodeAttributeInfo codeAttributeInfo) { codeAttributeInfo.maxLongLocalVars = lvIndexes.tInt + 1; codeAttributeInfo.maxDoubleLocalVars = lvIndexes.tFloat + 1; codeAttributeInfo.maxStringLocalVars = lvIndexes.tString + 1; codeAttributeInfo.maxIntLocalVars = lvIndexes.tBoolean + 1; codeAttributeInfo.maxByteLocalVars = lvIndexes.tBlob + 1; codeAttributeInfo.maxRefLocalVars = lvIndexes.tRef + 1; codeAttributeInfo.maxLongRegs = codeAttributeInfo.maxLongLocalVars + maxRegIndexes.tInt + 1; codeAttributeInfo.maxDoubleRegs = codeAttributeInfo.maxDoubleLocalVars + maxRegIndexes.tFloat + 1; 
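// Temporary registers are stacked after the local-variable slots of the same type; the remap loop
// below rebases every collected RegIndex by the matching local-variable count so locals and
// temporaries share one contiguous slot space.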
codeAttributeInfo.maxStringRegs = codeAttributeInfo.maxStringLocalVars + maxRegIndexes.tString + 1; codeAttributeInfo.maxIntRegs = codeAttributeInfo.maxIntLocalVars + maxRegIndexes.tBoolean + 1; codeAttributeInfo.maxByteRegs = codeAttributeInfo.maxByteLocalVars + maxRegIndexes.tBlob + 1; codeAttributeInfo.maxRefRegs = codeAttributeInfo.maxRefLocalVars + maxRegIndexes.tRef + 1; for (RegIndex regIndex : regIndexList) { switch (regIndex.typeTag) { case TypeTags.INT: regIndex.value = regIndex.value + codeAttributeInfo.maxLongLocalVars; break; case TypeTags.FLOAT: regIndex.value = regIndex.value + codeAttributeInfo.maxDoubleLocalVars; break; case TypeTags.STRING: regIndex.value = regIndex.value + codeAttributeInfo.maxStringLocalVars; break; case TypeTags.BOOLEAN: regIndex.value = regIndex.value + codeAttributeInfo.maxIntLocalVars; break; case TypeTags.BLOB: regIndex.value = regIndex.value + codeAttributeInfo.maxByteLocalVars; break; default: regIndex.value = regIndex.value + codeAttributeInfo.maxRefLocalVars; break; } } regIndexList = new ArrayList<>(); lvIndexes = new VariableIndex(LOCAL); regIndexes = new VariableIndex(REG); maxRegIndexes = new VariableIndex(REG); } private void setMaxRegIndexes(VariableIndex current, VariableIndex max) { max.tInt = (max.tInt > current.tInt) ? max.tInt : current.tInt; max.tFloat = (max.tFloat > current.tFloat) ? max.tFloat : current.tFloat; max.tString = (max.tString > current.tString) ? max.tString : current.tString; max.tBoolean = (max.tBoolean > current.tBoolean) ? max.tBoolean : current.tBoolean; max.tBlob = (max.tBlob > current.tBlob) ? max.tBlob : current.tBlob; max.tRef = (max.tRef > current.tRef) ? max.tRef : current.tRef; } private void prepareIndexes(VariableIndex indexes) { indexes.tInt++; indexes.tFloat++; indexes.tString++; indexes.tBoolean++; indexes.tBlob++; indexes.tRef++; } private int emit(int opcode) { currentPkgInfo.instructionList.add(InstructionFactory.get(opcode)); return currentPkgInfo.instructionList.size(); } private int emit(int opcode, Operand... 
operands) { currentPkgInfo.instructionList.add(InstructionFactory.get(opcode, operands)); return currentPkgInfo.instructionList.size(); } private int emit(Instruction instr) { currentPkgInfo.instructionList.add(instr); return currentPkgInfo.instructionList.size(); } private void addVarCountAttrInfo(ConstantPool constantPool, AttributeInfoPool attributeInfoPool, VariableIndex fieldCount) { int attrNameCPIndex = addUTF8CPEntry(constantPool, AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE.value()); VarTypeCountAttributeInfo varCountAttribInfo = new VarTypeCountAttributeInfo(attrNameCPIndex); varCountAttribInfo.setMaxLongVars(fieldCount.tInt); varCountAttribInfo.setMaxDoubleVars(fieldCount.tFloat); varCountAttribInfo.setMaxStringVars(fieldCount.tString); varCountAttribInfo.setMaxIntVars(fieldCount.tBoolean); varCountAttribInfo.setMaxByteVars(fieldCount.tBlob); varCountAttribInfo.setMaxRefVars(fieldCount.tRef); attributeInfoPool.addAttributeInfo(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE, varCountAttribInfo); } private Operand[] getFuncOperands(BLangInvocation iExpr) { int funcRefCPIndex = getFuncRefCPIndex((BInvokableSymbol) iExpr.symbol); return getFuncOperands(iExpr, funcRefCPIndex); } private int getFuncRefCPIndex(BInvokableSymbol invokableSymbol) { int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, invokableSymbol.pkgID); int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, invokableSymbol.name.value); FunctionRefCPEntry funcRefCPEntry = new FunctionRefCPEntry(pkgRefCPIndex, funcNameCPIndex); return currentPkgInfo.addCPEntry(funcRefCPEntry); } private Operand[] getFuncOperands(BLangInvocation iExpr, int funcRefCPIndex) { int i = 0; int nArgRegs = iExpr.requiredArgs.size() + iExpr.namedArgs.size() + iExpr.restArgs.size(); int nRetRegs = 1; int flags = FunctionFlags.NOTHING; Operand[] operands = new Operand[nArgRegs + nRetRegs + 4]; operands[i++] = getOperand(funcRefCPIndex); if (iExpr.async) { flags = FunctionFlags.markAsync(flags); } if (iExpr.actionInvocation) { flags = FunctionFlags.markObserved(flags); } operands[i++] = getOperand(flags); operands[i++] = getOperand(nArgRegs); for (BLangExpression argExpr : iExpr.requiredArgs) { operands[i++] = genNode(argExpr, this.env).regIndex; } i = generateNamedArgs(iExpr, operands, i); for (BLangExpression argExpr : iExpr.restArgs) { operands[i++] = genNode(argExpr, this.env).regIndex; } operands[i++] = getOperand(nRetRegs); iExpr.regIndex = calcAndGetExprRegIndex(iExpr.regIndex, iExpr.type.tag); operands[i] = iExpr.regIndex; return operands; } private int generateNamedArgs(BLangInvocation iExpr, Operand[] operands, int currentIndex) { if (iExpr.namedArgs.isEmpty()) { return currentIndex; } if (iExpr.symbol.kind != SymbolKind.FUNCTION) { throw new IllegalStateException("Unsupported callable unit"); } for (BLangExpression argExpr : iExpr.namedArgs) { operands[currentIndex++] = genNode(argExpr, this.env).regIndex; } return currentIndex; } private void addVariableCountAttributeInfo(ConstantPool constantPool, AttributeInfoPool attributeInfoPool, int[] fieldCount) { UTF8CPEntry attribNameCPEntry = new UTF8CPEntry(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE.toString()); int attribNameCPIndex = constantPool.addCPEntry(attribNameCPEntry); VarTypeCountAttributeInfo varCountAttribInfo = new VarTypeCountAttributeInfo(attribNameCPIndex); varCountAttribInfo.setMaxLongVars(fieldCount[INT_OFFSET]); varCountAttribInfo.setMaxDoubleVars(fieldCount[FLOAT_OFFSET]); varCountAttribInfo.setMaxStringVars(fieldCount[STRING_OFFSET]); 
varCountAttribInfo.setMaxIntVars(fieldCount[BOOL_OFFSET]); varCountAttribInfo.setMaxByteVars(fieldCount[BLOB_OFFSET]); varCountAttribInfo.setMaxRefVars(fieldCount[REF_OFFSET]); attributeInfoPool.addAttributeInfo(AttributeInfo.Kind.VARIABLE_TYPE_COUNT_ATTRIBUTE, varCountAttribInfo); } private DefaultValue getDefaultValue(BLangLiteral literalExpr) { String desc = literalExpr.type.getDesc(); int typeDescCPIndex = addUTF8CPEntry(currentPkgInfo, desc); DefaultValue defaultValue = new DefaultValue(typeDescCPIndex, desc); int typeTag = literalExpr.type.tag; switch (typeTag) { case TypeTags.INT: defaultValue.intValue = (Long) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new IntegerCPEntry(defaultValue.intValue)); break; case TypeTags.FLOAT: defaultValue.floatValue = (Double) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new FloatCPEntry(defaultValue.floatValue)); break; case TypeTags.STRING: defaultValue.stringValue = (String) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new UTF8CPEntry(defaultValue.stringValue)); break; case TypeTags.BOOLEAN: defaultValue.booleanValue = (Boolean) literalExpr.value; break; case TypeTags.BLOB: defaultValue.blobValue = (byte[]) literalExpr.value; defaultValue.valueCPIndex = currentPkgInfo.addCPEntry(new BlobCPEntry(defaultValue.blobValue)); break; case TypeTags.NIL: break; default: defaultValue = null; } return defaultValue; } private DefaultValueAttributeInfo getDefaultValueAttributeInfo(BLangLiteral literalExpr) { DefaultValue defaultValue = getDefaultValue(literalExpr); UTF8CPEntry defaultValueAttribUTF8CPEntry = new UTF8CPEntry(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE.toString()); int defaultValueAttribNameIndex = currentPkgInfo.addCPEntry(defaultValueAttribUTF8CPEntry); return new DefaultValueAttributeInfo(defaultValueAttribNameIndex, defaultValue); } private void createPackageVarInfo(BLangVariable varNode) { BVarSymbol varSymbol = varNode.symbol; varSymbol.varIndex = getPVIndex(varSymbol.type.tag); int varNameCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.name.value); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.type.getDesc()); PackageVarInfo pkgVarInfo = new PackageVarInfo(varNameCPIndex, typeSigCPIndex, varSymbol.flags, varSymbol.varIndex.value); currentPkgInfo.pkgVarInfoMap.put(varSymbol.name.value, pkgVarInfo); LocalVariableInfo localVarInfo = getLocalVarAttributeInfo(varSymbol); LocalVariableAttributeInfo pkgVarAttrInfo = (LocalVariableAttributeInfo) currentPkgInfo.getAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE); pkgVarAttrInfo.localVars.add(localVarInfo); addDocumentAttachmentAttrInfo(varNode.docAttachments, pkgVarInfo); } public void visit(BLangTypeDefinition typeDefinition) { } private void createAnnotationInfoEntry(BLangAnnotation annotation) { int nameCPIndex = addUTF8CPEntry(currentPkgInfo, annotation.name.value); int typeSigCPIndex = -1; if (annotation.typeNode != null) { typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, annotation.typeNode.type.getDesc()); } int[] attachPointCPIndexes = new int[annotation.attachmentPoints.size()]; List<BLangAnnotationAttachmentPoint> attachmentPoints = annotation.attachmentPoints; for (int i = 0; i < attachmentPoints.size(); i++) { String pointName = attachmentPoints.get(i).attachmentPoint.getValue(); attachPointCPIndexes[i] = addUTF8CPEntry(currentPkgInfo, pointName); } AnnotationInfo annotationInfo = new AnnotationInfo(nameCPIndex, typeSigCPIndex, annotation.symbol.flags, attachPointCPIndexes); 
currentPkgInfo.annotationInfoMap.put(annotation.name.value, annotationInfo); } private void createTypeDefinitionInfoEntry(BLangTypeDefinition typeDefinition) { BTypeSymbol typeDefSymbol = typeDefinition.symbol; int typeDefNameCPIndex = addUTF8CPEntry(currentPkgInfo, typeDefSymbol.name.value); TypeDefInfo typeDefInfo = new TypeDefInfo(currentPackageRefCPIndex, typeDefNameCPIndex, typeDefSymbol.flags); typeDefInfo.isLabel = typeDefinition.symbol.isLabel; typeDefInfo.typeTag = typeDefSymbol.type.tag; if (typeDefinition.symbol.isLabel) { createLabelTypeTypeDef(typeDefinition, typeDefInfo); addDocumentAttachmentAttrInfo(typeDefinition.docAttachments, typeDefInfo); currentPkgInfo.addTypeDefInfo(typeDefSymbol.name.value, typeDefInfo); return; } switch (typeDefinition.symbol.tag) { case SymTag.OBJECT: createObjectTypeTypeDef(typeDefinition, typeDefInfo, typeDefSymbol); break; case SymTag.RECORD: createRecordTypeTypeDef(typeDefinition, typeDefInfo, typeDefSymbol); break; case SymTag.FINITE_TYPE: createFiniteTypeTypeDef(typeDefinition, typeDefInfo); break; default: createLabelTypeTypeDef(typeDefinition, typeDefInfo); break; } addDocumentAttachmentAttrInfo(typeDefinition.docAttachments, typeDefInfo); currentPkgInfo.addTypeDefInfo(typeDefSymbol.name.value, typeDefInfo); } private void createObjectTypeTypeDef(BLangTypeDefinition typeDefinition, TypeDefInfo typeDefInfo, BTypeSymbol typeDefSymbol) { ObjectTypeInfo objInfo = new ObjectTypeInfo(); BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) typeDefSymbol; objInfo.objectType = (BObjectType) objectSymbol.type; BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDefinition.typeNode; List<BLangVariable> objFields = objectTypeNode.fields; for (BLangVariable objField : objFields) { int fieldNameCPIndex = addUTF8CPEntry(currentPkgInfo, objField.name.value); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, objField.type.getDesc()); objField.symbol.varIndex = getFieldIndex(objField.symbol.type.tag); StructFieldInfo objFieldInfo = new StructFieldInfo(fieldNameCPIndex, sigCPIndex, objField.symbol.flags, objField.symbol.varIndex.value); objFieldInfo.fieldType = objField.type; if (objField.expr != null && objField.expr.getKind() == NodeKind.LITERAL) { DefaultValueAttributeInfo defaultVal = getDefaultValueAttributeInfo((BLangLiteral) objField.expr); objFieldInfo.addAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE, defaultVal); } objInfo.fieldInfoEntries.add(objFieldInfo); addDocumentAttachmentAttrInfo(objField.docAttachments, objFieldInfo); } prepareIndexes(fieldIndexes); int[] fieldCount = new int[]{fieldIndexes.tInt, fieldIndexes.tFloat, fieldIndexes.tString, fieldIndexes.tBoolean, fieldIndexes.tBlob, fieldIndexes.tRef}; addVariableCountAttributeInfo(currentPkgInfo, objInfo, fieldCount); fieldIndexes = new VariableIndex(FIELD); for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) { int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, attachedFunc.funcName.value); BType[] paramTypes = attachedFunc.type.paramTypes.toArray(new BType[0]); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(paramTypes, attachedFunc.type.retType)); int flags = attachedFunc.symbol.flags; objInfo.attachedFuncInfoEntries.add(new AttachedFunctionInfo(funcNameCPIndex, sigCPIndex, flags)); } typeDefInfo.typeInfo = objInfo; } private void createRecordTypeTypeDef(BLangTypeDefinition typeDefinition, TypeDefInfo typeDefInfo, BTypeSymbol typeDefSymbol) { RecordTypeInfo recordInfo = new RecordTypeInfo(); BRecordTypeSymbol recordSymbol = 
(BRecordTypeSymbol) typeDefSymbol; recordInfo.recordType = (BRecordType) recordSymbol.type; BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDefinition.typeNode; List<BLangVariable> recordFields = recordTypeNode.fields; for (BLangVariable recordField : recordFields) { int fieldNameCPIndex = addUTF8CPEntry(currentPkgInfo, recordField.name.value); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, recordField.type.getDesc()); recordField.symbol.varIndex = getFieldIndex(recordField.symbol.type.tag); StructFieldInfo recordFieldInfo = new StructFieldInfo(fieldNameCPIndex, sigCPIndex, recordField.symbol.flags, recordField.symbol.varIndex.value); recordFieldInfo.fieldType = recordField.type; if (recordField.expr != null && recordField.expr.getKind() == NodeKind.LITERAL) { DefaultValueAttributeInfo defaultVal = getDefaultValueAttributeInfo((BLangLiteral) recordField.expr); recordFieldInfo.addAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE, defaultVal); } recordInfo.fieldInfoEntries.add(recordFieldInfo); addDocumentAttachmentAttrInfo(recordField.docAttachments, recordFieldInfo); } prepareIndexes(fieldIndexes); int[] fieldCount = new int[]{fieldIndexes.tInt, fieldIndexes.tFloat, fieldIndexes.tString, fieldIndexes.tBoolean, fieldIndexes.tBlob, fieldIndexes.tRef}; addVariableCountAttributeInfo(currentPkgInfo, recordInfo, fieldCount); fieldIndexes = new VariableIndex(FIELD); BAttachedFunction attachedFunc = recordSymbol.initializerFunc; int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, attachedFunc.funcName.value); BType[] paramTypes = attachedFunc.type.paramTypes.toArray(new BType[0]); int sigCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(paramTypes, attachedFunc.type.retType)); int flags = attachedFunc.symbol.flags; recordInfo.attachedFuncInfoEntries.add(new AttachedFunctionInfo(funcNameCPIndex, sigCPIndex, flags)); typeDefInfo.typeInfo = recordInfo; } private void createFiniteTypeTypeDef(BLangTypeDefinition typeDefinition, TypeDefInfo typeDefInfo) { BLangFiniteTypeNode typeNode = (BLangFiniteTypeNode) typeDefinition.typeNode; FiniteTypeInfo typeInfo = new FiniteTypeInfo(); for (BLangExpression literal : typeNode.valueSpace) { typeInfo.valueSpaceItemInfos.add(new ValueSpaceItemInfo(getDefaultValue((BLangLiteral) literal))); } typeDefInfo.typeInfo = typeInfo; } private void createLabelTypeTypeDef(BLangTypeDefinition typeDefinition, TypeDefInfo typeDefInfo) { int sigCPIndex = addUTF8CPEntry(currentPkgInfo, typeDefinition.typeNode.type.getDesc()); typeDefInfo.typeInfo = new LabelTypeInfo(sigCPIndex); } /** * Creates a {@code FunctionInfo} from the given function node in AST. 
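* Parameter and return signatures are interned in the constant pool, and a WorkerInfo entry is
* created for the default worker and for each declared worker.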
* * @param funcNode function node in AST */ private void createFunctionInfoEntry(BLangFunction funcNode) { BInvokableSymbol funcSymbol = funcNode.symbol; BInvokableType funcType = (BInvokableType) funcSymbol.type; int funcNameCPIndex = this.addUTF8CPEntry(currentPkgInfo, funcNode.name.value); FunctionInfo funcInfo = new FunctionInfo(currentPackageRefCPIndex, funcNameCPIndex); funcInfo.paramTypes = funcType.paramTypes.toArray(new BType[0]); populateInvokableSignature(funcType, funcInfo); funcInfo.flags = funcSymbol.flags; if (funcNode.receiver != null) { funcInfo.attachedToTypeCPIndex = getTypeCPIndex(funcNode.receiver.type).value; } this.addWorkerInfoEntries(funcInfo, funcNode.getWorkers()); addParameterAttributeInfo(funcNode, funcInfo); addDocumentAttachmentAttrInfo(funcNode.docAttachments, funcInfo); this.currentPkgInfo.functionInfoMap.put(funcSymbol.name.value, funcInfo); } private void populateInvokableSignature(BInvokableType bInvokableType, CallableUnitInfo callableUnitInfo) { if (bInvokableType.retType == symTable.nilType) { callableUnitInfo.retParamTypes = new BType[0]; callableUnitInfo.signatureCPIndex = addUTF8CPEntry(this.currentPkgInfo, generateFunctionSig(callableUnitInfo.paramTypes)); } else { callableUnitInfo.retParamTypes = new BType[1]; callableUnitInfo.retParamTypes[0] = bInvokableType.retType; callableUnitInfo.signatureCPIndex = addUTF8CPEntry(this.currentPkgInfo, generateFunctionSig(callableUnitInfo.paramTypes, bInvokableType.retType)); } } private void addWorkerInfoEntries(CallableUnitInfo callableUnitInfo, List<BLangWorker> workers) { UTF8CPEntry workerNameCPEntry = new UTF8CPEntry("default"); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); WorkerInfo defaultWorkerInfo = new WorkerInfo(workerNameCPIndex, "default"); callableUnitInfo.defaultWorkerInfo = defaultWorkerInfo; for (BLangWorker worker : workers) { workerNameCPEntry = new UTF8CPEntry(worker.name.value); workerNameCPIndex = currentPkgInfo.addCPEntry(workerNameCPEntry); WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.getName().value); callableUnitInfo.addWorkerInfo(worker.getName().value, workerInfo); } } @Override public void visit(BLangEndpoint endpointNode) { } private void createServiceInfoEntry(BLangService serviceNode) { int serviceNameCPIndex = addUTF8CPEntry(currentPkgInfo, serviceNode.name.value); if (serviceNode.endpointType != null) { String endPointQName = serviceNode.endpointType.tsymbol.toString(); int epNameCPIndex = addUTF8CPEntry(currentPkgInfo, endPointQName); ServiceInfo serviceInfo = new ServiceInfo(currentPackageRefCPIndex, serviceNameCPIndex, serviceNode.symbol.flags, epNameCPIndex); int localVarAttNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE.value()); LocalVariableAttributeInfo localVarAttributeInfo = new LocalVariableAttributeInfo(localVarAttNameIndex); serviceNode.vars.forEach(var -> visitVarSymbol(var.var.symbol, pvIndexes, localVarAttributeInfo)); serviceInfo.addAttributeInfo(AttributeInfo.Kind.LOCAL_VARIABLES_ATTRIBUTE, localVarAttributeInfo); BLangFunction serviceInitFunction = (BLangFunction) serviceNode.getInitFunction(); createFunctionInfoEntry(serviceInitFunction); serviceInfo.initFuncInfo = currentPkgInfo.functionInfoMap.get(serviceInitFunction.name.toString()); currentPkgInfo.addServiceInfo(serviceNode.name.value, serviceInfo); serviceNode.resources.forEach(res -> createResourceInfoEntry(res, serviceInfo)); addDocumentAttachmentAttrInfo(serviceNode.docAttachments, serviceInfo); } } private 
void createResourceInfoEntry(BLangResource resourceNode, ServiceInfo serviceInfo) { BInvokableType resourceType = (BInvokableType) resourceNode.symbol.type; int serviceNameCPIndex = addUTF8CPEntry(currentPkgInfo, resourceNode.name.value); ResourceInfo resourceInfo = new ResourceInfo(currentPackageRefCPIndex, serviceNameCPIndex); resourceInfo.paramTypes = resourceType.paramTypes.toArray(new BType[0]); setParameterNames(resourceNode, resourceInfo); resourceInfo.retParamTypes = new BType[0]; resourceInfo.signatureCPIndex = addUTF8CPEntry(currentPkgInfo, generateFunctionSig(resourceInfo.paramTypes)); int workerNameCPIndex = addUTF8CPEntry(currentPkgInfo, "default"); resourceInfo.defaultWorkerInfo = new WorkerInfo(workerNameCPIndex, "default"); resourceNode.workers.forEach(worker -> addWorkerInfoEntry(worker, resourceInfo)); serviceInfo.resourceInfoMap.put(resourceNode.name.getValue(), resourceInfo); addDocumentAttachmentAttrInfo(resourceNode.docAttachments, resourceInfo); } private void addWorkerInfoEntry(BLangWorker worker, CallableUnitInfo callableUnitInfo) { int workerNameCPIndex = addUTF8CPEntry(currentPkgInfo, worker.name.value); WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.name.value); callableUnitInfo.addWorkerInfo(worker.name.value, workerInfo); } private ErrorTableAttributeInfo createErrorTableIfAbsent(PackageInfo packageInfo) { ErrorTableAttributeInfo errorTable = (ErrorTableAttributeInfo) packageInfo.getAttributeInfo(AttributeInfo.Kind.ERROR_TABLE); if (errorTable == null) { UTF8CPEntry attribNameCPEntry = new UTF8CPEntry(AttributeInfo.Kind.ERROR_TABLE.toString()); int attribNameCPIndex = packageInfo.addCPEntry(attribNameCPEntry); errorTable = new ErrorTableAttributeInfo(attribNameCPIndex); packageInfo.addAttributeInfo(AttributeInfo.Kind.ERROR_TABLE, errorTable); } return errorTable; } private void addLineNumberInfo(DiagnosticPos pos) { LineNumberInfo lineNumInfo = createLineNumberInfo(pos, currentPkgInfo, currentPkgInfo.instructionList.size()); lineNoAttrInfo.addLineNumberInfo(lineNumInfo); } private LineNumberInfo createLineNumberInfo(DiagnosticPos pos, PackageInfo packageInfo, int ip) { UTF8CPEntry fileNameUTF8CPEntry = new UTF8CPEntry(pos.src.cUnitName); int fileNameCPEntryIndex = packageInfo.addCPEntry(fileNameUTF8CPEntry); LineNumberInfo lineNumberInfo = new LineNumberInfo(pos.sLine, fileNameCPEntryIndex, pos.src.cUnitName, ip); lineNumberInfo.setPackageInfo(packageInfo); lineNumberInfo.setIp(ip); return lineNumberInfo; } private void setParameterNames(BLangResource resourceNode, ResourceInfo resourceInfo) { int paramCount = resourceNode.requiredParams.size(); resourceInfo.paramNameCPIndexes = new int[paramCount]; for (int i = 0; i < paramCount; i++) { BLangVariable paramVar = resourceNode.requiredParams.get(i); String paramName = null; boolean isAnnotated = false; for (BLangAnnotationAttachment annotationAttachment : paramVar.annAttachments) { String attachmentName = annotationAttachment.getAnnotationName().getValue(); if ("PathParam".equalsIgnoreCase(attachmentName) || "QueryParam".equalsIgnoreCase(attachmentName)) { isAnnotated = true; break; } } if (!isAnnotated) { paramName = paramVar.name.getValue(); } int paramNameCPIndex = addUTF8CPEntry(currentPkgInfo, paramName); resourceInfo.paramNameCPIndexes[i] = paramNameCPIndex; } } private WorkerDataChannelInfo getWorkerDataChannelInfo(CallableUnitInfo callableUnit, String source, String target) { WorkerDataChannelInfo workerDataChannelInfo = callableUnit.getWorkerDataChannelInfo( 
WorkerDataChannelInfo.generateChannelName(source, target)); if (workerDataChannelInfo == null) { UTF8CPEntry sourceCPEntry = new UTF8CPEntry(source); int sourceCPIndex = this.currentPkgInfo.addCPEntry(sourceCPEntry); UTF8CPEntry targetCPEntry = new UTF8CPEntry(target); int targetCPIndex = this.currentPkgInfo.addCPEntry(targetCPEntry); workerDataChannelInfo = new WorkerDataChannelInfo(sourceCPIndex, source, targetCPIndex, target); workerDataChannelInfo.setUniqueName(workerDataChannelInfo.getChannelName() + this.workerChannelCount); String uniqueName = workerDataChannelInfo.getUniqueName(); UTF8CPEntry uniqueNameCPEntry = new UTF8CPEntry(uniqueName); int uniqueNameCPIndex = this.currentPkgInfo.addCPEntry(uniqueNameCPEntry); workerDataChannelInfo.setUniqueNameCPIndex(uniqueNameCPIndex); callableUnit.addWorkerDataChannelInfo(workerDataChannelInfo); this.workerChannelCount++; } return workerDataChannelInfo; } private int addUTF8CPEntry(ConstantPool pool, String value) { UTF8CPEntry pkgPathCPEntry = new UTF8CPEntry(value); return pool.addCPEntry(pkgPathCPEntry); } private int addPackageRefCPEntry(ConstantPool pool, PackageID pkgID) { int nameCPIndex = addUTF8CPEntry(pool, pkgID.bvmAlias()); int versionCPIndex = addUTF8CPEntry(pool, pkgID.version.value); PackageRefCPEntry packageRefCPEntry = new PackageRefCPEntry(nameCPIndex, versionCPIndex); return pool.addCPEntry(packageRefCPEntry); } /** * Holds the variable index per type. * * @since 0.94 */ static class VariableIndex { public enum Kind { LOCAL, FIELD, PACKAGE, REG } int tInt = -1; int tFloat = -1; int tString = -1; int tBoolean = -1; int tBlob = -1; int tRef = -1; Kind kind; VariableIndex(Kind kind) { this.kind = kind; } public int[] toArray() { int[] result = new int[6]; result[0] = this.tInt; result[1] = this.tFloat; result[2] = this.tString; result[3] = this.tBoolean; result[4] = this.tBlob; result[5] = this.tRef; return result; } } public void visit(BLangWorker workerNode) { this.genNode(workerNode.body, this.env); } /* visit the workers within fork-join block */ private void processJoinWorkers(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo, SymbolEnv forkJoinEnv) { UTF8CPEntry codeUTF8CPEntry = new UTF8CPEntry(AttributeInfo.Kind.CODE_ATTRIBUTE.toString()); int codeAttribNameIndex = this.currentPkgInfo.addCPEntry(codeUTF8CPEntry); for (BLangWorker worker : forkJoin.workers) { VariableIndex lvIndexesCopy = copyVarIndex(this.lvIndexes); this.regIndexes = new VariableIndex(REG); VariableIndex regIndexesCopy = this.regIndexes; this.regIndexes = new VariableIndex(REG); VariableIndex maxRegIndexesCopy = this.maxRegIndexes; this.maxRegIndexes = new VariableIndex(REG); List<RegIndex> regIndexListCopy = this.regIndexList; this.regIndexList = new ArrayList<>(); WorkerInfo workerInfo = forkjoinInfo.getWorkerInfo(worker.name.value); workerInfo.codeAttributeInfo.attributeNameIndex = codeAttribNameIndex; workerInfo.codeAttributeInfo.codeAddrs = this.nextIP(); this.currentWorkerInfo = workerInfo; this.genNode(worker.body, forkJoinEnv); this.endWorkerInfoUnit(workerInfo.codeAttributeInfo); this.emit(InstructionCodes.HALT); this.lvIndexes = lvIndexesCopy; this.regIndexes = regIndexesCopy; this.maxRegIndexes = maxRegIndexesCopy; this.regIndexList = regIndexListCopy; } } private void populateForkJoinWorkerInfo(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo) { for (BLangWorker worker : forkJoin.workers) { UTF8CPEntry workerNameCPEntry = new UTF8CPEntry(worker.name.value); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); 
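// Each forked worker is registered by name in the ForkjoinInfo; processJoinWorkers later generates
// its code attribute, saving and restoring the surrounding register state around each worker body.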
WorkerInfo workerInfo = new WorkerInfo(workerNameCPIndex, worker.name.value); forkjoinInfo.addWorkerInfo(worker.name.value, workerInfo); } } /* generate code for Join block */ private void processJoinBlock(BLangForkJoin forkJoin, ForkjoinInfo forkjoinInfo, SymbolEnv forkJoinEnv, RegIndex joinVarRegIndex, Operand joinBlockAddr) { UTF8CPEntry joinType = new UTF8CPEntry(forkJoin.joinType.name()); int joinTypeCPIndex = this.currentPkgInfo.addCPEntry(joinType); forkjoinInfo.setJoinType(forkJoin.joinType.name()); forkjoinInfo.setJoinTypeCPIndex(joinTypeCPIndex); joinBlockAddr.value = nextIP(); if (forkJoin.joinResultVar != null) { visitForkJoinParameterDefs(forkJoin.joinResultVar, forkJoinEnv); joinVarRegIndex.value = forkJoin.joinResultVar.symbol.varIndex.value; } if (forkJoin.joinedBody != null) { this.genNode(forkJoin.joinedBody, forkJoinEnv); } } /* generate code for timeout block */ private void processTimeoutBlock(BLangForkJoin forkJoin, SymbolEnv forkJoinEnv, RegIndex timeoutVarRegIndex, Operand timeoutBlockAddr) { /* emit a GOTO instruction to jump out of the timeout block */ Operand gotoAddr = getOperand(-1); this.emit(InstructionCodes.GOTO, gotoAddr); timeoutBlockAddr.value = nextIP(); if (forkJoin.timeoutVariable != null) { visitForkJoinParameterDefs(forkJoin.timeoutVariable, forkJoinEnv); timeoutVarRegIndex.value = forkJoin.timeoutVariable.symbol.varIndex.value; } if (forkJoin.timeoutBody != null) { this.genNode(forkJoin.timeoutBody, forkJoinEnv); } gotoAddr.value = nextIP(); } public void visit(BLangForkJoin forkJoin) { SymbolEnv forkJoinEnv = SymbolEnv.createForkJoinSymbolEnv(forkJoin, this.env); ForkjoinInfo forkjoinInfo = new ForkjoinInfo(this.lvIndexes.toArray()); this.populateForkJoinWorkerInfo(forkJoin, forkjoinInfo); int forkJoinInfoIndex = this.forkJoinCount++; /* was I already inside a fork/join */ if (this.env.forkJoin != null) { this.currentWorkerInfo.addForkJoinInfo(forkjoinInfo); } else { this.currentCallableUnitInfo.defaultWorkerInfo.addForkJoinInfo(forkjoinInfo); } ForkJoinCPEntry forkJoinCPEntry = new ForkJoinCPEntry(forkJoinInfoIndex); Operand forkJoinCPIndex = getOperand(this.currentPkgInfo.addCPEntry(forkJoinCPEntry)); forkjoinInfo.setIndexCPIndex(forkJoinCPIndex.value); RegIndex timeoutRegIndex = new RegIndex(-1, TypeTags.INT); addToRegIndexList(timeoutRegIndex); if (forkJoin.timeoutExpression != null) { forkjoinInfo.setTimeoutAvailable(true); this.genNode(forkJoin.timeoutExpression, forkJoinEnv); timeoutRegIndex.value = forkJoin.timeoutExpression.regIndex.value; } RegIndex joinVarRegIndex = new RegIndex(-1, TypeTags.MAP); Operand joinBlockAddr = getOperand(-1); RegIndex timeoutVarRegIndex = new RegIndex(-1, TypeTags.MAP); Operand timeoutBlockAddr = getOperand(-1); this.emit(InstructionCodes.FORKJOIN, forkJoinCPIndex, timeoutRegIndex, joinVarRegIndex, joinBlockAddr, timeoutVarRegIndex, timeoutBlockAddr); VariableIndex lvIndexesCopy = copyVarIndex(this.lvIndexes); VariableIndex regIndexesCopy = this.regIndexes; VariableIndex maxRegIndexesCopy = this.maxRegIndexes; List<RegIndex> regIndexListCopy = this.regIndexList; this.processJoinWorkers(forkJoin, forkjoinInfo, forkJoinEnv); this.lvIndexes = lvIndexesCopy; this.regIndexes = regIndexesCopy; this.maxRegIndexes = maxRegIndexesCopy; this.regIndexList = regIndexListCopy; int i = 0; int[] joinWrkrNameCPIndexes = new int[forkJoin.joinedWorkers.size()]; String[] joinWrkrNames = new String[joinWrkrNameCPIndexes.length]; for (BLangIdentifier workerName : forkJoin.joinedWorkers) { UTF8CPEntry workerNameCPEntry = new 
UTF8CPEntry(workerName.value); int workerNameCPIndex = this.currentPkgInfo.addCPEntry(workerNameCPEntry); joinWrkrNameCPIndexes[i] = workerNameCPIndex; joinWrkrNames[i] = workerName.value; i++; } forkjoinInfo.setJoinWrkrNameIndexes(joinWrkrNameCPIndexes); forkjoinInfo.setJoinWorkerNames(joinWrkrNames); forkjoinInfo.setWorkerCount(forkJoin.joinedWorkerCount); this.processJoinBlock(forkJoin, forkjoinInfo, forkJoinEnv, joinVarRegIndex, joinBlockAddr); this.processTimeoutBlock(forkJoin, forkJoinEnv, timeoutVarRegIndex, timeoutBlockAddr); } private void visitForkJoinParameterDefs(BLangVariable parameterDef, SymbolEnv forkJoinEnv) { LocalVariableAttributeInfo localVariableAttributeInfo = new LocalVariableAttributeInfo(1); parameterDef.symbol.varIndex = getLVIndex(parameterDef.type.tag); this.genNode(parameterDef, forkJoinEnv); LocalVariableInfo localVariableDetails = this.getLocalVarAttributeInfo(parameterDef.symbol); localVariableAttributeInfo.localVars.add(localVariableDetails); } public void visit(BLangWorkerSend workerSendStmt) { WorkerDataChannelInfo workerDataChannelInfo = this.getWorkerDataChannelInfo(this.currentCallableUnitInfo, this.currentWorkerInfo.getWorkerName(), workerSendStmt.workerIdentifier.value); WorkerDataChannelRefCPEntry wrkrInvRefCPEntry = new WorkerDataChannelRefCPEntry(workerDataChannelInfo .getUniqueNameCPIndex(), workerDataChannelInfo.getUniqueName()); wrkrInvRefCPEntry.setWorkerDataChannelInfo(workerDataChannelInfo); Operand wrkrInvRefCPIndex = getOperand(currentPkgInfo.addCPEntry(wrkrInvRefCPEntry)); if (workerSendStmt.isForkJoinSend) { this.currentWorkerInfo.setWrkrDtChnlRefCPIndex(wrkrInvRefCPIndex.value); this.currentWorkerInfo.setWorkerDataChannelInfoForForkJoin(workerDataChannelInfo); } workerDataChannelInfo.setDataChannelRefIndex(wrkrInvRefCPIndex.value); genNode(workerSendStmt.expr, this.env); RegIndex argReg = workerSendStmt.expr.regIndex; BType bType = workerSendStmt.expr.type; UTF8CPEntry sigCPEntry = new UTF8CPEntry(this.generateSig(new BType[] { bType })); Operand sigCPIndex = getOperand(this.currentPkgInfo.addCPEntry(sigCPEntry)); Operand[] wrkSendArgRegs = new Operand[3]; wrkSendArgRegs[0] = wrkrInvRefCPIndex; wrkSendArgRegs[1] = sigCPIndex; wrkSendArgRegs[2] = argReg; this.emit(InstructionCodes.WRKSEND, wrkSendArgRegs); } public void visit(BLangWorkerReceive workerReceiveStmt) { WorkerDataChannelInfo workerDataChannelInfo = this.getWorkerDataChannelInfo(this.currentCallableUnitInfo, workerReceiveStmt.workerIdentifier.value, this.currentWorkerInfo.getWorkerName()); WorkerDataChannelRefCPEntry wrkrChnlRefCPEntry = new WorkerDataChannelRefCPEntry(workerDataChannelInfo .getUniqueNameCPIndex(), workerDataChannelInfo.getUniqueName()); wrkrChnlRefCPEntry.setWorkerDataChannelInfo(workerDataChannelInfo); Operand wrkrRplyRefCPIndex = getOperand(currentPkgInfo.addCPEntry(wrkrChnlRefCPEntry)); workerDataChannelInfo.setDataChannelRefIndex(wrkrRplyRefCPIndex.value); BLangExpression lExpr = workerReceiveStmt.expr; RegIndex regIndex; BType bType; if (lExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lExpr instanceof BLangLocalVarRef) { lExpr.regIndex = ((BLangLocalVarRef) lExpr).varSymbol.varIndex; regIndex = lExpr.regIndex; } else { lExpr.regIndex = getRegIndex(lExpr.type.tag); lExpr.regIndex.isLHSIndex = true; regIndex = lExpr.regIndex; } bType = lExpr.type; UTF8CPEntry sigCPEntry = new UTF8CPEntry(this.generateSig(new BType[] { bType })); Operand sigCPIndex = getOperand(currentPkgInfo.addCPEntry(sigCPEntry)); Operand[] wrkReceiveArgRegs = new Operand[3]; 
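// WRKRECEIVE takes the channel ref, the expected value's type signature, and the target register;
// if the receive target is not a simple local variable, the value is staged in a fresh LHS register
// and then written back through the normal assignment path below.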
wrkReceiveArgRegs[0] = wrkrRplyRefCPIndex; wrkReceiveArgRegs[1] = sigCPIndex; wrkReceiveArgRegs[2] = regIndex; emit(InstructionCodes.WRKRECEIVE, wrkReceiveArgRegs); if (!(lExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lExpr instanceof BLangLocalVarRef)) { this.varAssignment = true; this.genNode(lExpr, this.env); this.varAssignment = false; } } public void visit(BLangAction actionNode) { } public void visit(BLangForever foreverStatement) { /* ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { /* ignore */ } public void visit(BLangIdentifier identifierNode) { /* ignore */ } public void visit(BLangAnnotation annotationNode) { /* ignore */ } public void visit(BLangAnnotAttribute annotationAttribute) { /* ignore */ } public void visit(BLangAnnotationAttachment annAttachmentNode) { /* ignore */ } public void visit(BLangAnnotAttachmentAttributeValue annotAttributeValue) { /* ignore */ } public void visit(BLangAnnotAttachmentAttribute annotAttachmentAttribute) { /* ignore */ } public void visit(BLangAssignment assignNode) { BLangExpression lhrExpr = assignNode.varRef; if (assignNode.declaredWithVar) { BLangVariableReference varRef = (BLangVariableReference) lhrExpr; visitVarSymbol((BVarSymbol) varRef.symbol, lvIndexes, localVarAttrInfo); } BLangExpression rhsExpr = assignNode.expr; if (lhrExpr.type.tag != TypeTags.NONE && lhrExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lhrExpr instanceof BLangLocalVarRef) { lhrExpr.regIndex = ((BVarSymbol) ((BLangVariableReference) lhrExpr).symbol).varIndex; rhsExpr.regIndex = lhrExpr.regIndex; } genNode(rhsExpr, this.env); if (lhrExpr.type.tag == TypeTags.NONE || (lhrExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && lhrExpr instanceof BLangLocalVarRef)) { return; } varAssignment = true; lhrExpr.regIndex = rhsExpr.regIndex; genNode(lhrExpr, this.env); varAssignment = false; } public void visit(BLangContinue continueNode) { generateFinallyInstructions(continueNode, NodeKind.WHILE, NodeKind.FOREACH); this.emit(this.loopResetInstructionStack.peek()); } public void visit(BLangBreak breakNode) { generateFinallyInstructions(breakNode, NodeKind.WHILE, NodeKind.FOREACH); this.emit(this.loopExitInstructionStack.peek()); } public void visit(BLangThrow throwNode) { genNode(throwNode.expr, env); emit(InstructionFactory.get(InstructionCodes.THROW, throwNode.expr.regIndex)); } public void visit(BLangIf ifNode) { addLineNumberInfo(ifNode.pos); genNode(ifNode.expr, this.env); Operand ifCondJumpAddr = getOperand(-1); emit(InstructionCodes.BR_FALSE, ifNode.expr.regIndex, ifCondJumpAddr); genNode(ifNode.body, this.env); Operand endJumpAddr = getOperand(-1); emit(InstructionCodes.GOTO, endJumpAddr); ifCondJumpAddr.value = nextIP(); if (ifNode.elseStmt != null) { genNode(ifNode.elseStmt, this.env); } endJumpAddr.value = nextIP(); } public void visit(BLangForeach foreach) { Operand iteratorVar = getLVIndex(TypeTags.ITERATOR); Operand conditionVar = getLVIndex(TypeTags.BOOLEAN); this.genNode(foreach.collection, env); this.emit(InstructionCodes.ITR_NEW, foreach.collection.regIndex, iteratorVar); Operand foreachStartAddress = new Operand(nextIP()); Operand foreachEndAddress = new Operand(-1); Instruction gotoStartInstruction = InstructionFactory.get(InstructionCodes.GOTO, foreachStartAddress); Instruction gotoEndInstruction = InstructionFactory.get(InstructionCodes.GOTO, foreachEndAddress); this.emit(InstructionCodes.ITR_HAS_NEXT, iteratorVar, conditionVar); this.emit(InstructionCodes.BR_FALSE, conditionVar, foreachEndAddress); generateForeachVarAssignment(foreach, 
iteratorVar); this.loopResetInstructionStack.push(gotoStartInstruction); this.loopExitInstructionStack.push(gotoEndInstruction); this.genNode(foreach.body, env); this.loopResetInstructionStack.pop(); this.loopExitInstructionStack.pop(); this.emit(gotoStartInstruction); foreachEndAddress.value = this.nextIP(); } public void visit(BLangWhile whileNode) { Instruction gotoTopJumpInstr = InstructionFactory.get(InstructionCodes.GOTO, getOperand(this.nextIP())); this.genNode(whileNode.expr, this.env); Operand exitLoopJumpAddr = getOperand(-1); Instruction exitLoopJumpInstr = InstructionFactory.get(InstructionCodes.GOTO, exitLoopJumpAddr); emit(InstructionCodes.BR_FALSE, whileNode.expr.regIndex, exitLoopJumpAddr); this.loopResetInstructionStack.push(gotoTopJumpInstr); this.loopExitInstructionStack.push(exitLoopJumpInstr); this.genNode(whileNode.body, this.env); this.loopResetInstructionStack.pop(); this.loopExitInstructionStack.pop(); this.emit(gotoTopJumpInstr); exitLoopJumpAddr.value = nextIP(); } public void visit(BLangLock lockNode) { if (lockNode.lockVariables.isEmpty()) { this.genNode(lockNode.body, this.env); return; } Operand gotoLockEndAddr = getOperand(-1); Instruction instructGotoLockEnd = InstructionFactory.get(InstructionCodes.GOTO, gotoLockEndAddr); Operand[] operands = getOperands(lockNode); ErrorTableAttributeInfo errorTable = createErrorTableIfAbsent(currentPkgInfo); int fromIP = nextIP(); emit((InstructionCodes.LOCK), operands); this.genNode(lockNode.body, this.env); int toIP = nextIP() - 1; emit((InstructionCodes.UNLOCK), operands); emit(instructGotoLockEnd); ErrorTableEntry errorTableEntry = new ErrorTableEntry(fromIP, toIP, nextIP(), 0, -1); errorTable.addErrorTableEntry(errorTableEntry); emit((InstructionCodes.UNLOCK), operands); emit(InstructionFactory.get(InstructionCodes.THROW, getOperand(-1))); gotoLockEndAddr.value = nextIP(); } private Operand[] getOperands(BLangLock lockNode) { Operand[] operands = new Operand[(lockNode.lockVariables.size() * 3) + 1]; int i = 0; operands[i++] = new Operand(lockNode.lockVariables.size()); for (BVarSymbol varSymbol : lockNode.lockVariables) { BPackageSymbol pkgSymbol; BSymbol ownerSymbol = varSymbol.owner; if (ownerSymbol.tag == SymTag.SERVICE) { pkgSymbol = (BPackageSymbol) ownerSymbol.owner; } else { pkgSymbol = (BPackageSymbol) ownerSymbol; } int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, pkgSymbol.pkgID); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, varSymbol.getType().getDesc()); TypeRefCPEntry typeRefCPEntry = new TypeRefCPEntry(typeSigCPIndex); operands[i++] = getOperand(currentPkgInfo.addCPEntry(typeRefCPEntry)); operands[i++] = getOperand(pkgRefCPIndex); operands[i++] = varSymbol.varIndex; } return operands; } public void visit(BLangTransaction transactionNode) { ++transactionIndex; Operand transactionIndexOperand = getOperand(transactionIndex); Operand retryCountRegIndex = new RegIndex(-1, TypeTags.INT); if (transactionNode.retryCount != null) { this.genNode(transactionNode.retryCount, this.env); retryCountRegIndex = transactionNode.retryCount.regIndex; } Operand committedFuncRegIndex = new RegIndex(-1, TypeTags.INVOKABLE); if (transactionNode.onCommitFunction != null) { committedFuncRegIndex.value = getFuncRefCPIndex( (BInvokableSymbol) ((BLangFunctionVarRef) transactionNode.onCommitFunction).symbol); } Operand abortedFuncRegIndex = new RegIndex(-1, TypeTags.INVOKABLE); if (transactionNode.onAbortFunction != null) { abortedFuncRegIndex.value = getFuncRefCPIndex( (BInvokableSymbol) ((BLangFunctionVarRef) 
transactionNode.onAbortFunction).symbol); } ErrorTableAttributeInfo errorTable = createErrorTableIfAbsent(currentPkgInfo); Operand transStmtEndAddr = getOperand(-1); Operand transStmtAbortEndAddr = getOperand(-1); Operand transStmtFailEndAddr = getOperand(-1); Instruction gotoAbortTransBlockEnd = InstructionFactory.get(InstructionCodes.GOTO, transStmtAbortEndAddr); Instruction gotoFailTransBlockEnd = InstructionFactory.get(InstructionCodes.GOTO, transStmtFailEndAddr); abortInstructions.push(gotoAbortTransBlockEnd); failInstructions.push(gotoFailTransBlockEnd); this.emit(InstructionCodes.TR_BEGIN, transactionIndexOperand, retryCountRegIndex, committedFuncRegIndex, abortedFuncRegIndex); Operand transBlockStartAddr = getOperand(nextIP()); Operand retryEndWithThrowAddr = getOperand(-1); Operand retryEndWithNoThrowAddr = getOperand(-1); this.emit(InstructionCodes.TR_RETRY, transactionIndexOperand, retryEndWithThrowAddr, retryEndWithNoThrowAddr); this.genNode(transactionNode.transactionBody, this.env); int transBlockEndAddr = nextIP(); this.emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.SUCCESS.value())); abortInstructions.pop(); failInstructions.pop(); emit(InstructionCodes.GOTO, transStmtEndAddr); int errorTargetIP = nextIP(); transStmtFailEndAddr.value = errorTargetIP; emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.FAILED.value())); if (transactionNode.onRetryBody != null) { this.genNode(transactionNode.onRetryBody, this.env); } emit(InstructionCodes.GOTO, transBlockStartAddr); retryEndWithThrowAddr.value = nextIP(); emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.END.value())); emit(InstructionCodes.THROW, getOperand(-1)); ErrorTableEntry errorTableEntry = new ErrorTableEntry(transBlockStartAddr.value, transBlockEndAddr, errorTargetIP, 0, -1); errorTable.addErrorTableEntry(errorTableEntry); transStmtAbortEndAddr.value = nextIP(); emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.ABORTED.value())); int transactionEndIp = nextIP(); transStmtEndAddr.value = transactionEndIp; retryEndWithNoThrowAddr.value = transactionEndIp; emit(InstructionCodes.TR_END, transactionIndexOperand, getOperand(TransactionStatus.END.value())); } public void visit(BLangAbort abortNode) { generateFinallyInstructions(abortNode, NodeKind.TRANSACTION); this.emit(abortInstructions.peek()); } public void visit(BLangDone doneNode) { generateFinallyInstructions(doneNode, NodeKind.DONE); this.emit(InstructionCodes.HALT); } public void visit(BLangRetry retryNode) { generateFinallyInstructions(retryNode, NodeKind.TRANSACTION); this.emit(failInstructions.peek()); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { xmlnsStmtNode.xmlnsDecl.accept(this); } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangLocalXMLNS xmlnsNode) { RegIndex lvIndex = getLVIndex(TypeTags.STRING); BLangExpression nsURIExpr = xmlnsNode.namespaceURI; nsURIExpr.regIndex = createLHSRegIndex(lvIndex); genNode(nsURIExpr, env); BXMLNSSymbol nsSymbol = (BXMLNSSymbol) xmlnsNode.symbol; nsSymbol.nsURIIndex = lvIndex; } @Override public void visit(BLangPackageXMLNS xmlnsNode) { BLangExpression nsURIExpr = xmlnsNode.namespaceURI; Operand pvIndex = getPVIndex(TypeTags.STRING); BXMLNSSymbol nsSymbol = (BXMLNSSymbol) xmlnsNode.symbol; genNode(nsURIExpr, env); nsSymbol.nsURIIndex = pvIndex; int pkgIndex = addPackageRefCPEntry(this.currentPkgInfo, this.currentPkgID); 
emit(InstructionCodes.SGSTORE, getOperand(pkgIndex), nsURIExpr.regIndex, pvIndex); } @Override public void visit(BLangXMLQName xmlQName) { if (!xmlQName.isUsedInXML) { xmlQName.regIndex = calcAndGetExprRegIndex(xmlQName); String qName = xmlQName.namespaceURI == null ? xmlQName.localname.value : ("{" + xmlQName.namespaceURI + "}" + xmlQName.localname); xmlQName.regIndex = createStringLiteral(qName, xmlQName.regIndex, env); return; } RegIndex nsURIIndex = getNamespaceURIIndex(xmlQName.nsSymbol, env); RegIndex localnameIndex = createStringLiteral(xmlQName.localname.value, null, env); RegIndex prefixIndex = createStringLiteral(xmlQName.prefix.value, null, env); xmlQName.regIndex = calcAndGetExprRegIndex(xmlQName.regIndex, TypeTags.XML); emit(InstructionCodes.NEWQNAME, localnameIndex, nsURIIndex, prefixIndex, xmlQName.regIndex); } @Override public void visit(BLangXMLAttribute xmlAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(xmlAttribute, env); BLangExpression attrNameExpr = xmlAttribute.name; attrNameExpr.regIndex = calcAndGetExprRegIndex(attrNameExpr); genNode(attrNameExpr, xmlAttributeEnv); RegIndex attrQNameRegIndex = attrNameExpr.regIndex; if (attrNameExpr.getKind() != NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, attrQNameRegIndex, localNameRegIndex, uriRegIndex); attrQNameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(((BLangXMLElementLiteral) env.node).namespacesInScope, localNameRegIndex, uriRegIndex, attrQNameRegIndex, xmlAttribute.pos, xmlAttributeEnv); attrNameExpr.regIndex = attrQNameRegIndex; } BLangExpression attrValueExpr = xmlAttribute.value; genNode(attrValueExpr, env); if (xmlAttribute.isNamespaceDeclr) { ((BXMLNSSymbol) xmlAttribute.symbol).nsURIIndex = attrValueExpr.regIndex; } } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(xmlElementLiteral, env); xmlElementLiteral.regIndex = calcAndGetExprRegIndex(xmlElementLiteral); xmlElementLiteral.inlineNamespaces.forEach(xmlns -> { genNode(xmlns, xmlElementEnv); }); BLangExpression startTagName = (BLangExpression) xmlElementLiteral.getStartTagName(); RegIndex startTagNameRegIndex = visitXMLTagName(startTagName, xmlElementEnv, xmlElementLiteral); BLangExpression endTagName = (BLangExpression) xmlElementLiteral.getEndTagName(); RegIndex endTagNameRegIndex = endTagName == null ? 
startTagNameRegIndex : visitXMLTagName(endTagName, xmlElementEnv, xmlElementLiteral); RegIndex defaultNsURIIndex = getNamespaceURIIndex(xmlElementLiteral.defaultNsSymbol, xmlElementEnv); emit(InstructionCodes.NEWXMLELEMENT, xmlElementLiteral.regIndex, startTagNameRegIndex, endTagNameRegIndex, defaultNsURIIndex); xmlElementLiteral.namespacesInScope.forEach((name, symbol) -> { BLangXMLQName nsQName = new BLangXMLQName(name.getValue(), XMLConstants.XMLNS_ATTRIBUTE); genNode(nsQName, xmlElementEnv); RegIndex uriIndex = getNamespaceURIIndex(symbol, xmlElementEnv); emit(InstructionCodes.XMLATTRSTORE, xmlElementLiteral.regIndex, nsQName.regIndex, uriIndex); }); xmlElementLiteral.attributes.forEach(attribute -> { genNode(attribute, xmlElementEnv); emit(InstructionCodes.XMLATTRSTORE, xmlElementLiteral.regIndex, attribute.name.regIndex, attribute.value.regIndex); }); xmlElementLiteral.modifiedChildren.forEach(child -> { genNode(child, xmlElementEnv); emit(InstructionCodes.XMLSEQSTORE, xmlElementLiteral.regIndex, child.regIndex); }); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { if (xmlTextLiteral.type == null) { xmlTextLiteral.regIndex = calcAndGetExprRegIndex(xmlTextLiteral.regIndex, TypeTags.XML); } else { xmlTextLiteral.regIndex = calcAndGetExprRegIndex(xmlTextLiteral); } genNode(xmlTextLiteral.concatExpr, env); emit(InstructionCodes.NEWXMLTEXT, xmlTextLiteral.regIndex, xmlTextLiteral.concatExpr.regIndex); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.regIndex = calcAndGetExprRegIndex(xmlCommentLiteral); genNode(xmlCommentLiteral.concatExpr, env); emit(InstructionCodes.NEWXMLCOMMENT, xmlCommentLiteral.regIndex, xmlCommentLiteral.concatExpr.regIndex); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.regIndex = calcAndGetExprRegIndex(xmlProcInsLiteral); genNode(xmlProcInsLiteral.dataConcatExpr, env); genNode(xmlProcInsLiteral.target, env); emit(InstructionCodes.NEWXMLPI, xmlProcInsLiteral.regIndex, xmlProcInsLiteral.target.regIndex, xmlProcInsLiteral.dataConcatExpr.regIndex); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.concatExpr.regIndex = calcAndGetExprRegIndex(xmlQuotedString); genNode(xmlQuotedString.concatExpr, env); xmlQuotedString.regIndex = xmlQuotedString.concatExpr.regIndex; } @Override public void visit(BLangXMLSequenceLiteral xmlSeqLiteral) { xmlSeqLiteral.regIndex = calcAndGetExprRegIndex(xmlSeqLiteral); emit(InstructionCodes.NEWXMLSEQ, xmlSeqLiteral.regIndex); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.concatExpr.regIndex = calcAndGetExprRegIndex(stringTemplateLiteral); genNode(stringTemplateLiteral.concatExpr, env); stringTemplateLiteral.regIndex = stringTemplateLiteral.concatExpr.regIndex; } @Override public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { boolean variableStore = this.varAssignment; this.varAssignment = false; genNode(xmlAttributeAccessExpr.expr, this.env); RegIndex varRefRegIndex = xmlAttributeAccessExpr.expr.regIndex; if (xmlAttributeAccessExpr.indexExpr == null) { RegIndex xmlValueRegIndex = calcAndGetExprRegIndex(xmlAttributeAccessExpr); emit(InstructionCodes.XML2XMLATTRS, varRefRegIndex, xmlValueRegIndex); return; } BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr; genNode(xmlAttributeAccessExpr.indexExpr, this.env); RegIndex qnameRegIndex = xmlAttributeAccessExpr.indexExpr.regIndex; if (indexExpr.getKind() != 
NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, qnameRegIndex, localNameRegIndex, uriRegIndex); qnameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(xmlAttributeAccessExpr.namespaces, localNameRegIndex, uriRegIndex, qnameRegIndex, indexExpr.pos, env); } if (variableStore) { emit(InstructionCodes.XMLATTRSTORE, varRefRegIndex, qnameRegIndex, xmlAttributeAccessExpr.regIndex); } else { RegIndex xmlValueRegIndex = calcAndGetExprRegIndex(xmlAttributeAccessExpr); emit(InstructionCodes.XMLATTRLOAD, varRefRegIndex, qnameRegIndex, xmlValueRegIndex); } } public void visit(BLangTryCatchFinally tryNode) { Operand gotoTryCatchEndAddr = getOperand(-1); Instruction instructGotoTryCatchEnd = InstructionFactory.get(InstructionCodes.GOTO, gotoTryCatchEndAddr); List<int[]> unhandledErrorRangeList = new ArrayList<>(); ErrorTableAttributeInfo errorTable = createErrorTableIfAbsent(currentPkgInfo); int fromIP = nextIP(); genNode(tryNode.tryBody, env); int toIP = nextIP() - 1; if (tryNode.finallyBody != null) { genNode(tryNode.finallyBody, env); } emit(instructGotoTryCatchEnd); unhandledErrorRangeList.add(new int[]{fromIP, toIP}); int order = 0; for (BLangCatch bLangCatch : tryNode.getCatchBlocks()) { addLineNumberInfo(bLangCatch.pos); int targetIP = nextIP(); genNode(bLangCatch, env); unhandledErrorRangeList.add(new int[]{targetIP, nextIP() - 1}); if (tryNode.finallyBody != null) { genNode(tryNode.finallyBody, env); } emit(instructGotoTryCatchEnd); BTypeSymbol structSymbol = bLangCatch.param.symbol.type.tsymbol; BPackageSymbol packageSymbol = (BPackageSymbol) bLangCatch.param.symbol.type.tsymbol.owner; int pkgCPIndex = addPackageRefCPEntry(currentPkgInfo, packageSymbol.pkgID); int structNameCPIndex = addUTF8CPEntry(currentPkgInfo, structSymbol.name.value); StructureRefCPEntry structureRefCPEntry = new StructureRefCPEntry(pkgCPIndex, structNameCPIndex); int structCPEntryIndex = currentPkgInfo.addCPEntry(structureRefCPEntry); ErrorTableEntry errorTableEntry = new ErrorTableEntry(fromIP, toIP, targetIP, order++, structCPEntryIndex); errorTable.addErrorTableEntry(errorTableEntry); } if (tryNode.finallyBody != null) { for (int[] range : unhandledErrorRangeList) { ErrorTableEntry errorTableEntry = new ErrorTableEntry(range[0], range[1], nextIP(), order++, -1); errorTable.addErrorTableEntry(errorTableEntry); } genNode(tryNode.finallyBody, env); emit(InstructionFactory.get(InstructionCodes.THROW, getOperand(-1))); } gotoTryCatchEndAddr.value = nextIP(); } public void visit(BLangCatch bLangCatch) { BLangVariable variable = bLangCatch.param; RegIndex lvIndex = getLVIndex(variable.symbol.type.tag); variable.symbol.varIndex = lvIndex; emit(InstructionFactory.get(InstructionCodes.ERRSTORE, lvIndex)); genNode(bLangCatch.body, env); } public void visit(BLangExpressionStmt exprStmtNode) { genNode(exprStmtNode.expr, this.env); } @Override public void visit(BLangIntRangeExpression rangeExpr) { BLangExpression startExpr = rangeExpr.startExpr; BLangExpression endExpr = rangeExpr.endExpr; genNode(startExpr, env); genNode(endExpr, env); rangeExpr.regIndex = calcAndGetExprRegIndex(rangeExpr); emit(InstructionCodes.NEW_INT_RANGE, startExpr.regIndex, endExpr.regIndex, rangeExpr.regIndex); } private void generateForeachVarAssignment(BLangForeach foreach, Operand iteratorIndex) { List<BLangVariableReference> variables = foreach.varRefs.stream() .map(expr -> (BLangVariableReference) expr) 
.collect(Collectors.toList()); variables.stream() .filter(v -> v.type.tag != TypeTags.NONE) .forEach(varRef -> visitVarSymbol((BVarSymbol) varRef.symbol, lvIndexes, localVarAttrInfo)); List<Operand> nextOperands = new ArrayList<>(); nextOperands.add(iteratorIndex); nextOperands.add(new Operand(variables.size())); foreach.varTypes.forEach(v -> nextOperands.add(new Operand(v.tag))); nextOperands.add(new Operand(variables.size())); for (int i = 0; i < variables.size(); i++) { BLangVariableReference varRef = variables.get(i); nextOperands.add(Optional.ofNullable(((BVarSymbol) varRef.symbol).varIndex) .orElse(getRegIndex(foreach.varTypes.get(i).tag))); } this.emit(InstructionCodes.ITR_NEXT, nextOperands.toArray(new Operand[0])); } private void visitFunctionPointerLoad(BLangExpression fpExpr, BInvokableSymbol funcSymbol) { int pkgRefCPIndex = addPackageRefCPEntry(currentPkgInfo, funcSymbol.pkgID); int funcNameCPIndex = addUTF8CPEntry(currentPkgInfo, funcSymbol.name.value); FunctionRefCPEntry funcRefCPEntry = new FunctionRefCPEntry(pkgRefCPIndex, funcNameCPIndex); Operand typeCPIndex = getTypeCPIndex(funcSymbol.type); int funcRefCPIndex = currentPkgInfo.addCPEntry(funcRefCPEntry); RegIndex nextIndex = calcAndGetExprRegIndex(fpExpr); Operand[] operands; if (!(fpExpr instanceof BLangLambdaFunction)) { operands = new Operand[4]; operands[0] = getOperand(funcRefCPIndex); operands[1] = nextIndex; operands[2] = typeCPIndex; operands[3] = new Operand(0); } else { Operand[] closureIndexes = calcAndGetClosureIndexes(((BLangLambdaFunction) fpExpr).function); operands = new Operand[3 + closureIndexes.length]; operands[0] = getOperand(funcRefCPIndex); operands[1] = nextIndex; operands[2] = typeCPIndex; System.arraycopy(closureIndexes, 0, operands, 3, closureIndexes.length); } emit(InstructionCodes.FPLOAD, operands); } private Operand[] calcAndGetClosureIndexes(BLangFunction function) { List<Operand> operands = new ArrayList<>(); int closureOperandPairs = 0; for (BVarSymbol symbol : function.symbol.params) { if (!symbol.closure || function.requiredParams.stream().anyMatch(var -> var.symbol.equals(symbol))) { continue; } Operand type = new Operand(symbol.type.tag); Operand index = new Operand(symbol.varIndex.value); operands.add(type); operands.add(index); closureOperandPairs++; } operands.add(0, new Operand(closureOperandPairs)); return operands.toArray(new Operand[]{}); } private void generateFinallyInstructions(BLangStatement statement) { generateFinallyInstructions(statement, new NodeKind[0]); } private void generateFinallyInstructions(BLangStatement statement, NodeKind... 
expectedParentKinds) { BLangStatement current = statement; while (current != null && current.statementLink.parent != null) { BLangStatement parent = current.statementLink.parent.statement; for (NodeKind expected : expectedParentKinds) { if (expected == parent.getKind()) { return; } } if (NodeKind.TRY == parent.getKind()) { BLangTryCatchFinally tryCatchFinally = (BLangTryCatchFinally) parent; if (tryCatchFinally.finallyBody != null && current != tryCatchFinally.finallyBody) { genNode(tryCatchFinally.finallyBody, env); } } else if (NodeKind.LOCK == parent.getKind()) { BLangLock lockNode = (BLangLock) parent; if (!lockNode.lockVariables.isEmpty()) { Operand[] operands = getOperands(lockNode); emit((InstructionCodes.UNLOCK), operands); } } current = parent; } } private RegIndex getNamespaceURIIndex(BXMLNSSymbol namespaceSymbol, SymbolEnv env) { if (namespaceSymbol == null && env.node.getKind() == NodeKind.XML_ATTRIBUTE) { return createStringLiteral(XMLConstants.NULL_NS_URI, null, env); } if (namespaceSymbol == null) { return createStringLiteral(null, null, env); } if ((namespaceSymbol.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (namespaceSymbol.owner.tag & SymTag.SERVICE) == SymTag.SERVICE) { return (RegIndex) namespaceSymbol.nsURIIndex; } int pkgIndex = addPackageRefCPEntry(this.currentPkgInfo, namespaceSymbol.owner.pkgID); RegIndex index = getRegIndex(TypeTags.STRING); emit(InstructionCodes.SGLOAD, getOperand(pkgIndex), namespaceSymbol.nsURIIndex, index); return index; } private void generateURILookupInstructions(Map<Name, BXMLNSSymbol> namespaces, RegIndex localNameRegIndex, RegIndex uriRegIndex, RegIndex targetQNameRegIndex, DiagnosticPos pos, SymbolEnv symbolEnv) { if (namespaces.isEmpty()) { createQNameWithoutPrefix(localNameRegIndex, uriRegIndex, targetQNameRegIndex); return; } Stack<Operand> endJumpInstrStack = new Stack<>(); String prefix; for (Entry<Name, BXMLNSSymbol> keyValues : namespaces.entrySet()) { prefix = keyValues.getKey().getValue(); if (prefix.equals(XMLConstants.DEFAULT_NS_PREFIX)) { continue; } BXMLNSSymbol nsSymbol = keyValues.getValue(); int opcode = getOpcode(TypeTags.STRING, InstructionCodes.IEQ); RegIndex conditionExprIndex = getRegIndex(TypeTags.BOOLEAN); emit(opcode, uriRegIndex, getNamespaceURIIndex(nsSymbol, symbolEnv), conditionExprIndex); Operand ifCondJumpAddr = getOperand(-1); emit(InstructionCodes.BR_FALSE, conditionExprIndex, ifCondJumpAddr); RegIndex prefixIndex = createStringLiteral(prefix, null, env); emit(InstructionCodes.NEWQNAME, localNameRegIndex, uriRegIndex, prefixIndex, targetQNameRegIndex); Operand endJumpAddr = getOperand(-1); emit(InstructionCodes.GOTO, endJumpAddr); endJumpInstrStack.add(endJumpAddr); ifCondJumpAddr.value = nextIP(); } createQNameWithoutPrefix(localNameRegIndex, uriRegIndex, targetQNameRegIndex); while (!endJumpInstrStack.isEmpty()) { endJumpInstrStack.pop().value = nextIP(); } } private void createQNameWithoutPrefix(RegIndex localNameRegIndex, RegIndex uriRegIndex, RegIndex targetQNameRegIndex) { RegIndex prefixIndex = createStringLiteral(null, null, env); emit(InstructionCodes.NEWQNAME, localNameRegIndex, uriRegIndex, prefixIndex, targetQNameRegIndex); } /** * Creates a string literal expression, generate the code and returns the registry index. 
* * @param value String value to generate the string literal * @param regIndex String literal expression's reg index * @param env Environment * @return String registry index of the generated string */ private RegIndex createStringLiteral(String value, RegIndex regIndex, SymbolEnv env) { BLangLiteral prefixLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); prefixLiteral.value = value; prefixLiteral.typeTag = TypeTags.STRING; prefixLiteral.type = symTable.stringType; prefixLiteral.regIndex = regIndex; genNode(prefixLiteral, env); return prefixLiteral.regIndex; } /** * Visit XML tag name and return the index of the tag name in the reference registry. * * @param tagName Tag name expression * @param xmlElementEnv Environment of the XML element of the tag * @param xmlElementLiteral XML element literal to which the tag name belongs to * @return Index of the tag name, in the reference registry */ private RegIndex visitXMLTagName(BLangExpression tagName, SymbolEnv xmlElementEnv, BLangXMLElementLiteral xmlElementLiteral) { genNode(tagName, xmlElementEnv); RegIndex startTagNameRegIndex = tagName.regIndex; if (tagName.getKind() != NodeKind.XML_QNAME) { RegIndex localNameRegIndex = getRegIndex(TypeTags.STRING); RegIndex uriRegIndex = getRegIndex(TypeTags.STRING); emit(InstructionCodes.S2QNAME, startTagNameRegIndex, localNameRegIndex, uriRegIndex); startTagNameRegIndex = getRegIndex(TypeTags.XML); generateURILookupInstructions(xmlElementLiteral.namespacesInScope, localNameRegIndex, uriRegIndex, startTagNameRegIndex, xmlElementLiteral.pos, xmlElementEnv); tagName.regIndex = startTagNameRegIndex; } return startTagNameRegIndex; } /** * Get the constant pool entry index of a given type. * * @param type Type to get the constant pool entry index * @return constant pool entry index of the type */ private Operand getTypeCPIndex(BType type) { int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, type.getDesc()); TypeRefCPEntry typeRefCPEntry = new TypeRefCPEntry(typeSigCPIndex); return getOperand(currentPkgInfo.addCPEntry(typeRefCPEntry)); } private void addDocumentAttachmentAttrInfo(List<BLangDocumentation> docNodeList, AttributeInfoPool attrInfoPool) { docNodeList.forEach(docNode -> addDocumentAttachmentAttrInfo(docNode, attrInfoPool)); } private void addDocumentAttachmentAttrInfo(BLangDocumentation docNode, AttributeInfoPool attrInfoPool) { int docAttrIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.DOCUMENT_ATTACHMENT_ATTRIBUTE.value()); int descCPIndex = addUTF8CPEntry(currentPkgInfo, docNode.documentationText); DocumentationAttributeInfo docAttributeInfo = new DocumentationAttributeInfo(docAttrIndex, descCPIndex); for (BLangDocumentationAttribute paramDocNode : docNode.attributes) { int nameCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.documentationField.value); int typeSigCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.type.getDesc()); int paramKindCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.docTag.getValue()); int descriptionCPIndex = addUTF8CPEntry(currentPkgInfo, paramDocNode.documentationText); ParameterDocumentInfo paramDocInfo = new ParameterDocumentInfo( nameCPIndex, typeSigCPIndex, paramKindCPIndex, descriptionCPIndex); docAttributeInfo.paramDocInfoList.add(paramDocInfo); } attrInfoPool.addAttributeInfo(AttributeInfo.Kind.DOCUMENT_ATTACHMENT_ATTRIBUTE, docAttributeInfo); } private void addParameterAttributeInfo(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo) { int paramAttrIndex = addUTF8CPEntry(currentPkgInfo, 
AttributeInfo.Kind.PARAMETERS_ATTRIBUTE.value()); ParameterAttributeInfo paramAttrInfo = new ParameterAttributeInfo(paramAttrIndex); paramAttrInfo.requiredParamsCount = invokableNode.requiredParams.size(); paramAttrInfo.defaultableParamsCount = invokableNode.defaultableParams.size(); paramAttrInfo.restParamCount = invokableNode.restParam != null ? 1 : 0; callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.PARAMETERS_ATTRIBUTE, paramAttrInfo); addParameterDefaultValues(invokableNode, callableUnitInfo); } private void addParameterDefaultValues(BLangInvokableNode invokableNode, CallableUnitInfo callableUnitInfo) { int paramDefaultsAttrNameIndex = addUTF8CPEntry(currentPkgInfo, AttributeInfo.Kind.PARAMETER_DEFAULTS_ATTRIBUTE.value()); ParamDefaultValueAttributeInfo paramDefaulValAttrInfo = new ParamDefaultValueAttributeInfo(paramDefaultsAttrNameIndex); for (BLangVariableDef param : invokableNode.defaultableParams) { DefaultValue defaultVal = getDefaultValue((BLangLiteral) param.var.expr); paramDefaulValAttrInfo.addParamDefaultValueInfo(defaultVal); } callableUnitInfo.addAttributeInfo(AttributeInfo.Kind.PARAMETER_DEFAULTS_ATTRIBUTE, paramDefaulValAttrInfo); } private int getValueToRefTypeCastOpcode(int typeTag) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = InstructionCodes.I2ANY; break; case TypeTags.FLOAT: opcode = InstructionCodes.F2ANY; break; case TypeTags.STRING: opcode = InstructionCodes.S2ANY; break; case TypeTags.BOOLEAN: opcode = InstructionCodes.B2ANY; break; case TypeTags.BLOB: opcode = InstructionCodes.L2ANY; break; default: opcode = InstructionCodes.NOP; break; } return opcode; } private int getRefToValueTypeCastOpcode(int typeTag) { int opcode; switch (typeTag) { case TypeTags.INT: opcode = InstructionCodes.ANY2I; break; case TypeTags.FLOAT: opcode = InstructionCodes.ANY2F; break; case TypeTags.STRING: opcode = InstructionCodes.ANY2S; break; case TypeTags.BOOLEAN: opcode = InstructionCodes.ANY2B; break; case TypeTags.BLOB: opcode = InstructionCodes.ANY2L; break; default: opcode = InstructionCodes.NOP; break; } return opcode; } private void addPackageInfo(BPackageSymbol packageSymbol, ProgramFile programFile) { BLangPackage pkgNode = this.packageCache.get(packageSymbol.pkgID); if (pkgNode == null) { packageSymbol.imports.forEach(importPkdSymbol -> addPackageInfo(importPkdSymbol, programFile)); if (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.bvmAlias())) { programFile.packageFileMap.put(packageSymbol.pkgID.bvmAlias(), packageSymbol.packageFile); } return; } pkgNode.imports.forEach(importPkdNode -> addPackageInfo(importPkdNode.symbol, programFile)); if (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.bvmAlias())) { programFile.packageFileMap.put(packageSymbol.pkgID.bvmAlias(), packageSymbol.packageFile); } } private byte[] getPackageBinaryContent(BLangPackage pkgNode) { try { return PackageInfoWriter.getPackageBinary(this.currentPkgInfo); } catch (IOException e) { throw new BLangCompilerException("failed to generate bytecode for package '" + pkgNode.packageID + "': " + e.getMessage(), e); } } }
If we are handling only the `versionFlag` and `helpFlag`, then we can get rid of this outer `if` block. (A minimal sketch of the resulting `execute()` follows this record.)
public void execute() { if (versionFlag) { printVersionInfo(); return; } if (helpFlag) { if (!argList.isEmpty()) { printUsageInfo(argList.get(0)); return; } } printUsageInfo(BallerinaCliCommands.HELP); }
if (helpFlag) {
public void execute() { if (helpCommands == null) { printUsageInfo(BallerinaCliCommands.HELP); return; } else if (helpCommands.size() > 1) { throw LauncherUtils.createUsageExceptionWithHelp("too many arguments given"); } String userCommand = helpCommands.get(0); if (parentCmdParser.getSubcommands().get(userCommand) == null) { throw LauncherUtils.createUsageExceptionWithHelp("unknown help topic `" + userCommand + "`"); } String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(userCommand); errStream.println(commandUsageInfo); }
class HelpCmd implements BLauncherCmd { @CommandLine.Parameters(description = "Command name") private List<String> helpCommands; private CommandLine parentCmdParser; @Override public String getName() { return BallerinaCliCommands.HELP; } @Override public void printLongDesc(StringBuilder out) { } @Override public void printUsage(StringBuilder out) { } @Override public void setParentCmdParser(CommandLine parentCmdParser) { this.parentCmdParser = parentCmdParser; } }
class HelpCmd implements BLauncherCmd { @CommandLine.Parameters(description = "Command name") private List<String> helpCommands; private CommandLine parentCmdParser; @Override public String getName() { return BallerinaCliCommands.HELP; } @Override public void printLongDesc(StringBuilder out) { } @Override public void printUsage(StringBuilder out) { } @Override public void setParentCmdParser(CommandLine parentCmdParser) { this.parentCmdParser = parentCmdParser; } }
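A minimal, self-contained sketch of the simplification the comment above describes, assuming `execute()` only has `versionFlag` and `helpFlag` to handle. The class wrapper, `main()`, and the `"help"` literal (standing in for `BallerinaCliCommands.HELP`) are hypothetical scaffolding so the snippet compiles on its own; note that the record's actual revision went further and reworked `execute()` around `helpCommands`.

```java
import java.util.ArrayList;
import java.util.List;

final class HelpCmdSketch {
    private boolean versionFlag;
    private boolean helpFlag;
    private final List<String> argList = new ArrayList<>();

    public void execute() {
        if (versionFlag) {
            printVersionInfo();
            return;
        }
        // With only versionFlag and helpFlag handled here, the outer
        // `if (helpFlag)` block collapses into a single guard; the
        // fall-through to the default usage info is preserved.
        if (helpFlag && !argList.isEmpty()) {
            printUsageInfo(argList.get(0));
            return;
        }
        printUsageInfo("help"); // stands in for BallerinaCliCommands.HELP
    }

    private void printVersionInfo() { System.out.println("version info"); }

    private void printUsageInfo(String topic) { System.out.println("usage: " + topic); }

    public static void main(String[] args) {
        new HelpCmdSketch().execute(); // prints "usage: help"
    }
}
```

Merging the nested `if` into a single `helpFlag && !argList.isEmpty()` guard is what makes the outer block unnecessary while keeping the original behavior for every flag combination.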
@pubudu91 Yes. In that case, the `symbolEnvScopeOwner` symbol should be passed down to the `addToCompiledSymbols()` method instead of the `isChildOfEnclosingNode` flag. (A sketch of this pattern follows this record.)
public List<Symbol> visibleSymbols(Document sourceFile, LinePosition position, DiagnosticState... states) { BLangCompilationUnit compilationUnit = getCompilationUnit(sourceFile); BPackageSymbol moduleSymbol = getModuleSymbol(compilationUnit); SymbolTable symbolTable = SymbolTable.getInstance(this.compilerContext); SymbolEnv pkgEnv = symbolTable.pkgEnvMap.get(moduleSymbol); EnvironmentResolver envResolver = new EnvironmentResolver(pkgEnv); SymbolResolver symbolResolver = SymbolResolver.getInstance(this.compilerContext); SymbolEnv symbolEnv = envResolver.lookUp(compilationUnit, position); Map<Name, List<Scope.ScopeEntry>> scopeSymbols = symbolResolver.getAllVisibleInScopeSymbols(symbolEnv); Location cursorPos = new BLangDiagnosticLocation(compilationUnit.name, position.line(), position.line(), position.offset(), position.offset()); Set<DiagnosticState> statesSet = new HashSet<>(Arrays.asList(states)); Set<Symbol> compiledSymbols = new HashSet<>(); for (Map.Entry<Name, List<Scope.ScopeEntry>> entry : scopeSymbols.entrySet()) { Name name = entry.getKey(); List<Scope.ScopeEntry> scopeEntries = entry.getValue(); for (Scope.ScopeEntry scopeEntry : scopeEntries) { BSymbol symbolEnvScopeOwner = symbolEnv.scope.owner; BSymbol scopeEntryOwner = scopeEntry.symbol.owner; boolean isChildOfEnclosingNode = symbolEnvScopeOwner.getName().equals(scopeEntryOwner.getName()) && symbolEnvScopeOwner.pkgID.equals(scopeEntryOwner.pkgID) && symbolEnvScopeOwner.getPosition().equals(scopeEntryOwner.getPosition()); addToCompiledSymbols(compiledSymbols, scopeEntry, cursorPos, name, isChildOfEnclosingNode, statesSet); } } return new ArrayList<>(compiledSymbols); }
&& symbolEnvScopeOwner.getPosition().equals(scopeEntryOwner.getPosition());
public List<Symbol> visibleSymbols(Document sourceFile, LinePosition position, DiagnosticState... states) { BLangCompilationUnit compilationUnit = getCompilationUnit(sourceFile); BPackageSymbol moduleSymbol = getModuleSymbol(compilationUnit); SymbolTable symbolTable = SymbolTable.getInstance(this.compilerContext); SymbolEnv pkgEnv = symbolTable.pkgEnvMap.get(moduleSymbol); EnvironmentResolver envResolver = new EnvironmentResolver(pkgEnv); SymbolResolver symbolResolver = SymbolResolver.getInstance(this.compilerContext); SymbolEnv symbolEnv = envResolver.lookUp(compilationUnit, position); Map<Name, List<Scope.ScopeEntry>> scopeSymbols = symbolResolver.getAllVisibleInScopeSymbols(symbolEnv); Location cursorPos = new BLangDiagnosticLocation(compilationUnit.name, position.line(), position.line(), position.offset(), position.offset()); Set<DiagnosticState> statesSet = new HashSet<>(Arrays.asList(states)); Set<Symbol> compiledSymbols = new HashSet<>(); for (Map.Entry<Name, List<Scope.ScopeEntry>> entry : scopeSymbols.entrySet()) { Name name = entry.getKey(); List<Scope.ScopeEntry> scopeEntries = entry.getValue(); for (Scope.ScopeEntry scopeEntry : scopeEntries) { addToCompiledSymbols(compiledSymbols, scopeEntry, cursorPos, name, symbolEnv.scope.owner, statesSet); } } return new ArrayList<>(compiledSymbols); }
class BallerinaSemanticModel implements SemanticModel { private final BLangPackage bLangPackage; private final CompilerContext compilerContext; private final SymbolFactory symbolFactory; private final TypesFactory typesFactory; private final SymbolTable symbolTable; public BallerinaSemanticModel(BLangPackage bLangPackage, CompilerContext context) { this.compilerContext = context; this.bLangPackage = bLangPackage; this.symbolFactory = SymbolFactory.getInstance(context); this.typesFactory = TypesFactory.getInstance(context); this.symbolTable = SymbolTable.getInstance(context); } /** * {@inheritDoc} */ @Override public List<Symbol> visibleSymbols(Document srcFile, LinePosition linePosition) { return visibleSymbols(srcFile, linePosition, DiagnosticState.VALID, DiagnosticState.UNKNOWN_TYPE); } @Override /** * {@inheritDoc} */ @Override public Optional<Symbol> symbol(Document sourceDocument, LinePosition position) { BLangCompilationUnit compilationUnit = getCompilationUnit(sourceDocument); return lookupSymbol(compilationUnit, position); } @Override public Optional<Symbol> symbol(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } BLangCompilationUnit compilationUnit = getCompilationUnit(nodeIdentifierLocation.get().lineRange().filePath()); return lookupSymbol(compilationUnit, nodeIdentifierLocation.get().lineRange().startLine()); } /** * {@inheritDoc} */ @Override public List<Symbol> moduleSymbols() { List<Symbol> compiledSymbols = new ArrayList<>(); for (Map.Entry<Name, Scope.ScopeEntry> e : bLangPackage.symbol.scope.entries.entrySet()) { Scope.ScopeEntry value = e.getValue(); BSymbol symbol = value.symbol; if (symbol.origin == SOURCE) { compiledSymbols.add(symbolFactory.getBCompiledSymbol(symbol, symbol.getOriginalName().getValue())); } } return compiledSymbols; } /** * {@inheritDoc} */ @Override public List<Location> references(Symbol symbol) { return references(symbol, true); } /** * {@inheritDoc} */ @Override public List<Location> references(Document sourceDocument, LinePosition position) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false).lookupEnclosingContainer(this.bLangPackage, symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, true); } @Override public List<Location> references(Symbol symbol, boolean withDefinition) { BSymbol symbolAtCursor = getInternalSymbol(symbol); Optional<Location> symbolLocation = symbol.getLocation(); if (symbolLocation.isEmpty()) { return Collections.emptyList(); } BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(this.bLangPackage, symbolLocation.get().lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Document sourceDocument, LinePosition position, boolean withDefinition) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(this.bLangPackage, symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Symbol symbol, Document targetDocument, boolean withDefinition) { BSymbol 
symbolAtCursor = getInternalSymbol(symbol); Optional<Location> symbolLocation = symbol.getLocation(); if (symbolLocation.isEmpty()) { return Collections.emptyList(); } BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(getCompilationUnit(targetDocument), symbolLocation.get().lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Document sourceDocument, Document targetDocument, LinePosition position, boolean withDefinition) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(getCompilationUnit(targetDocument), symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } private BSymbol findSymbolAtCursorPosition(Document sourceDocument, LinePosition linePosition) { BLangCompilationUnit sourceCompilationUnit = getCompilationUnit(sourceDocument); SymbolFinder symbolFinder = new SymbolFinder(); return symbolFinder.lookup(sourceCompilationUnit, linePosition); } private List<Location> getReferences(BSymbol symbol, BLangNode node, boolean withDefinition) { ReferenceFinder refFinder = new ReferenceFinder(withDefinition); return refFinder.findReferences(node, symbol); } /** * {@inheritDoc} */ @Override public Optional<TypeSymbol> type(LineRange range) { BLangCompilationUnit compilationUnit = getCompilationUnit(range.filePath()); NodeFinder nodeFinder = new NodeFinder(true); BLangNode node = nodeFinder.lookup(compilationUnit, range); if (node == null) { return Optional.empty(); } return Optional.ofNullable(typesFactory.getTypeDescriptor(node.getBType())); } @Override public Optional<TypeSymbol> typeOf(LineRange range) { BLangCompilationUnit compilationUnit = getCompilationUnit(range.filePath()); NodeFinder nodeFinder = new NodeFinder(false); BLangNode node = nodeFinder.lookup(compilationUnit, range); if (!(node instanceof BLangExpression) && !isObjectConstructorExpr(node) && !isAnonFunctionExpr(node)) { return Optional.empty(); } return Optional.ofNullable(typesFactory.getTypeDescriptor(node.getDeterminedType())); } /** * {@inheritDoc} */ @Override public Optional<TypeSymbol> type(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } return type(node.location().lineRange()); } @Override public Optional<TypeSymbol> typeOf(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } return typeOf(node.location().lineRange()); } /** * {@inheritDoc} */ @Override public List<Diagnostic> diagnostics(LineRange range) { List<Diagnostic> allDiagnostics = this.bLangPackage.getDiagnostics(); List<Diagnostic> filteredDiagnostics = new ArrayList<>(); for (Diagnostic diagnostic : allDiagnostics) { LineRange lineRange = diagnostic.location().lineRange(); if (lineRange.filePath().equals(range.filePath()) && withinRange(lineRange, range)) { filteredDiagnostics.add(diagnostic); } } return filteredDiagnostics; } /** * {@inheritDoc} */ @Override public List<Diagnostic> diagnostics() { return this.bLangPackage.getDiagnostics(); } private Optional<Symbol> lookupSymbol(BLangCompilationUnit compilationUnit, LinePosition position) { SymbolFinder symbolFinder = new SymbolFinder(); 
BSymbol symbolAtCursor = symbolFinder.lookup(compilationUnit, position); if (symbolAtCursor == null || symbolAtCursor == symbolTable.notFoundSymbol) { return Optional.empty(); } if (isTypeSymbol(symbolAtCursor) && ((isInlineSingletonType((BTypeSymbol) symbolAtCursor)) || isCursorPosAtDefinition(compilationUnit, symbolAtCursor, position))) { return Optional.ofNullable( typesFactory.getTypeDescriptor(symbolAtCursor.type, (BTypeSymbol) symbolAtCursor)); } return Optional.ofNullable(symbolFactory.getBCompiledSymbol(symbolAtCursor, symbolAtCursor.getOriginalName().getValue())); } private boolean hasCursorPosPassedSymbolPos(BSymbol symbol, Location cursorPos) { if (symbol.origin != SOURCE) { return false; } if (symbol.owner.getKind() == SymbolKind.PACKAGE || Symbols.isFlagOn(symbol.flags, Flags.WORKER)) { return true; } if (!bLangPackage.packageID.equals(symbol.pkgID)) { return false; } LinePosition cursorPosStartLine = cursorPos.lineRange().startLine(); LinePosition symbolStartLine = symbol.pos.lineRange().startLine(); if (cursorPosStartLine.line() < symbolStartLine.line()) { return false; } if (cursorPosStartLine.line() > symbolStartLine.line()) { return true; } return cursorPosStartLine.offset() > symbolStartLine.offset(); } private boolean isImportedSymbol(BSymbol symbol) { return symbol.origin == COMPILED_SOURCE && (Symbols.isFlagOn(symbol.flags, Flags.PUBLIC) || symbol.getKind() == SymbolKind.PACKAGE); } private BLangCompilationUnit getCompilationUnit(Document srcFile) { return getCompilationUnit(srcFile.name()); } private BLangCompilationUnit getCompilationUnit(String srcFile) { List<BLangCompilationUnit> testSrcs = new ArrayList<>(); for (BLangTestablePackage pkg : bLangPackage.testablePkgs) { testSrcs.addAll(pkg.compUnits); } Stream<BLangCompilationUnit> units = Stream.concat(bLangPackage.compUnits.stream(), testSrcs.stream()); return units .filter(unit -> unit.name.equals(srcFile)) .findFirst() .get(); } private boolean isCursorPosAtDefinition(BLangCompilationUnit compilationUnit, BSymbol symbolAtCursor, LinePosition cursorPos) { return !(compilationUnit.getPackageID().equals(symbolAtCursor.pkgID) && compilationUnit.getName().equals(symbolAtCursor.pos.lineRange().filePath()) && PositionUtil.withinBlock(cursorPos, symbolAtCursor.pos)); } private boolean isInlineSingletonType(BTypeSymbol symbol) { return symbol.type.tag == TypeTags.FINITE && !symbol.isLabel && ((BFiniteType) symbol.type).getValueSpace().size() == 1; } private boolean isTypeSymbol(BSymbol symbol) { return symbol instanceof BTypeSymbol && !Symbols.isTagOn(symbol, PACKAGE) && !Symbols.isTagOn(symbol, ANNOTATION); } private BSymbol getInternalSymbol(Symbol symbol) { if (symbol.kind() == TYPE) { return ((AbstractTypeSymbol) symbol).getBType().tsymbol; } return ((BallerinaSymbol) symbol).getInternalSymbol(); } private BPackageSymbol getModuleSymbol(BLangCompilationUnit compilationUnit) { return compilationUnit.getSourceKind() == REGULAR_SOURCE ? 
bLangPackage.symbol : bLangPackage.getTestablePkg().symbol; } private boolean withinRange(LineRange range, LineRange specifiedRange) { int startLine = range.startLine().line(); int startOffset = range.startLine().offset(); int specifiedStartLine = specifiedRange.startLine().line(); int specifiedEndLine = specifiedRange.endLine().line(); int specifiedStartOffset = specifiedRange.startLine().offset(); int specifiedEndOffset = specifiedRange.endLine().offset(); return startLine >= specifiedStartLine && startLine <= specifiedEndLine && startOffset >= specifiedStartOffset && startOffset <= specifiedEndOffset; } private void addToCompiledSymbols(Set<Symbol> compiledSymbols, Scope.ScopeEntry scopeEntry, Location cursorPos, Name name, boolean isChildOfEnclosingNode, Set<DiagnosticState> states) { if (scopeEntry == null || scopeEntry.symbol == null || isFilteredVarSymbol(scopeEntry.symbol, states)) { return; } BSymbol symbol = scopeEntry.symbol; if ((hasCursorPosPassedSymbolPos(symbol, cursorPos) || isImportedSymbol(symbol)) && !isServiceDeclSymbol(symbol)) { Symbol compiledSymbol; if (symbol.getKind() == SymbolKind.PACKAGE) { compiledSymbol = symbolFactory.getBCompiledSymbol(symbol, name.getValue()); } else { compiledSymbol = symbolFactory.getBCompiledSymbol(symbol, symbol.getOriginalName().getValue()); } if (compiledSymbol == null || compiledSymbols.contains(compiledSymbol) || (isFieldSymbol(compiledSymbol) && isChildOfEnclosingNode)) { return; } compiledSymbols.add(compiledSymbol); } addToCompiledSymbols(compiledSymbols, scopeEntry.next, cursorPos, name, isChildOfEnclosingNode, states); } private boolean isFieldSymbol(Symbol symbol) { return symbol.kind() == CLASS_FIELD || symbol.kind() == OBJECT_FIELD || symbol.kind() == RECORD_FIELD; } private boolean isServiceDeclSymbol(BSymbol symbol) { return symbol.kind == SymbolKind.SERVICE; } private boolean isFilteredVarSymbol(BSymbol symbol, Set<DiagnosticState> states) { return symbol instanceof BVarSymbol && !states.contains(((BVarSymbol) symbol).state); } private boolean isObjectConstructorExpr(BLangNode node) { return node instanceof BLangClassDefinition && ((BLangClassDefinition) node).flagSet.contains(Flag.OBJECT_CTOR); } private boolean isAnonFunctionExpr(BLangNode node) { return (node instanceof BLangFunction && ((BLangFunction) node).flagSet.contains(Flag.LAMBDA)) || node instanceof BLangArrowFunction; } }
class BallerinaSemanticModel implements SemanticModel { private final BLangPackage bLangPackage; private final CompilerContext compilerContext; private final SymbolFactory symbolFactory; private final TypesFactory typesFactory; private final SymbolTable symbolTable; public BallerinaSemanticModel(BLangPackage bLangPackage, CompilerContext context) { this.compilerContext = context; this.bLangPackage = bLangPackage; this.symbolFactory = SymbolFactory.getInstance(context); this.typesFactory = TypesFactory.getInstance(context); this.symbolTable = SymbolTable.getInstance(context); } /** * {@inheritDoc} */ @Override public List<Symbol> visibleSymbols(Document srcFile, LinePosition linePosition) { return visibleSymbols(srcFile, linePosition, DiagnosticState.VALID, DiagnosticState.UNKNOWN_TYPE); } @Override /** * {@inheritDoc} */ @Override public Optional<Symbol> symbol(Document sourceDocument, LinePosition position) { BLangCompilationUnit compilationUnit = getCompilationUnit(sourceDocument); return lookupSymbol(compilationUnit, position); } @Override public Optional<Symbol> symbol(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } BLangCompilationUnit compilationUnit = getCompilationUnit(nodeIdentifierLocation.get().lineRange().filePath()); return lookupSymbol(compilationUnit, nodeIdentifierLocation.get().lineRange().startLine()); } /** * {@inheritDoc} */ @Override public List<Symbol> moduleSymbols() { List<Symbol> compiledSymbols = new ArrayList<>(); for (Map.Entry<Name, Scope.ScopeEntry> e : bLangPackage.symbol.scope.entries.entrySet()) { Scope.ScopeEntry value = e.getValue(); BSymbol symbol = value.symbol; if (symbol.origin == SOURCE) { compiledSymbols.add(symbolFactory.getBCompiledSymbol(symbol, symbol.getOriginalName().getValue())); } } return compiledSymbols; } /** * {@inheritDoc} */ @Override public List<Location> references(Symbol symbol) { return references(symbol, true); } /** * {@inheritDoc} */ @Override public List<Location> references(Document sourceDocument, LinePosition position) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false).lookupEnclosingContainer(this.bLangPackage, symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, true); } @Override public List<Location> references(Symbol symbol, boolean withDefinition) { BSymbol symbolAtCursor = getInternalSymbol(symbol); Optional<Location> symbolLocation = symbol.getLocation(); if (symbolLocation.isEmpty()) { return Collections.emptyList(); } BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(this.bLangPackage, symbolLocation.get().lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Document sourceDocument, LinePosition position, boolean withDefinition) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(this.bLangPackage, symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Symbol symbol, Document targetDocument, boolean withDefinition) { BSymbol 
symbolAtCursor = getInternalSymbol(symbol); Optional<Location> symbolLocation = symbol.getLocation(); if (symbolLocation.isEmpty()) { return Collections.emptyList(); } BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(getCompilationUnit(targetDocument), symbolLocation.get().lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } @Override public List<Location> references(Document sourceDocument, Document targetDocument, LinePosition position, boolean withDefinition) { BSymbol symbolAtCursor = findSymbolAtCursorPosition(sourceDocument, position); if (symbolAtCursor == null) { return Collections.emptyList(); } Location symbolLocation = symbolAtCursor.getPosition(); BLangNode node = new NodeFinder(false) .lookupEnclosingContainer(getCompilationUnit(targetDocument), symbolLocation.lineRange()); return getReferences(symbolAtCursor, node, withDefinition); } private BSymbol findSymbolAtCursorPosition(Document sourceDocument, LinePosition linePosition) { BLangCompilationUnit sourceCompilationUnit = getCompilationUnit(sourceDocument); SymbolFinder symbolFinder = new SymbolFinder(); return symbolFinder.lookup(sourceCompilationUnit, linePosition); } private List<Location> getReferences(BSymbol symbol, BLangNode node, boolean withDefinition) { ReferenceFinder refFinder = new ReferenceFinder(withDefinition); return refFinder.findReferences(node, symbol); } /** * {@inheritDoc} */ @Override public Optional<TypeSymbol> type(LineRange range) { BLangCompilationUnit compilationUnit = getCompilationUnit(range.filePath()); NodeFinder nodeFinder = new NodeFinder(true); BLangNode node = nodeFinder.lookup(compilationUnit, range); if (node == null) { return Optional.empty(); } return Optional.ofNullable(typesFactory.getTypeDescriptor(node.getBType())); } @Override public Optional<TypeSymbol> typeOf(LineRange range) { BLangCompilationUnit compilationUnit = getCompilationUnit(range.filePath()); NodeFinder nodeFinder = new NodeFinder(false); BLangNode node = nodeFinder.lookup(compilationUnit, range); if (!(node instanceof BLangExpression) && !isObjectConstructorExpr(node) && !isAnonFunctionExpr(node)) { return Optional.empty(); } return Optional.ofNullable(typesFactory.getTypeDescriptor(node.getDeterminedType())); } /** * {@inheritDoc} */ @Override public Optional<TypeSymbol> type(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } return type(node.location().lineRange()); } @Override public Optional<TypeSymbol> typeOf(Node node) { Optional<Location> nodeIdentifierLocation = node.apply(new SyntaxNodeToLocationMapper()); if (nodeIdentifierLocation.isEmpty()) { return Optional.empty(); } return typeOf(node.location().lineRange()); } /** * {@inheritDoc} */ @Override public List<Diagnostic> diagnostics(LineRange range) { List<Diagnostic> allDiagnostics = this.bLangPackage.getDiagnostics(); List<Diagnostic> filteredDiagnostics = new ArrayList<>(); for (Diagnostic diagnostic : allDiagnostics) { LineRange lineRange = diagnostic.location().lineRange(); if (lineRange.filePath().equals(range.filePath()) && withinRange(lineRange, range)) { filteredDiagnostics.add(diagnostic); } } return filteredDiagnostics; } /** * {@inheritDoc} */ @Override public List<Diagnostic> diagnostics() { return this.bLangPackage.getDiagnostics(); } private Optional<Symbol> lookupSymbol(BLangCompilationUnit compilationUnit, LinePosition position) { SymbolFinder symbolFinder = new SymbolFinder(); 
BSymbol symbolAtCursor = symbolFinder.lookup(compilationUnit, position); if (symbolAtCursor == null || symbolAtCursor == symbolTable.notFoundSymbol) { return Optional.empty(); } if (isTypeSymbol(symbolAtCursor) && ((isInlineSingletonType((BTypeSymbol) symbolAtCursor)) || isCursorPosAtDefinition(compilationUnit, symbolAtCursor, position))) { return Optional.ofNullable( typesFactory.getTypeDescriptor(symbolAtCursor.type, (BTypeSymbol) symbolAtCursor)); } return Optional.ofNullable(symbolFactory.getBCompiledSymbol(symbolAtCursor, symbolAtCursor.getOriginalName().getValue())); } private boolean hasCursorPosPassedSymbolPos(BSymbol symbol, Location cursorPos) { if (symbol.origin != SOURCE) { return false; } if (symbol.owner.getKind() == SymbolKind.PACKAGE || Symbols.isFlagOn(symbol.flags, Flags.WORKER)) { return true; } if (!bLangPackage.packageID.equals(symbol.pkgID)) { return false; } LinePosition cursorPosStartLine = cursorPos.lineRange().startLine(); LinePosition symbolStartLine = symbol.pos.lineRange().startLine(); if (cursorPosStartLine.line() < symbolStartLine.line()) { return false; } if (cursorPosStartLine.line() > symbolStartLine.line()) { return true; } return cursorPosStartLine.offset() > symbolStartLine.offset(); } private boolean isImportedSymbol(BSymbol symbol) { return symbol.origin == COMPILED_SOURCE && (Symbols.isFlagOn(symbol.flags, Flags.PUBLIC) || symbol.getKind() == SymbolKind.PACKAGE); } private BLangCompilationUnit getCompilationUnit(Document srcFile) { return getCompilationUnit(srcFile.name()); } private BLangCompilationUnit getCompilationUnit(String srcFile) { List<BLangCompilationUnit> testSrcs = new ArrayList<>(); for (BLangTestablePackage pkg : bLangPackage.testablePkgs) { testSrcs.addAll(pkg.compUnits); } Stream<BLangCompilationUnit> units = Stream.concat(bLangPackage.compUnits.stream(), testSrcs.stream()); return units .filter(unit -> unit.name.equals(srcFile)) .findFirst() .get(); } private boolean isCursorPosAtDefinition(BLangCompilationUnit compilationUnit, BSymbol symbolAtCursor, LinePosition cursorPos) { return !(compilationUnit.getPackageID().equals(symbolAtCursor.pkgID) && compilationUnit.getName().equals(symbolAtCursor.pos.lineRange().filePath()) && PositionUtil.withinBlock(cursorPos, symbolAtCursor.pos)); } private boolean isInlineSingletonType(BTypeSymbol symbol) { return symbol.type.tag == TypeTags.FINITE && !symbol.isLabel && ((BFiniteType) symbol.type).getValueSpace().size() == 1; } private boolean isTypeSymbol(BSymbol symbol) { return symbol instanceof BTypeSymbol && !Symbols.isTagOn(symbol, PACKAGE) && !Symbols.isTagOn(symbol, ANNOTATION); } private BSymbol getInternalSymbol(Symbol symbol) { if (symbol.kind() == TYPE) { return ((AbstractTypeSymbol) symbol).getBType().tsymbol; } return ((BallerinaSymbol) symbol).getInternalSymbol(); } private BPackageSymbol getModuleSymbol(BLangCompilationUnit compilationUnit) { return compilationUnit.getSourceKind() == REGULAR_SOURCE ? 
bLangPackage.symbol : bLangPackage.getTestablePkg().symbol; } private boolean withinRange(LineRange range, LineRange specifiedRange) { int startLine = range.startLine().line(); int startOffset = range.startLine().offset(); int specifiedStartLine = specifiedRange.startLine().line(); int specifiedEndLine = specifiedRange.endLine().line(); int specifiedStartOffset = specifiedRange.startLine().offset(); int specifiedEndOffset = specifiedRange.endLine().offset(); return startLine >= specifiedStartLine && startLine <= specifiedEndLine && startOffset >= specifiedStartOffset && startOffset <= specifiedEndOffset; } private void addToCompiledSymbols(Set<Symbol> compiledSymbols, Scope.ScopeEntry scopeEntry, Location cursorPos, Name name, BSymbol symbolEnvScopeOwner, Set<DiagnosticState> states) { if (scopeEntry == null || scopeEntry.symbol == null || isFilteredVarSymbol(scopeEntry.symbol, states)) { return; } BSymbol symbol = scopeEntry.symbol; if ((hasCursorPosPassedSymbolPos(symbol, cursorPos) || isImportedSymbol(symbol)) && !isServiceDeclSymbol(symbol)) { Symbol compiledSymbol; if (symbol.getKind() == SymbolKind.PACKAGE) { compiledSymbol = symbolFactory.getBCompiledSymbol(symbol, name.getValue()); } else { compiledSymbol = symbolFactory.getBCompiledSymbol(symbol, symbol.getOriginalName().getValue()); } if (compiledSymbol == null || compiledSymbols.contains(compiledSymbol)) { return; } if (isFieldSymbol(compiledSymbol)) { BSymbol scopeEntryOwner = scopeEntry.symbol.owner; if (symbolEnvScopeOwner.getName().equals(scopeEntryOwner.getName()) && symbolEnvScopeOwner.pkgID.equals(scopeEntryOwner.pkgID) && symbolEnvScopeOwner.getPosition().equals(scopeEntryOwner.getPosition())) { return; } } compiledSymbols.add(compiledSymbol); } addToCompiledSymbols(compiledSymbols, scopeEntry.next, cursorPos, name, symbolEnvScopeOwner, states); } private boolean isFieldSymbol(Symbol symbol) { return symbol.kind() == CLASS_FIELD || symbol.kind() == OBJECT_FIELD || symbol.kind() == RECORD_FIELD; } private boolean isServiceDeclSymbol(BSymbol symbol) { return symbol.kind == SymbolKind.SERVICE; } private boolean isFilteredVarSymbol(BSymbol symbol, Set<DiagnosticState> states) { return symbol instanceof BVarSymbol && !states.contains(((BVarSymbol) symbol).state); } private boolean isObjectConstructorExpr(BLangNode node) { return node instanceof BLangClassDefinition && ((BLangClassDefinition) node).flagSet.contains(Flag.OBJECT_CTOR); } private boolean isAnonFunctionExpr(BLangNode node) { return (node instanceof BLangFunction && ((BLangFunction) node).flagSet.contains(Flag.LAMBDA)) || node instanceof BLangArrowFunction; } }
Instead of having a global variable, I think we should either write a separate type visitor to check whether both operands belong to the same ordered type, or even just add a util method that gets the minimum type representing each operand's type and checks that they represent the same basic type, which has to be ordered. Please create an issue; let's fix this separately.
public Boolean visit(BFiniteType t, BType s) { if (inOrderedType) { inOrderedType = false; return checkValueSpaceHasSameType(t, s); } return s == t; }
if (inOrderedType) {
public Boolean visit(BFiniteType t, BType s) { if (inOrderedType) { inOrderedType = false; return checkValueSpaceHasSameType(t, s); } return s == t; }
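A rough sketch of the util-method alternative suggested in the review comment above: reduce each operand to the minimum basic type it represents (unwrapping a single-value finite type) and require both reductions to be the same ordered basic type. Everything here except `isSimpleBasicType` (which the visitor below already uses) is a hypothetical name, and the value-space access is simplified: this illustrates the idea and is not existing compiler API.

```java
// Hypothetical helper, not existing Ballerina compiler API: compute the minimum
// basic type tag an operand represents, unwrapping a singleton finite type.
private static int getMinimumBasicTypeTag(BType type) {
    if (type.tag == TypeTags.FINITE && ((BFiniteType) type).getValueSpace().size() == 1) {
        // The basic type of a singleton is the basic type of its only value
        // (value-space access simplified for this sketch).
        return ((BFiniteType) type).getValueSpace().iterator().next().getBType().tag;
    }
    return type.tag;
}

// Hypothetical helper: both operands must reduce to the same ordered basic type,
// which would remove the need for the inOrderedType flag in the visitor.
private static boolean belongToSameOrderedType(BType lhs, BType rhs) {
    int lhsTag = getMinimumBasicTypeTag(lhs);
    int rhsTag = getMinimumBasicTypeTag(rhs);
    return lhsTag == rhsTag && isSimpleBasicType(lhsTag);
}
```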
class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BSameTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType t, BType s) { if (t == s) { return true; } switch (t.tag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: return t.tag == s.tag && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); case TypeTags.ANY: case TypeTags.ANYDATA: return t.tag == s.tag && hasSameReadonlyFlag(s, t) && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); default: break; } return false; } @Override public Boolean visit(BBuiltInRefType t, BType s) { return t == s; } @Override public Boolean visit(BAnyType t, BType s) { return t == s; } @Override public Boolean visit(BAnydataType t, BType s) { if (t == s) { return true; } return t.tag == s.tag; } @Override public Boolean visit(BMapType t, BType s) { if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) { return false; } BMapType sType = ((BMapType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFutureType t, BType s) { return s.tag == TypeTags.FUTURE && isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes); } @Override public Boolean visit(BXMLType t, BType s) { return visit((BBuiltInRefType) t, s); } @Override public Boolean visit(BJSONType t, BType s) { return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t); } @Override public Boolean visit(BArrayType t, BType s) { return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes); } @Override public Boolean visit(BObjectType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.OBJECT) { return false; } return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name); } @Override public Boolean visit(BRecordType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) { return false; } BRecordType source = (BRecordType) s; if (source.fields.size() != t.fields.size()) { return false; } for (BField sourceField : source.fields.values()) { if (t.fields.containsKey(sourceField.name.value)) { BField targetField = t.fields.get(sourceField.name.value); if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) && hasSameOptionalFlag(sourceField.symbol, targetField.symbol) && (!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) || Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) { continue; } } return false; } return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes); } private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) { return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL; } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } public Boolean visit(BTupleType t, BType s) { if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) || (t.restType != null && t.restType.tag == TypeTags.NONE)) && !(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) { return true; } if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) { return false; } BTupleType source = (BTupleType) s; if (source.tupleTypes.size() != 
t.tupleTypes.size()) { return false; } BType sourceRestType = source.restType; BType targetRestType = t.restType; if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) { return false; } for (int i = 0; i < source.tupleTypes.size(); i++) { if (t.getTupleTypes().get(i) == symTable.noType) { continue; } if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceRestType == null || targetRestType == symTable.noType) { return true; } return isSameType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BStreamType t, BType s) { return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes); } @Override public Boolean visit(BTableType t, BType s) { return t == s; } @Override public Boolean visit(BInvokableType t, BType s) { return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes); } @Override public Boolean visit(BUnionType tUnionType, BType s) { if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) { if (inOrderedType) { inOrderedType = false; return isSimpleBasicType(s.tag) && checkUnionHasSameFiniteType(tUnionType.getMemberTypes(), s); } return false; } BUnionType sUnionType = (BUnionType) s; if (sUnionType.getMemberTypes().size() != tUnionType.getMemberTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size()); Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size()); sourceTypes.add(sUnionType); sourceTypes.addAll(sUnionType.getMemberTypes()); targetTypes.add(tUnionType); targetTypes.addAll(tUnionType.getMemberTypes()); boolean notSameType = sourceTypes .stream() .map(sT -> targetTypes .stream() .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes))) .anyMatch(foundSameType -> !foundSameType); return !notSameType; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) { return false; } BIntersectionType sIntersectionType = (BIntersectionType) s; if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes()); Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes()); for (BType sourceType : sourceTypes) { boolean foundSameType = false; for (BType targetType : targetTypes) { if (isSameType(sourceType, targetType, this.unresolvedTypes)) { foundSameType = true; break; } } if (!foundSameType) { return false; } } return true; } @Override public Boolean visit(BErrorType t, BType s) { if (s.tag != TypeTags.ERROR) { return false; } BErrorType source = (BErrorType) s; if (!source.typeIdSet.equals(t.typeIdSet)) { return false; } if (source.detailType == t.detailType) { return true; } return isSameType(source.detailType, t.detailType, this.unresolvedTypes); } @Override public Boolean visit(BTypedescType t, BType s) { if (s.tag != TypeTags.TYPEDESC) { return false; } BTypedescType sType = ((BTypedescType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BParameterizedType t, BType s) { if (s.tag != TypeTags.PARAMETERIZED_TYPE) { return false; } BParameterizedType sType = (BParameterizedType) s; return isSameType(sType.paramValueType, t.paramValueType) &&
sType.paramSymbol.equals(t.paramSymbol); } }
class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BSameTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType t, BType s) { if (t == s) { return true; } switch (t.tag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: return t.tag == s.tag && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); case TypeTags.ANY: case TypeTags.ANYDATA: return t.tag == s.tag && hasSameReadonlyFlag(s, t) && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); default: break; } return false; } @Override public Boolean visit(BBuiltInRefType t, BType s) { return t == s; } @Override public Boolean visit(BAnyType t, BType s) { return t == s; } @Override public Boolean visit(BAnydataType t, BType s) { if (t == s) { return true; } return t.tag == s.tag; } @Override public Boolean visit(BMapType t, BType s) { if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) { return false; } BMapType sType = ((BMapType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFutureType t, BType s) { return s.tag == TypeTags.FUTURE && isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes); } @Override public Boolean visit(BXMLType t, BType s) { return visit((BBuiltInRefType) t, s); } @Override public Boolean visit(BJSONType t, BType s) { return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t); } @Override public Boolean visit(BArrayType t, BType s) { return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes); } @Override public Boolean visit(BObjectType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.OBJECT) { return false; } return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name); } @Override public Boolean visit(BRecordType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) { return false; } BRecordType source = (BRecordType) s; if (source.fields.size() != t.fields.size()) { return false; } for (BField sourceField : source.fields.values()) { if (t.fields.containsKey(sourceField.name.value)) { BField targetField = t.fields.get(sourceField.name.value); if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) && hasSameOptionalFlag(sourceField.symbol, targetField.symbol) && (!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) || Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) { continue; } } return false; } return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes); } private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) { return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL; } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } public Boolean visit(BTupleType t, BType s) { if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) || (t.restType != null && t.restType.tag == TypeTags.NONE)) && !(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) { return true; } if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) { return false; } BTupleType source = (BTupleType) s; if (source.tupleTypes.size() != 
t.tupleTypes.size()) { return false; } BType sourceRestType = source.restType; BType targetRestType = t.restType; if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) { return false; } for (int i = 0; i < source.tupleTypes.size(); i++) { if (t.getTupleTypes().get(i) == symTable.noType) { continue; } if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceRestType == null || targetRestType == symTable.noType) { return true; } return isSameType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BStreamType t, BType s) { return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes); } @Override public Boolean visit(BTableType t, BType s) { return t == s; } @Override public Boolean visit(BInvokableType t, BType s) { return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes); } @Override public Boolean visit(BUnionType tUnionType, BType s) { if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) { if (inOrderedType) { inOrderedType = false; return isSimpleBasicType(s.tag) && checkUnionHasSameFiniteType(tUnionType.getMemberTypes(), s); } return false; } BUnionType sUnionType = (BUnionType) s; if (sUnionType.getMemberTypes().size() != tUnionType.getMemberTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size()); Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size()); sourceTypes.add(sUnionType); sourceTypes.addAll(sUnionType.getMemberTypes()); targetTypes.add(tUnionType); targetTypes.addAll(tUnionType.getMemberTypes()); boolean notSameType = sourceTypes .stream() .map(sT -> targetTypes .stream() .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes))) .anyMatch(foundSameType -> !foundSameType); return !notSameType; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) { return false; } BIntersectionType sIntersectionType = (BIntersectionType) s; if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes()); Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes()); for (BType sourceType : sourceTypes) { boolean foundSameType = false; for (BType targetType : targetTypes) { if (isSameType(sourceType, targetType, this.unresolvedTypes)) { foundSameType = true; break; } } if (!foundSameType) { return false; } } return true; } @Override public Boolean visit(BErrorType t, BType s) { if (s.tag != TypeTags.ERROR) { return false; } BErrorType source = (BErrorType) s; if (!source.typeIdSet.equals(t.typeIdSet)) { return false; } if (source.detailType == t.detailType) { return true; } return isSameType(source.detailType, t.detailType, this.unresolvedTypes); } @Override public Boolean visit(BTypedescType t, BType s) { if (s.tag != TypeTags.TYPEDESC) { return false; } BTypedescType sType = ((BTypedescType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BParameterizedType t, BType s) { if (s.tag != TypeTags.PARAMETERIZED_TYPE) { return false; } BParameterizedType sType = (BParameterizedType) s; return isSameType(sType.paramValueType, t.paramValueType) &&
sType.paramSymbol.equals(t.paramSymbol); } }
The internal method should be preceded by a blank line.
public void watch(final String key, final EventListener eventListener) { final String path = key + "/"; if (!caches.containsKey(path)) { addCacheData(key); } final PathTree cache = caches.get(path); cache.watch(new Listener() { @Override public void process(final WatchedEvent event) { if (!Strings.isNullOrEmpty(event.getPath())) { eventListener.onChange(new DataChangedEvent(getEventType(event), event.getPath(), getWithoutCache(event.getPath()))); } } private DataChangedEvent.Type getEventType(final WatchedEvent event) { switch (event.getType()) { case NodeDataChanged: case NodeChildrenChanged: return DataChangedEvent.Type.UPDATED; case NodeDeleted: return DataChangedEvent.Type.DELETED; default: return DataChangedEvent.Type.IGNORED; } } }); }
@Override
public void watch(final String key, final EventListener eventListener) { final String path = key + "/"; if (!caches.containsKey(path)) { addCacheData(key); } final PathTree cache = caches.get(path); cache.watch(new ZookeeperEventListener() { @Override public void process(final WatchedEvent event) { if (!Strings.isNullOrEmpty(event.getPath())) { eventListener.onChange(new DataChangedEvent(getEventType(event), event.getPath(), getWithoutCache(event.getPath()))); } } private DataChangedEvent.Type getEventType(final WatchedEvent event) { return extractEventType(event); } }); }
class NewZookeeperRegistryCenter implements RegistryCenter { private final IClient client; private final Map<String, PathTree> caches = new HashMap<>(); public NewZookeeperRegistryCenter(final ZookeeperConfiguration zkConfig) { ClientFactory creator = buildCreator(zkConfig); client = initClient(creator, zkConfig); } private ClientFactory buildCreator(final ZookeeperConfiguration zkConfig) { ClientFactory creator = new ClientFactory(); creator.setClientNamespace(zkConfig.getNamespace()) .newClient(zkConfig.getServerLists(), zkConfig.getSessionTimeoutMilliseconds()) .setRetryPolicy(new DelayRetryPolicy(zkConfig.getBaseSleepTimeMilliseconds(), zkConfig.getMaxRetries(), zkConfig.getMaxSleepTimeMilliseconds())); if (!Strings.isNullOrEmpty(zkConfig.getDigest())) { creator.authorization("digest", zkConfig.getDigest().getBytes(Charsets.UTF_8), ZooDefs.Ids.CREATOR_ALL_ACL); } return creator; } private IClient initClient(final ClientFactory creator, final ZookeeperConfiguration zkConfig) { IClient newClient = null; try { newClient = creator.start(); if (!newClient.blockUntilConnected(zkConfig.getMaxSleepTimeMilliseconds() * zkConfig.getMaxRetries(), TimeUnit.MILLISECONDS)) { newClient.close(); throw new KeeperException.OperationTimeoutException(); } newClient.useExecStrategy(StrategyType.SYNC_RETRY); } catch (Exception e) { RegExceptionHandler.handleException(e); } return newClient; } @Override public String get(final String key) { PathTree cache = findTreeCache(key); if (null == cache) { return getDirectly(key); } byte[] resultInCache = cache.getValue(key); if (null != resultInCache) { return null == resultInCache ? null : new String(resultInCache, Charsets.UTF_8); } return getDirectly(key); } private PathTree findTreeCache(final String key) { for (Entry<String, PathTree> entry : caches.entrySet()) { if (key.startsWith(entry.getKey())) { return entry.getValue(); } } return null; } @Override public String getDirectly(final String key) { try { return new String(client.getData(key), Charsets.UTF_8); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return null; } } @Override public boolean isExisted(final String key) { try { return client.checkExists(key); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return false; } } @Override public List<String> getChildrenKeys(final String key) { try { List<String> result = client.getChildren(key); Collections.sort(result, new Comparator<String>() { @Override public int compare(final String o1, final String o2) { return o2.compareTo(o1); } }); return result; } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return Collections.emptyList(); } } @Override public void persist(final String key, final String value) { try { if (!isExisted(key)) { client.createAllNeedPath(key, value, CreateMode.PERSISTENT); } else { update(key, value); } } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } } @Override public void update(final String key, final String value) { try { client.transaction().check(key, ZookeeperConstants.VERSION).setData(key, value.getBytes(ZookeeperConstants.UTF_8), ZookeeperConstants.VERSION).commit(); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } } @Override public void persistEphemeral(final String key, final String value) { try { if (isExisted(key)) { client.deleteAllChildren(key); } client.createAllNeedPath(key, value, CreateMode.EPHEMERAL); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } } private
synchronized String getWithoutCache(final String key) { try { client.useExecStrategy(StrategyType.USUAL); byte[] data = client.getData(key); client.useExecStrategy(StrategyType.SYNC_RETRY); return null == data ? null : new String(data, Charsets.UTF_8); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return null; } } private void addCacheData(final String cachePath) { PathTree cache = new PathTree(cachePath, client); try { cache.load(); cache.watch(); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } caches.put(cachePath + "/", cache); } @Override public void close() { for (Entry<String, PathTree> each : caches.entrySet()) { each.getValue().close(); } client.close(); } }
class NewZookeeperRegistryCenter implements RegistryCenter { private final IClient client; private final Map<String, PathTree> caches = new HashMap<>(); public NewZookeeperRegistryCenter(final ZookeeperConfiguration zkConfig) { ClientFactory creator = buildCreator(zkConfig); client = initClient(creator, zkConfig); } private ClientFactory buildCreator(final ZookeeperConfiguration zkConfig) { ClientFactory creator = new ClientFactory(); creator.setClientNamespace(zkConfig.getNamespace()) .newClient(zkConfig.getServerLists(), zkConfig.getSessionTimeoutMilliseconds()) .setRetryPolicy(new DelayRetryPolicy(zkConfig.getBaseSleepTimeMilliseconds(), zkConfig.getMaxRetries(), zkConfig.getMaxSleepTimeMilliseconds())); if (!Strings.isNullOrEmpty(zkConfig.getDigest())) { creator.authorization("digest", zkConfig.getDigest().getBytes(Charsets.UTF_8), ZooDefs.Ids.CREATOR_ALL_ACL); } return creator; } private IClient initClient(final ClientFactory creator, final ZookeeperConfiguration zkConfig) { IClient newClient = null; try { newClient = creator.start(); if (!newClient.blockUntilConnected(zkConfig.getMaxSleepTimeMilliseconds() * zkConfig.getMaxRetries(), TimeUnit.MILLISECONDS)) { newClient.close(); throw new KeeperException.OperationTimeoutException(); } newClient.useExecStrategy(StrategyType.SYNC_RETRY); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } return newClient; } @Override public String get(final String key) { PathTree cache = findTreeCache(key); if (null == cache) { return getDirectly(key); } byte[] resultInCache = cache.getValue(key); if (null != resultInCache) { return null == resultInCache ? null : new String(resultInCache, Charsets.UTF_8); } return getDirectly(key); } private PathTree findTreeCache(final String key) { for (Entry<String, PathTree> entry : caches.entrySet()) { if (key.startsWith(entry.getKey())) { return entry.getValue(); } } return null; } @Override public String getDirectly(final String key) { try { return new String(client.getData(key), Charsets.UTF_8); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return null; } } @Override public boolean isExisted(final String key) { try { return client.checkExists(key); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return false; } } @Override public List<String> getChildrenKeys(final String key) { try { List<String> result = client.getChildren(key); Collections.sort(result, new Comparator<String>() { @Override public int compare(final String o1, final String o2) { return o2.compareTo(o1); } }); return result; } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return Collections.emptyList(); } } @Override public void persist(final String key, final String value) { try { if (!isExisted(key)) { client.createAllNeedPath(key, value, CreateMode.PERSISTENT); } else { update(key, value); } } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } } @Override public void update(final String key, final String value) { try { client.transaction().check(key, ZookeeperConstants.VERSION).setData(key, value.getBytes(ZookeeperConstants.UTF_8), ZookeeperConstants.VERSION).commit(); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } } @Override public void persistEphemeral(final String key, final String value) { try { if (isExisted(key)) { client.deleteAllChildren(key); } client.createAllNeedPath(key, value, CreateMode.EPHEMERAL); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } }
private DataChangedEvent.Type extractEventType(final WatchedEvent event) { switch (event.getType()) { case NodeDataChanged: case NodeChildrenChanged: return DataChangedEvent.Type.UPDATED; case NodeDeleted: return DataChangedEvent.Type.DELETED; default: return DataChangedEvent.Type.IGNORED; } } private synchronized String getWithoutCache(final String key) { try { client.useExecStrategy(StrategyType.USUAL); byte[] data = client.getData(key); client.useExecStrategy(StrategyType.SYNC_RETRY); return null == data ? null : new String(data, Charsets.UTF_8); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); return null; } } private void addCacheData(final String cachePath) { PathTree cache = new PathTree(cachePath, client); try { cache.load(); cache.watch(); } catch (final Exception ex) { RegExceptionHandler.handleException(ex); } caches.put(cachePath + "/", cache); } @Override public void close() { for (Entry<String, PathTree> each : caches.entrySet()) { each.getValue().close(); } client.close(); } }
I mean that with [this change](https://github.com/apache/flink/pull/20306#discussion_r930161714), neither wrapping nor the additional flag check (`wrappedStreamClosed`) would be required. Currently, the composition of streams looks like this: 1. `OutputStreamWithPos` <-- closed by the inner `try` block 1. `BufferedOutputStream` 1. `SnappyFramedOutputStream` or `UncompressedStreamCompressionDecorator` 1. maybe `NonClosingOutputStreamDecorator` <-- doesn't proxy `close()` 1. `FSDataOutputStream` <-- closed by the outer `try` block Where (4) `NonClosingOutputStreamDecorator` is conditional in `wrap()`. But that condition is unnecessary because the `StreamCompressionDecorator` instance is already chosen in (3). So if `decorateWithCompression` **always** added `NonClosingOutputStreamDecorator`, that should solve the problem. WDYT?
private UploadTasksResult upload(Path path, Collection<UploadTask> tasks) throws IOException { boolean wrappedStreamClosed = false; FSDataOutputStream fsStream = fileSystem.create(path, NO_OVERWRITE); try { fsStream.write(compression ? 1 : 0); try (OutputStreamWithPos stream = wrap(fsStream)) { final Map<UploadTask, Map<StateChangeSet, Long>> tasksOffsets = new HashMap<>(); for (UploadTask task : tasks) { tasksOffsets.put(task, format.write(stream, task.changeSets)); } StreamStateHandle handle = handleFactory.apply(path, stream.getPos()); changelogRegistry.startTracking( handle, tasks.stream() .flatMap(t -> t.getChangeSets().stream()) .map(StateChangeSet::getLogId) .collect(Collectors.toSet())); return new UploadTasksResult(tasksOffsets, handle); } finally { wrappedStreamClosed = true; } } finally { if (!wrappedStreamClosed || compression) { fsStream.close(); } } }
private UploadTasksResult upload(Path path, Collection<UploadTask> tasks) throws IOException { try (FSDataOutputStream fsStream = fileSystem.create(path, NO_OVERWRITE)) { fsStream.write(compression ? 1 : 0); try (OutputStreamWithPos stream = wrap(fsStream)) { final Map<UploadTask, Map<StateChangeSet, Long>> tasksOffsets = new HashMap<>(); for (UploadTask task : tasks) { tasksOffsets.put(task, format.write(stream, task.changeSets)); } StreamStateHandle handle = handleFactory.apply(path, stream.getPos()); changelogRegistry.startTracking( handle, tasks.stream() .flatMap(t -> t.getChangeSets().stream()) .map(StateChangeSet::getLogId) .collect(Collectors.toSet())); return new UploadTasksResult(tasksOffsets, handle); } } }
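A minimal sketch of the proposal in the comment above: if `decorateWithCompression` always shields its delegate, closing the stream returned by `wrap()` can never close `fsStream`, so the plain try-with-resources in the revised `upload` covers both the compressed and uncompressed paths. `NonClosingOutputStreamDecorator` is the name used in the comment; its assumed behavior (forward everything except `close()`) is spelled out below, and none of this is Flink's verbatim implementation.

```java
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

// Sketch of the proposed contract: the compression decorator always shields
// the underlying stream, so the conditional wrapping in wrap() disappears.
abstract class StreamCompressionDecorator {

    public final OutputStream decorateWithCompression(OutputStream stream) throws IOException {
        // Unconditionally shield the delegate before adding compression.
        return decorateWithCompressionInternal(new NonClosingOutputStreamDecorator(stream));
    }

    protected abstract OutputStream decorateWithCompressionInternal(OutputStream stream) throws IOException;
}

// Assumed behavior of the decorator named in the comment: forward all writes,
// flush on close(), but never close the underlying FSDataOutputStream.
class NonClosingOutputStreamDecorator extends FilterOutputStream {

    NonClosingOutputStreamDecorator(OutputStream delegate) {
        super(delegate);
    }

    @Override
    public void close() throws IOException {
        flush(); // intentionally no super.close()
    }
}
```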
class StateChangeFsUploader implements StateChangeUploader { private static final Logger LOG = LoggerFactory.getLogger(StateChangeFsUploader.class); @VisibleForTesting public static final String PATH_SUB_DIR = "dstl"; private final Path basePath; private final FileSystem fileSystem; private final StateChangeFormat format; private final boolean compression; private final int bufferSize; private final ChangelogStorageMetricGroup metrics; private final Clock clock; private final TaskChangelogRegistry changelogRegistry; private final BiFunction<Path, Long, StreamStateHandle> handleFactory; @VisibleForTesting public StateChangeFsUploader( JobID jobID, Path basePath, FileSystem fileSystem, boolean compression, int bufferSize, ChangelogStorageMetricGroup metrics, TaskChangelogRegistry changelogRegistry) { this( jobID, basePath, fileSystem, compression, bufferSize, metrics, changelogRegistry, FileStateHandle::new); } public StateChangeFsUploader( JobID jobID, Path basePath, FileSystem fileSystem, boolean compression, int bufferSize, ChangelogStorageMetricGroup metrics, TaskChangelogRegistry changelogRegistry, BiFunction<Path, Long, StreamStateHandle> handleFactory) { this.basePath = new Path(basePath, String.format("%s/%s", jobID.toHexString(), PATH_SUB_DIR)); this.fileSystem = fileSystem; this.format = new StateChangeFormat(); this.compression = compression; this.bufferSize = bufferSize; this.metrics = metrics; this.clock = SystemClock.getInstance(); this.changelogRegistry = changelogRegistry; this.handleFactory = handleFactory; } @VisibleForTesting public Path getBasePath() { return this.basePath; } public UploadTasksResult upload(Collection<UploadTask> tasks) throws IOException { final String fileName = generateFileName(); LOG.debug("upload {} tasks to {}", tasks.size(), fileName); Path path = new Path(basePath, fileName); try { return uploadWithMetrics(path, tasks); } catch (IOException e) { metrics.getUploadFailuresCounter().inc(); try (Closer closer = Closer.create()) { closer.register( () -> { throw e; }); tasks.forEach(cs -> closer.register(() -> cs.fail(e))); closer.register(() -> fileSystem.delete(path, true)); } } return null; } private UploadTasksResult uploadWithMetrics(Path path, Collection<UploadTask> tasks) throws IOException { metrics.getUploadsCounter().inc(); long start = clock.relativeTimeNanos(); UploadTasksResult result = upload(path, tasks); metrics.getUploadLatenciesNanos().update(clock.relativeTimeNanos() - start); metrics.getUploadSizes().update(result.getStateSize()); return result; } private OutputStreamWithPos wrap(FSDataOutputStream fsStream) throws IOException { StreamCompressionDecorator instance = compression ? SnappyStreamCompressionDecorator.INSTANCE : UncompressedStreamCompressionDecorator.INSTANCE; OutputStream compressed = compression ? instance.decorateWithCompression(fsStream) : fsStream; return new OutputStreamWithPos(new BufferedOutputStream(compressed, bufferSize)); } private String generateFileName() { return UUID.randomUUID().toString(); } @Override public void close() {} }
class StateChangeFsUploader implements StateChangeUploader { private static final Logger LOG = LoggerFactory.getLogger(StateChangeFsUploader.class); @VisibleForTesting public static final String PATH_SUB_DIR = "dstl"; private final Path basePath; private final FileSystem fileSystem; private final StateChangeFormat format; private final boolean compression; private final int bufferSize; private final ChangelogStorageMetricGroup metrics; private final Clock clock; private final TaskChangelogRegistry changelogRegistry; private final BiFunction<Path, Long, StreamStateHandle> handleFactory; @VisibleForTesting public StateChangeFsUploader( JobID jobID, Path basePath, FileSystem fileSystem, boolean compression, int bufferSize, ChangelogStorageMetricGroup metrics, TaskChangelogRegistry changelogRegistry) { this( jobID, basePath, fileSystem, compression, bufferSize, metrics, changelogRegistry, FileStateHandle::new); } public StateChangeFsUploader( JobID jobID, Path basePath, FileSystem fileSystem, boolean compression, int bufferSize, ChangelogStorageMetricGroup metrics, TaskChangelogRegistry changelogRegistry, BiFunction<Path, Long, StreamStateHandle> handleFactory) { this.basePath = new Path(basePath, String.format("%s/%s", jobID.toHexString(), PATH_SUB_DIR)); this.fileSystem = fileSystem; this.format = new StateChangeFormat(); this.compression = compression; this.bufferSize = bufferSize; this.metrics = metrics; this.clock = SystemClock.getInstance(); this.changelogRegistry = changelogRegistry; this.handleFactory = handleFactory; } @VisibleForTesting public Path getBasePath() { return this.basePath; } public UploadTasksResult upload(Collection<UploadTask> tasks) throws IOException { final String fileName = generateFileName(); LOG.debug("upload {} tasks to {}", tasks.size(), fileName); Path path = new Path(basePath, fileName); try { return uploadWithMetrics(path, tasks); } catch (IOException e) { metrics.getUploadFailuresCounter().inc(); try (Closer closer = Closer.create()) { closer.register( () -> { throw e; }); tasks.forEach(cs -> closer.register(() -> cs.fail(e))); closer.register(() -> fileSystem.delete(path, true)); } } return null; } private UploadTasksResult uploadWithMetrics(Path path, Collection<UploadTask> tasks) throws IOException { metrics.getUploadsCounter().inc(); long start = clock.relativeTimeNanos(); UploadTasksResult result = upload(path, tasks); metrics.getUploadLatenciesNanos().update(clock.relativeTimeNanos() - start); metrics.getUploadSizes().update(result.getStateSize()); return result; } private OutputStreamWithPos wrap(FSDataOutputStream fsStream) throws IOException { StreamCompressionDecorator instance = compression ? SnappyStreamCompressionDecorator.INSTANCE : UncompressedStreamCompressionDecorator.INSTANCE; return new OutputStreamWithPos( new BufferedOutputStream(instance.decorateWithCompression(fsStream), bufferSize)); } private String generateFileName() { return UUID.randomUUID().toString(); } @Override public void close() {} }
You mean the `tenant-id` and `subscription-id`? They're required for AzureResourceManagerAutoConfiguration, see https://github.com/Azure/azure-sdk-for-java/blob/15dda6cdc3219e9128a4d5207cb66d891fd1baf8/sdk/spring/spring-cloud-azure-autoconfigure/src/main/java/com/azure/spring/cloud/autoconfigure/resourcemanager/AzureResourceManagerAutoConfiguration.java#L25
void testAzureProfileWithAzureChina() { this.contextRunner .withUserConfiguration(AzureGlobalPropertiesAutoConfiguration.class) .withBean(AzureResourceManager.class, AzureResourceManagerExt::getAzureResourceManager) .withPropertyValues( "spring.cloud.azure.profile.tenant-id=test-tenant-id", "spring.cloud.azure.profile.subscription-id=test-subscription-id", "spring.cloud.azure.profile.cloud=AZURE_CHINA" ) .run(context -> { assertThat(context).hasSingleBean(AzureProfile.class); AzureProfile azureProfile = context.getBean(AzureProfile.class); Assertions.assertEquals(azureProfile.getEnvironment().getActiveDirectoryEndpoint(), AZURE_CHINA.getActiveDirectoryEndpoint()); }); }
Assertions.assertEquals(azureProfile.getEnvironment().getActiveDirectoryEndpoint(),
void testAzureProfileWithAzureChina() { this.contextRunner .withUserConfiguration(AzureGlobalPropertiesAutoConfiguration.class) .withBean(AzureResourceManager.class, TestAzureResourceManager::getAzureResourceManager) .withPropertyValues( "spring.cloud.azure.profile.tenant-id=test-tenant-id", "spring.cloud.azure.profile.subscription-id=test-subscription-id", "spring.cloud.azure.profile.cloud=AZURE_CHINA" ) .run(context -> { assertThat(context).hasSingleBean(AzureProfile.class); AzureProfile azureProfile = context.getBean(AzureProfile.class); Assertions.assertEquals(azureProfile.getEnvironment().getActiveDirectoryEndpoint(), AZURE_CHINA.getActiveDirectoryEndpoint()); }); }
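For context on why both properties are required: the resource-manager auto-configuration ultimately needs an `AzureProfile`, whose constructor in `com.azure.core.management.profile` takes a tenant ID, a subscription ID, and an environment, roughly as sketched below. The literal values mirror the test's placeholders; they are not real credentials.

```java
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;

final class ProfileSketch {
    // Roughly what gets built from the spring.cloud.azure.profile.* properties.
    static AzureProfile chinaProfile() {
        return new AzureProfile(
                "test-tenant-id",              // spring.cloud.azure.profile.tenant-id
                "test-subscription-id",        // spring.cloud.azure.profile.subscription-id
                AzureEnvironment.AZURE_CHINA); // spring.cloud.azure.profile.cloud=AZURE_CHINA
    }
}
```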
class AzureResourceManagerAutoConfigurationTest { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(AzureResourceManagerAutoConfiguration.class)); @Test void testAzureResourceManagerDisabled() { this.contextRunner .withPropertyValues("spring.cloud.azure.resourcemanager.enabled=false") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void configureWithoutTenantId() { this.contextRunner .withPropertyValues("spring.cloud.azure.resourcemanager.enabled=true") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void configureWithTenantId() { this.contextRunner .withPropertyValues("spring.cloud.azure.profile.tenant-id=test-tenant") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void testWithoutAzureResourceManagerClass() { this.contextRunner.withClassLoader(new FilteredClassLoader(AzureResourceManager.class)) .run(context -> assertThat(context).doesNotHaveBean(AzureProfile.class)); } @Test void testWithoutAzureResourceMetadataClass() { this.contextRunner.withClassLoader(new FilteredClassLoader(AzureResourceMetadata.class)) .run(context -> assertThat(context).doesNotHaveBean(AzureProfile.class)); } @Test void testAzureProfileWithAzureDefault() { this.contextRunner .withUserConfiguration(AzureGlobalPropertiesAutoConfiguration.class) .withBean(AzureResourceManager.class, AzureResourceManagerExt::getAzureResourceManager) .withPropertyValues( "spring.cloud.azure.profile.tenant-id=test-tenant-id", "spring.cloud.azure.profile.subscription-id=test-subscription-id" ) .run(context -> { assertThat(context).hasSingleBean(AzureProfile.class); AzureProfile azureProfile = context.getBean(AzureProfile.class); Assertions.assertEquals(azureProfile.getEnvironment().getActiveDirectoryEndpoint(), AZURE.getActiveDirectoryEndpoint()); }); } static class AzureResourceManagerExt { static AzureResourceManager getAzureResourceManager() { return mock(AzureResourceManager.class); } } }
class AzureResourceManagerAutoConfigurationTest { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(AzureResourceManagerAutoConfiguration.class)); @Test void testAzureResourceManagerDisabled() { this.contextRunner .withPropertyValues("spring.cloud.azure.resourcemanager.enabled=false") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void configureWithoutTenantId() { this.contextRunner .withPropertyValues("spring.cloud.azure.resourcemanager.enabled=true") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void configureWithTenantId() { this.contextRunner .withPropertyValues("spring.cloud.azure.profile.tenant-id=test-tenant") .run(context -> { assertThat(context).doesNotHaveBean(AzureResourceManager.class); assertThat(context).doesNotHaveBean(AzureProfile.class); }); } @Test void testWithoutAzureResourceManagerClass() { this.contextRunner.withClassLoader(new FilteredClassLoader(AzureResourceManager.class)) .run(context -> assertThat(context).doesNotHaveBean(AzureProfile.class)); } @Test void testWithoutAzureResourceMetadataClass() { this.contextRunner.withClassLoader(new FilteredClassLoader(AzureResourceMetadata.class)) .run(context -> assertThat(context).doesNotHaveBean(AzureProfile.class)); } @Test void testAzureProfileWithAzureDefault() { this.contextRunner .withUserConfiguration(AzureGlobalPropertiesAutoConfiguration.class) .withBean(AzureResourceManager.class, TestAzureResourceManager::getAzureResourceManager) .withPropertyValues( "spring.cloud.azure.profile.tenant-id=test-tenant-id", "spring.cloud.azure.profile.subscription-id=test-subscription-id" ) .run(context -> { assertThat(context).hasSingleBean(AzureProfile.class); AzureProfile azureProfile = context.getBean(AzureProfile.class); Assertions.assertEquals(azureProfile.getEnvironment().getActiveDirectoryEndpoint(), AZURE.getActiveDirectoryEndpoint()); }); } }
Looks like the SQL "SELECT id, v1-2 as v, sum(v2) v2 FROM test_having_alias_tb GROUP BY id,v having(v2>1);" still reports the same error. Please double-check whether this is the expected behavior of this PR.
private void analyzeAggregation(Analyzer analyzer) throws AnalysisException { if (havingClause != null) { Expr ambiguousAlias = getFirstAmbiguousAlias(havingClause); if (ambiguousAlias != null) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NON_UNIQ_ERROR, ambiguousAlias.toColumnLabel()); } /* * The having clause need to be substitute by aliasSMap. * And it is analyzed after substitute. * For example: * Query: select k1 a, sum(k2) b from table group by k1 having a > 1; * Having clause: a > 1 * aliasSMap: <a, table.k1> <b, sum(table.k2)> * After substitute: a > 1 changed to table.k1 > 1 * Analyzer: check column and other subquery in having clause * having predicate: table.k1 > 1 */ /* * TODO(ml): support substitute outer column in correlated subquery * For example: select k1 key, sum(k1) sum_k1 from table a group by k1 * having k1 > * (select min(k1) from table b where a.key=b.k2); * TODO: the a.key should be replaced by a.k1 instead of unknown column 'key' in 'a' */ /* according to mysql (https: * "For GROUP BY or HAVING clauses, it searches the FROM clause before searching in the * select_expr values. (For GROUP BY and HAVING, this differs from the pre-MySQL 5.0 behavior * that used the same rules as for ORDER BY.)" * case1: having clause use column name table.v1, because it searches the FROM clause firstly * select id, sum(v1) v1 from table group by id,v1 having(v1>1); * case2: having clause used in aggregate functions, such as sum(v2) here * select id, sum(v1) v1, sum(v2) v2 from table group by id,v1 having(v1>1 AND sum(v2)>1); * case3: having clause use alias name v, because table do not have column name v * select id, floor(v1) v, sum(v2) v2 from table group by id,v having(v>1 AND v2>1); * case4: having clause use alias name vsum, because table do not have column name vsum * select id, floor(v1) v, sum(v2) vsum from table group by id,v having(v>1 AND vsum>1); */ if (groupByClause != null) { ExprSubstitutionMap excludeAliasSMap = aliasSMap.clone(); List<Expr> havingSlots = Lists.newArrayList(); havingClause.collect(SlotRef.class, havingSlots); for (Expr expr : havingSlots) { if (excludeAliasSMap.get(expr) == null) { continue; } try { expr.clone().analyze(analyzer); excludeAliasSMap.removeByLhsExpr(expr); } catch (AnalysisException ex) { } } havingClauseAfterAnaylzed = havingClause.substitute(excludeAliasSMap, analyzer, false); } else { havingClauseAfterAnaylzed = havingClause.substitute(aliasSMap, analyzer, false); } havingClauseAfterAnaylzed = rewriteQueryExprByMvColumnExpr(havingClauseAfterAnaylzed, analyzer); havingClauseAfterAnaylzed.checkReturnsBool("HAVING clause", true); if (groupingInfo != null) { groupingInfo.substituteGroupingFn(Arrays.asList(havingClauseAfterAnaylzed), analyzer); } Expr analyticExpr = havingClauseAfterAnaylzed.findFirstOf(AnalyticExpr.class); if (analyticExpr != null) { throw new AnalysisException( "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql()); } if (isContainInBitmap(havingClauseAfterAnaylzed)) { throw new AnalysisException( "HAVING clause dose not support in bitmap syntax: " + havingClauseAfterAnaylzed.toSql()); } } if (groupByClause == null && !selectList.isDistinct() && !TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())) && (sortInfo == null || !TreeNode.contains(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate()))) { if (havingClauseAfterAnaylzed != null) { if 
(havingClauseAfterAnaylzed.contains(Subquery.class)) { throw new AnalysisException("Only constant expr could be supported in having clause " + "when no aggregation in stmt"); } analyzer.registerConjuncts(havingClauseAfterAnaylzed, true); } return; } if (fromClause.size() == 0) { throw new AnalysisException("Aggregation without a FROM clause is not allowed"); } if (selectList.isDistinct() && groupByClause == null) { List<Expr> aggregateExpr = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggregateExpr); if (aggregateExpr.size() == resultExprs.size()) { selectList.setIsDistinct(false); } } if (selectList.isDistinct() && (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())))) { throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY"); } if (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate())) { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { throw new AnalysisException( "cannot combine '*' in select list with GROUP BY: " + item.toSql()); } } } ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggExprs); if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.collect(Expr.isAggregatePredicate(), aggExprs); } if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs); } ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer); final ExprSubstitutionMap multiCountOrSumDistinctMap = createSumOrCountMultiDistinctSMap(aggExprs, analyzer); countAllMap = ExprSubstitutionMap.compose(multiCountOrSumDistinctMap, countAllMap, analyzer); List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false); resultExprs = Expr.substituteList(resultExprs, countAllMap, analyzer, false); aggExprs.clear(); TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs); List<TupleId> groupingByTupleIds = new ArrayList<>(); if (groupByClause != null) { groupByClause.genGroupingExprs(); ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingInfo != null) { groupingInfo.buildRepeat(groupingExprs, groupByClause.getGroupingSetList()); } substituteOrdinalsAliases(groupingExprs, "GROUP BY", analyzer, false); if (!groupByClause.isGroupByExtension() && !groupingExprs.isEmpty()) { ArrayList<Expr> tempExprs = new ArrayList<>(groupingExprs); groupingExprs.removeIf(Expr::isConstant); if (groupingExprs.isEmpty() && aggExprs.isEmpty()) { groupingExprs.add(tempExprs.get(0)); } } for (int i = 0; i < groupingExprs.size(); i++) { groupingExprs.set(i, rewriteQueryExprByMvColumnExpr(groupingExprs.get(i), analyzer)); } if (groupingInfo != null) { groupingInfo.genOutputTupleDescAndSMap(analyzer, groupingExprs, aggExprs); groupingByTupleIds.add(groupingInfo.getOutputTupleDesc().getId()); } groupByClause.analyze(analyzer); createAggInfo(groupingExprs, aggExprs, analyzer); } else { createAggInfo(new ArrayList<>(), aggExprs, analyzer); } if (aggInfo == null) { return; } AggregateInfo finalAggInfo = aggInfo.getSecondPhaseDistinctAggInfo() != null ? 
aggInfo.getSecondPhaseDistinctAggInfo() : aggInfo; groupingByTupleIds.add(finalAggInfo.getOutputTupleId()); ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose( countAllMap, finalAggInfo.getOutputSmap(), analyzer); if (LOG.isDebugEnabled()) { LOG.debug("combined smap: " + combinedSmap.debugString()); LOG.debug("desctbl: " + analyzer.getDescTbl().debugString()); LOG.debug("resultexprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { List<Subquery> subqueryInHaving = Lists.newArrayList(); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryInHaving); for (Subquery subquery : subqueryInHaving) { if (subquery.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated having clause is not supported"); } } } /* * All of columns of result and having clause are replaced by new slot ref * which is bound by top tuple of agg info. * For example: * ResultExprs: SlotRef(k1), FunctionCall(sum(SlotRef(k2))) * Having predicate: FunctionCall(sum(SlotRef(k2))) > subquery * CombinedSMap: <SlotRef(k1) tuple 0, SlotRef(k1) of tuple 3>, * <FunctionCall(SlotRef(k2)) tuple 0, SlotRef(sum(k2)) of tuple 3> * * After rewritten: * ResultExprs: SlotRef(k1) of tuple 3, SlotRef(sum(k2)) of tuple 3 * Having predicate: SlotRef(sum(k2)) of tuple 3 > subquery */ resultExprs = Expr.substituteList(resultExprs, combinedSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-agg selectListExprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { havingPred = havingClauseAfterAnaylzed.substitute(combinedSmap, analyzer, false); analyzer.registerConjuncts(havingPred, true, finalAggInfo.getOutputTupleId().asList()); if (LOG.isDebugEnabled()) { LOG.debug("post-agg havingPred: " + havingPred.debugString()); } } if (sortInfo != null) { sortInfo.substituteOrderingExprs(combinedSmap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-agg orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } for (int i = 0; i < selectList.getItems().size(); ++i) { if (!resultExprs.get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "select list expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + selectList.getItems().get(i).getExpr().toSql()); } } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { if (!sortInfo.getOrderingExprs().get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "ORDER BY expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + orderByElements.get(i).getExpr().toSql()); } if (sortInfo.getOrderingExprs().get(i).type.isObjectStored()) { throw new AnalysisException("ORDER BY expression could not contain object-stored columnx."); } } } if (havingPred != null) { if (!havingPred.isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "HAVING clause not produced by aggregation output " + "(missing from GROUP BY " + "clause?): " + havingClause.toSql()); } } }
if (excludeAliasSMap.get(expr) == null) {
private void analyzeAggregation(Analyzer analyzer) throws AnalysisException { if (havingClause != null) { Expr ambiguousAlias = getFirstAmbiguousAlias(havingClause); if (ambiguousAlias != null) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NON_UNIQ_ERROR, ambiguousAlias.toColumnLabel()); } /* * The having clause need to be substitute by aliasSMap. * And it is analyzed after substitute. * For example: * Query: select k1 a, sum(k2) b from table group by k1 having a > 1; * Having clause: a > 1 * aliasSMap: <a, table.k1> <b, sum(table.k2)> * After substitute: a > 1 changed to table.k1 > 1 * Analyzer: check column and other subquery in having clause * having predicate: table.k1 > 1 */ /* * TODO(ml): support substitute outer column in correlated subquery * For example: select k1 key, sum(k1) sum_k1 from table a group by k1 * having k1 > * (select min(k1) from table b where a.key=b.k2); * TODO: the a.key should be replaced by a.k1 instead of unknown column 'key' in 'a' */ /* according to mysql (https: * "For GROUP BY or HAVING clauses, it searches the FROM clause before searching in the * select_expr values. (For GROUP BY and HAVING, this differs from the pre-MySQL 5.0 behavior * that used the same rules as for ORDER BY.)" * case1: having clause use column name table.v1, because it searches the FROM clause firstly * select id, sum(v1) v1 from table group by id,v1 having(v1>1); * case2: having clause used in aggregate functions, such as sum(v2) here * select id, sum(v1) v1, sum(v2) v2 from table group by id,v1 having(v1>1 AND sum(v2)>1); * case3: having clause use alias name v, because table do not have column name v * select id, floor(v1) v, sum(v2) v2 from table group by id,v having(v>1 AND v2>1); * case4: having clause use alias name vsum, because table do not have column name vsum * select id, floor(v1) v, sum(v2) vsum from table group by id,v having(v>1 AND vsum>1); */ if (groupByClause != null) { ExprSubstitutionMap excludeAliasSMap = aliasSMap.clone(); List<Expr> havingSlots = Lists.newArrayList(); havingClause.collect(SlotRef.class, havingSlots); for (Expr expr : havingSlots) { if (excludeAliasSMap.get(expr) == null) { continue; } try { expr.clone().analyze(analyzer); excludeAliasSMap.removeByLhsExpr(expr); } catch (AnalysisException ex) { } } havingClauseAfterAnaylzed = havingClause.substitute(excludeAliasSMap, analyzer, false); } else { havingClauseAfterAnaylzed = havingClause.substitute(aliasSMap, analyzer, false); } havingClauseAfterAnaylzed = rewriteQueryExprByMvColumnExpr(havingClauseAfterAnaylzed, analyzer); havingClauseAfterAnaylzed.checkReturnsBool("HAVING clause", true); if (groupingInfo != null) { groupingInfo.substituteGroupingFn(Arrays.asList(havingClauseAfterAnaylzed), analyzer); } Expr analyticExpr = havingClauseAfterAnaylzed.findFirstOf(AnalyticExpr.class); if (analyticExpr != null) { throw new AnalysisException( "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql()); } if (isContainInBitmap(havingClauseAfterAnaylzed)) { throw new AnalysisException( "HAVING clause dose not support in bitmap syntax: " + havingClauseAfterAnaylzed.toSql()); } } if (groupByClause == null && !selectList.isDistinct() && !TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())) && (sortInfo == null || !TreeNode.contains(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate()))) { if (havingClauseAfterAnaylzed != null) { if 
(havingClauseAfterAnaylzed.contains(Subquery.class)) { throw new AnalysisException("Only constant exprs are supported in the having clause " + "when the statement has no aggregation"); } analyzer.registerConjuncts(havingClauseAfterAnaylzed, true); } return; } if (fromClause.size() == 0) { throw new AnalysisException("Aggregation without a FROM clause is not allowed"); } if (selectList.isDistinct() && groupByClause == null) { List<Expr> aggregateExpr = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggregateExpr); if (aggregateExpr.size() == resultExprs.size()) { selectList.setIsDistinct(false); } } if (selectList.isDistinct() && (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())))) { throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY"); } if (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate())) { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { throw new AnalysisException( "cannot combine '*' in select list with GROUP BY: " + item.toSql()); } } } ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggExprs); if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.collect(Expr.isAggregatePredicate(), aggExprs); } if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs); } ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer); final ExprSubstitutionMap multiCountOrSumDistinctMap = createSumOrCountMultiDistinctSMap(aggExprs, analyzer); countAllMap = ExprSubstitutionMap.compose(multiCountOrSumDistinctMap, countAllMap, analyzer); List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false); resultExprs = Expr.substituteList(resultExprs, countAllMap, analyzer, false); aggExprs.clear(); TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs); List<TupleId> groupingByTupleIds = new ArrayList<>(); if (groupByClause != null) { groupByClause.genGroupingExprs(); ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingInfo != null) { groupingInfo.buildRepeat(groupingExprs, groupByClause.getGroupingSetList()); } substituteOrdinalsAliases(groupingExprs, "GROUP BY", analyzer, false); if (!groupByClause.isGroupByExtension() && !groupingExprs.isEmpty()) { ArrayList<Expr> tempExprs = new ArrayList<>(groupingExprs); groupingExprs.removeIf(Expr::isConstant); if (groupingExprs.isEmpty() && aggExprs.isEmpty()) { groupingExprs.add(tempExprs.get(0)); } } for (int i = 0; i < groupingExprs.size(); i++) { groupingExprs.set(i, rewriteQueryExprByMvColumnExpr(groupingExprs.get(i), analyzer)); } if (groupingInfo != null) { groupingInfo.genOutputTupleDescAndSMap(analyzer, groupingExprs, aggExprs); groupingByTupleIds.add(groupingInfo.getOutputTupleDesc().getId()); } groupByClause.analyze(analyzer); createAggInfo(groupingExprs, aggExprs, analyzer); } else { createAggInfo(new ArrayList<>(), aggExprs, analyzer); } if (aggInfo == null) { return; } AggregateInfo finalAggInfo = aggInfo.getSecondPhaseDistinctAggInfo() != null ?
aggInfo.getSecondPhaseDistinctAggInfo() : aggInfo; groupingByTupleIds.add(finalAggInfo.getOutputTupleId()); ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose( countAllMap, finalAggInfo.getOutputSmap(), analyzer); if (LOG.isDebugEnabled()) { LOG.debug("combined smap: " + combinedSmap.debugString()); LOG.debug("desctbl: " + analyzer.getDescTbl().debugString()); LOG.debug("resultexprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { List<Subquery> subqueryInHaving = Lists.newArrayList(); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryInHaving); for (Subquery subquery : subqueryInHaving) { if (subquery.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated having clause is not supported"); } } } /* * All columns of the result and the having clause are replaced by new slot refs * bound by the top tuple of the agg info. * For example: * ResultExprs: SlotRef(k1), FunctionCall(sum(SlotRef(k2))) * Having predicate: FunctionCall(sum(SlotRef(k2))) > subquery * CombinedSMap: <SlotRef(k1) tuple 0, SlotRef(k1) of tuple 3>, * <FunctionCall(SlotRef(k2)) tuple 0, SlotRef(sum(k2)) of tuple 3> * * After rewriting: * ResultExprs: SlotRef(k1) of tuple 3, SlotRef(sum(k2)) of tuple 3 * Having predicate: SlotRef(sum(k2)) of tuple 3 > subquery */ resultExprs = Expr.substituteList(resultExprs, combinedSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-agg selectListExprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { havingPred = havingClauseAfterAnaylzed.substitute(combinedSmap, analyzer, false); analyzer.registerConjuncts(havingPred, true, finalAggInfo.getOutputTupleId().asList()); if (LOG.isDebugEnabled()) { LOG.debug("post-agg havingPred: " + havingPred.debugString()); } } if (sortInfo != null) { sortInfo.substituteOrderingExprs(combinedSmap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-agg orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } for (int i = 0; i < selectList.getItems().size(); ++i) { if (!resultExprs.get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "select list expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + selectList.getItems().get(i).getExpr().toSql()); } } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { if (!sortInfo.getOrderingExprs().get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "ORDER BY expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + orderByElements.get(i).getExpr().toSql()); } if (sortInfo.getOrderingExprs().get(i).type.isObjectStored()) { throw new AnalysisException("ORDER BY expression could not contain an object-stored column."); } } } if (havingPred != null) { if (!havingPred.isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "HAVING clause not produced by aggregation output " + "(missing from GROUP BY " + "clause?): " + havingClause.toSql()); } } }
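A minimal, self-contained sketch (illustrative only, not Doris code; the class, table columns, and aliases are invented) of the MySQL-style name resolution that analyzeAggregation applies to HAVING above: a name binds to a FROM-clause column when one exists, and only falls back to a select-list alias otherwise, matching cases 1-4 in the comment.

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class HavingResolutionSketch {
    public static void main(String[] args) {
        // Columns of the hypothetical FROM-clause table, and the select-list aliases.
        Set<String> tableColumns = new HashSet<>(Arrays.asList("id", "v1", "v2"));
        Map<String, String> selectAliases = new LinkedHashMap<>();
        selectAliases.put("v1", "sum(v1)");   // select id, sum(v1) v1 ...   (case 1: column wins)
        selectAliases.put("v", "floor(v1)");  // select ..., floor(v1) v ... (case 3: alias)
        selectAliases.put("vsum", "sum(v2)"); // select ..., sum(v2) vsum    (case 4: alias)

        for (String name : Arrays.asList("v1", "v", "vsum")) {
            // The FROM clause is searched before select_expr values, mirroring how
            // analyzeAggregation drops a name from excludeAliasSMap once it analyzes as a column.
            String resolved = tableColumns.contains(name)
                    ? "column " + name
                    : selectAliases.getOrDefault(name, "<unresolved>");
            System.out.println(name + " -> " + resolved);
        }
        // Prints: v1 -> column v1, v -> floor(v1), vsum -> sum(v2)
    }
}

The sketch only models the lookup order; in the real code the alias map entry is removed so that the later substitute() call leaves the column reference untouched.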
class SelectStmt extends QueryStmt { private static final Logger LOG = LogManager.getLogger(SelectStmt.class); private UUID id = UUID.randomUUID(); protected SelectList selectList; private final ArrayList<String> colLabels; protected final FromClause fromClause; protected GroupByClause groupByClause; private List<Expr> originalExpr; private Expr havingClause; protected Expr whereClause; private Expr havingPred; private AggregateInfo aggInfo; private AnalyticInfo analyticInfo; private ExprSubstitutionMap baseTblSmap = new ExprSubstitutionMap(); private ValueList valueList; private GroupingInfo groupingInfo; private Expr havingClauseAfterAnaylzed; protected String sqlString; private TableAliasGenerator tableAliasGenerator = null; private SelectList originSelectList; public SelectStmt(ValueList valueList, ArrayList<OrderByElement> orderByElement, LimitElement limitElement) { super(orderByElement, limitElement); this.valueList = valueList; this.selectList = new SelectList(); this.fromClause = new FromClause(); this.colLabels = Lists.newArrayList(); } SelectStmt( SelectList selectList, FromClause fromClause, Expr wherePredicate, GroupByClause groupByClause, Expr havingPredicate, ArrayList<OrderByElement> orderByElements, LimitElement limitElement) { super(orderByElements, limitElement); this.selectList = selectList; this.originSelectList = selectList.clone(); if (fromClause == null) { this.fromClause = new FromClause(); } else { this.fromClause = fromClause; } this.whereClause = wherePredicate; this.groupByClause = groupByClause; this.havingClause = havingPredicate; this.colLabels = Lists.newArrayList(); this.havingPred = null; this.aggInfo = null; this.sortInfo = null; this.groupingInfo = null; } protected SelectStmt(SelectStmt other) { super(other); this.id = other.id; selectList = other.selectList.clone(); fromClause = other.fromClause.clone(); whereClause = (other.whereClause != null) ? other.whereClause.clone() : null; groupByClause = (other.groupByClause != null) ? other.groupByClause.clone() : null; havingClause = (other.havingClause != null) ? other.havingClause.clone() : null; havingClauseAfterAnaylzed = other.havingClauseAfterAnaylzed != null ? other.havingClauseAfterAnaylzed.clone() : null; colLabels = Lists.newArrayList(other.colLabels); aggInfo = (other.aggInfo != null) ? other.aggInfo.clone() : null; analyticInfo = (other.analyticInfo != null) ? other.analyticInfo.clone() : null; sqlString = (other.sqlString != null) ? 
other.sqlString : null; baseTblSmap = other.baseTblSmap.clone(); groupingInfo = null; } @Override public void reset() { super.reset(); selectList.reset(); colLabels.clear(); fromClause.reset(); if (whereClause != null) { whereClause.reset(); } if (groupByClause != null) { groupByClause.reset(); } if (havingClause != null) { havingClause.reset(); } havingClauseAfterAnaylzed = null; havingPred = null; aggInfo = null; analyticInfo = null; baseTblSmap.clear(); groupingInfo = null; } public List<Expr> getAllExprs() { List<Expr> exprs = new ArrayList<Expr>(); if (originSelectList != null) { exprs.addAll(originSelectList.getExprs()); } if (havingClause != null) { exprs.add(havingClause); } if (havingPred != null) { exprs.add(havingPred); } if (havingClauseAfterAnaylzed != null) { exprs.add(havingClauseAfterAnaylzed); } return exprs; } public boolean haveStar() { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { return true; } } return false; } @Override public void resetSelectList() { if (originSelectList != null) { selectList = originSelectList; } } @Override public QueryStmt clone() { return new SelectStmt(this); } public UUID getId() { return id; } /** * @return the original select list items from the query */ public SelectList getSelectList() { return selectList; } public void setSelectList(SelectList selectList) { this.selectList = selectList; } public ValueList getValueList() { return valueList; } /** * @return the HAVING clause post-analysis and with aliases resolved */ public Expr getHavingPred() { return havingPred; } public Expr getHavingClauseAfterAnaylzed() { return havingClauseAfterAnaylzed; } public List<TableRef> getTableRefs() { return fromClause.getTableRefs(); } public Expr getWhereClause() { return whereClause; } public void setWhereClause(Expr whereClause) { this.whereClause = whereClause; } public AggregateInfo getAggInfo() { return aggInfo; } public GroupingInfo getGroupingInfo() { return groupingInfo; } public GroupByClause getGroupByClause() { return groupByClause; } public AnalyticInfo getAnalyticInfo() { return analyticInfo; } public boolean hasAnalyticInfo() { return analyticInfo != null; } public boolean hasHavingClause() { return havingClause != null; } public void removeHavingClause() { havingClause = null; } @Override public SortInfo getSortInfo() { return sortInfo; } @Override public ArrayList<String> getColLabels() { return colLabels; } public ExprSubstitutionMap getBaseTblSmap() { return baseTblSmap; } @Override public void getTables(Analyzer analyzer, boolean expandView, Map<Long, TableIf> tableMap, Set<String> parentViewNameSet) throws AnalysisException { getWithClauseTables(analyzer, expandView, tableMap, parentViewNameSet); for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { QueryStmt inlineStmt = ((InlineViewRef) tblRef).getViewStmt(); inlineStmt.getTables(analyzer, expandView, tableMap, parentViewNameSet); } else if (tblRef instanceof TableValuedFunctionRef) { TableValuedFunctionRef tblFuncRef = (TableValuedFunctionRef) tblRef; tableMap.put(tblFuncRef.getTableFunction().getTable().getId(), tblFuncRef.getTableFunction().getTable()); } else { String dbName = tblRef.getName().getDb(); String tableName = tblRef.getName().getTbl(); if (Strings.isNullOrEmpty(dbName)) { dbName = analyzer.getDefaultDb(); } else { dbName = ClusterNamespace.getFullName(analyzer.getClusterName(), tblRef.getName().getDb()); } if (isViewTableRef(tblRef.getName().toString(), parentViewNameSet)) { continue; } 
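/* Resolve the fully qualified table name below; the SELECT privilege check must pass before the table is added to tableMap. */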
tblRef.getName().analyze(analyzer); DatabaseIf db = analyzer.getEnv().getCatalogMgr() .getCatalogOrAnalysisException(tblRef.getName().getCtl()).getDbOrAnalysisException(dbName); TableIf table = db.getTableOrAnalysisException(tableName); if (expandView && (table instanceof View)) { View view = (View) table; view.getQueryStmt().getTables(analyzer, expandView, tableMap, parentViewNameSet); } else { if (!Env.getCurrentEnv().getAuth() .checkTblPriv(ConnectContext.get(), tblRef.getName(), PrivPredicate.SELECT)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SELECT", ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(), dbName + ": " + tableName); } tableMap.put(table.getId(), table); } } } } @Override public void getTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) { getWithClauseTableRefs(analyzer, tblRefs, parentViewNameSet); for (TableRef tblRef : fromClause) { try { TableRef tmpTblRef = analyzer.resolveTableRef(tblRef); if (tmpTblRef instanceof InlineViewRef) { QueryStmt inlineStmt = ((InlineViewRef) tmpTblRef).getViewStmt(); inlineStmt.getTableRefs(analyzer, tblRefs, parentViewNameSet); } else { if (isViewTableRef(tmpTblRef.getName().toString(), parentViewNameSet)) { continue; } tblRefs.add(tmpTblRef); } } catch (AnalysisException e) { } } } private boolean isViewTableRef(String tblName, Set<String> parentViewNameSet) { if (parentViewNameSet.contains(tblName)) { return true; } if (withClause != null) { List<View> views = withClause.getViews(); for (View view : views) { if (view.getName().equals(tblName)) { return true; } } } return false; } private ColumnAliasGenerator columnAliasGenerator = null; public ColumnAliasGenerator getColumnAliasGenerator() { if (columnAliasGenerator == null) { columnAliasGenerator = new ColumnAliasGenerator(colLabels, null); } return columnAliasGenerator; } public TableAliasGenerator getTableAliasGenerator() { if (tableAliasGenerator == null) { tableAliasGenerator = new TableAliasGenerator(analyzer, null); } return tableAliasGenerator; } public void setTableAliasGenerator(TableAliasGenerator tableAliasGenerator) { this.tableAliasGenerator = tableAliasGenerator; } public void analyze(Analyzer analyzer) throws UserException { if (isAnalyzed()) { return; } super.analyze(analyzer); fromClause.setNeedToSql(needToSql); fromClause.analyze(analyzer); if (!analyzer.isWithClause()) { registerIsNotEmptyPredicates(analyzer); } if (selectList.isExcept()) { List<SelectListItem> items = selectList.getItems(); TableName tblName = items.get(0).getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } ArrayList<String> exceptCols = new ArrayList<>(); for (SelectListItem item : items) { Expr expr = item.getExpr(); if (!(item.getExpr() instanceof SlotRef)) { throw new AnalysisException("`SELECT * EXCEPT` only supports column name."); } exceptCols.add(expr.toColumnLabel()); } resultExprs.removeIf(expr -> exceptCols.contains(expr.toColumnLabel())); colLabels.removeIf(exceptCols::contains); } else { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { item.getExpr().analyze(analyzer); if (!(item.getExpr() instanceof CaseExpr) && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { throw new AnalysisException("Subquery is not supported in the select list."); } Expr expr = 
rewriteQueryExprByMvColumnExpr(item.getExpr(), analyzer); resultExprs.add(expr); SlotRef aliasRef = new SlotRef(null, item.toColumnLabel()); Expr existingAliasExpr = aliasSMap.get(aliasRef); if (existingAliasExpr != null && !existingAliasExpr.equals(item.getExpr())) { ambiguousAliasList.add(aliasRef); } aliasSMap.put(aliasRef, item.getExpr().clone()); colLabels.add(item.toColumnLabel()); } } } if (groupByClause != null && groupByClause.isGroupByExtension()) { ArrayList<Expr> aggFnExprList = new ArrayList<>(); for (SelectListItem item : selectList.getItems()) { aggFnExprList.clear(); getAggregateFnExpr(item.getExpr(), aggFnExprList); for (Expr aggFnExpr : aggFnExprList) { for (Expr expr : groupByClause.getGroupingExprs()) { if (aggFnExpr.contains(expr)) { throw new AnalysisException("column: " + expr.toSql() + " cannot appear in both the select " + "list and aggregate functions when using GROUPING SETS/CUBE/ROLLUP, " + "please use union instead."); } } } } groupingInfo = new GroupingInfo(analyzer, groupByClause); groupingInfo.substituteGroupingFn(resultExprs, analyzer); } else { for (Expr expr : resultExprs) { if (checkGroupingFn(expr)) { throw new AnalysisException( "cannot use GROUPING functions without a [grouping sets|rollup|cube] " + "clause, or when the grouping sets have only one element."); } } } if (valueList != null) { if (!fromInsert) { valueList.analyzeForSelect(analyzer); } for (Expr expr : valueList.getFirstRow()) { if (expr instanceof DefaultValueExpr) { resultExprs.add(new IntLiteral(1)); } else { resultExprs.add(expr); } colLabels.add(expr.toColumnLabel()); } } if (needToSql) { originalExpr = Expr.cloneList(resultExprs); } Expr.analyze(resultExprs, analyzer); if (TreeNode.contains(resultExprs, AnalyticExpr.class)) { if (fromClause.isEmpty()) { throw new AnalysisException("Analytic expressions require FROM clause."); } if (selectList.isDistinct()) { throw new AnalysisException( "cannot combine SELECT DISTINCT with analytic functions"); } } whereClauseRewrite(); if (whereClause != null) { if (checkGroupingFn(whereClause)) { throw new AnalysisException("grouping operations are not allowed in WHERE."); } whereClause.analyze(analyzer); if (whereClause.containsAggregate()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_INVALID_GROUP_FUNC_USE); } whereClause.checkReturnsBool("WHERE clause", false); Expr e = whereClause.findFirstOf(AnalyticExpr.class); if (e != null) { throw new AnalysisException( "WHERE clause must not contain analytic expressions: " + e.toSql()); } analyzer.registerConjuncts(whereClause, false, getTableRefIds()); } createSortInfo(analyzer); if (sortInfo != null && CollectionUtils.isNotEmpty(sortInfo.getOrderingExprs())) { if (groupingInfo != null) { List<Expr> orderingExprNotInSelect = sortInfo.getOrderingExprs().stream() .filter(item -> !resultExprs.contains(item)).collect(Collectors.toList()); groupingInfo.substituteGroupingFn(orderingExprNotInSelect, analyzer); } } analyzeAggregation(analyzer); createAnalyticInfo(analyzer); eliminatingSortNode(); if (evaluateOrderBy) { createSortTupleInfo(analyzer); } if (needToSql) { sqlString = toSql(); } if (analyzer.enableStarJoinReorder()) { LOG.debug("use old reorder logic in select stmt"); reorderTable(analyzer); } resolveInlineViewRefs(analyzer); if (analyzer.hasEmptySpjResultSet() && aggInfo == null) { analyzer.setHasEmptyResultSet(); } if (aggInfo != null) { if (LOG.isDebugEnabled()) { LOG.debug("post-analysis " + aggInfo.debugString()); } } if (hasOutFileClause()) { outFileClause.analyze(analyzer, resultExprs, colLabels); } }
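/* Note: analyze() above runs whereClauseRewrite() (defined below) before registering conjuncts, normalizing a constant integer WHERE clause: "WHERE 0" becomes WHERE false and any other integer literal becomes WHERE true. */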
public List<TupleId> getTableRefIds() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { result.add(ref.getId()); } return result; } public List<TupleId> getTableRefIdsWithoutInlineView() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { continue; } result.add(ref.getId()); } return result; } public boolean hasInlineView() { for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { return true; } } return false; } @Override public List<TupleId> collectTupleIds() { List<TupleId> result = Lists.newArrayList(); resultExprs.forEach(expr -> expr.getIds(result, null)); result.addAll(getTableRefIds()); if (whereClause != null) { whereClause.getIds(result, null); } if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.getIds(result, null); } return result; } private void whereClauseRewrite() { if (whereClause instanceof IntLiteral) { if (((IntLiteral) whereClause).getLongValue() == 0) { whereClause = new BoolLiteral(false); } else { whereClause = new BoolLiteral(true); } } } /** * Generates and registers !empty() predicates to filter out empty collections directly * in the parent scan of collection table refs. This is a performance optimization to * avoid the expensive processing of empty collections inside a subplan that would * yield an empty result set. * <p> * For correctness purposes, the predicates are generated in cases where we can ensure * that they will be assigned only to the parent scan, and no other plan node. * <p> * The conditions are as follows: * - collection table ref is relative and non-correlated * - collection table ref represents the rhs of an inner/cross/semi join * - collection table ref's parent tuple is not outer joined * <p> * TODO: In some cases, it is possible to generate !empty() predicates for a correlated * table ref, but in general, that is not correct for non-trivial query blocks. * For example, if the block with the correlated ref has an aggregation then adding a * !empty() predicate would incorrectly discard rows from the final result set. * TODO: Evaluating !empty() predicates at non-scan nodes interacts poorly with our BE * projection of collection slots. For example, rows could incorrectly be filtered if * a !empty() predicate is assigned to a plan node that comes after the unnest of the * collection that also performs the projection. */ private void registerIsNotEmptyPredicates(Analyzer analyzer) throws AnalysisException { /* for (TableRef tblRef: fromClause_.getTableRefs()) { Preconditions.checkState(tblRef.isResolved()); if (!(tblRef instanceof CollectionTableRef)) continue; CollectionTableRef ref = (CollectionTableRef) tblRef; if (!ref.isRelative() || ref.isCorrelated()) continue; if (ref.getJoinOp().isOuterJoin() || ref.getJoinOp().isAntiJoin()) continue; if (analyzer.isOuterJoined(ref.getResolvedPath().getRootDesc().getId())) continue; IsNotEmptyPredicate isNotEmptyPred = new IsNotEmptyPredicate(ref.getCollectionExpr().clone()); isNotEmptyPred.analyze(analyzer); analyzer.registerOnClauseConjuncts( Lists.<Expr>newArrayList(isNotEmptyPred), ref); } */ } /** * Marks all unassigned join predicates as well as exprs in aggInfo and sortInfo. 
*/ public void materializeRequiredSlots(Analyzer analyzer) throws AnalysisException { List<Expr> unassigned = analyzer.getUnassignedConjuncts(getTableRefIds(), true); List<Expr> unassignedJoinConjuncts = Lists.newArrayList(); for (Expr e : unassigned) { if (analyzer.evalAfterJoin(e)) { unassignedJoinConjuncts.add(e); } } List<Expr> baseTblJoinConjuncts = Expr.trySubstituteList(unassignedJoinConjuncts, baseTblSmap, analyzer, false); analyzer.materializeSlots(baseTblJoinConjuncts); List<Expr> markConjuncts = analyzer.getMarkConjuncts(); markConjuncts = Expr.trySubstituteList(markConjuncts, baseTblSmap, analyzer, false); analyzer.materializeSlots(markConjuncts); if (evaluateOrderBy) { sortInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (hasAnalyticInfo()) { ArrayList<TupleId> tids = Lists.newArrayList(); getMaterializedTupleIds(tids); List<Expr> conjuncts = analyzer.getUnassignedConjuncts(tids); analyzer.materializeSlots(conjuncts); analyticInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (aggInfo != null) { ArrayList<Expr> havingConjuncts = Lists.newArrayList(); if (havingPred != null) { havingConjuncts.add(havingPred); } havingConjuncts.addAll( analyzer.getUnassignedConjuncts(aggInfo.getResultTupleId().asList())); materializeSlots(analyzer, havingConjuncts); aggInfo.materializeRequiredSlots(analyzer, baseTblSmap); } for (TableRef tableRef : fromClause.getTableRefs()) { if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { lateralViewRef.materializeRequiredSlots(baseTblSmap, analyzer); } } } } protected void reorderTable(Analyzer analyzer) throws AnalysisException { List<Pair<TableRef, Long>> candidates = Lists.newArrayList(); ArrayList<TableRef> originOrderBackUp = Lists.newArrayList(fromClause.getTableRefs()); for (TableRef tblRef : fromClause) { if (tblRef.getJoinOp() != JoinOperator.INNER_JOIN || tblRef.hasJoinHints()) { break; } long rowCount = 0; if (tblRef.getTable().getType() == TableType.OLAP) { rowCount = ((OlapTable) (tblRef.getTable())).getRowCount(); LOG.debug("tableName={} rowCount={}", tblRef.getAlias(), rowCount); } candidates.add(Pair.of(tblRef, rowCount)); } int reorderTableCount = candidates.size(); if (reorderTableCount < originOrderBackUp.size()) { fromClause.clear(); fromClause.addAll(originOrderBackUp.subList(0, reorderTableCount)); } long last = 0; for (int i = candidates.size() - 1; i >= 0; --i) { Pair<TableRef, Long> candidate = candidates.get(i); if (candidate.first instanceof InlineViewRef) { candidate.second = last; } last = candidate.second + 1; } Collections.sort(candidates, (a, b) -> b.second.compareTo(a.second)); for (Pair<TableRef, Long> candidate : candidates) { if (reorderTable(analyzer, candidate.first)) { if (reorderTableCount < originOrderBackUp.size()) { fromClause.addAll(originOrderBackUp.subList(reorderTableCount, originOrderBackUp.size())); } return; } } fromClause.clear(); for (TableRef tableRef : originOrderBackUp) { fromClause.add(tableRef); } } protected boolean reorderTable(Analyzer analyzer, TableRef firstRef) throws AnalysisException { List<TableRef> tmpRefList = Lists.newArrayList(); Map<TupleId, TableRef> tableRefMap = Maps.newHashMap(); for (TableRef tblRef : fromClause) { tableRefMap.put(tblRef.getId(), tblRef); tmpRefList.add(tblRef); } fromClause.clear(); fromClause.add(firstRef); tableRefMap.remove(firstRef.getId()); Set<TupleId> validTupleId = Sets.newHashSet(); validTupleId.add(firstRef.getId()); int i = 0; while (i < fromClause.size()) { TableRef tblRef = 
fromClause.get(i); List<Expr> eqJoinPredicates = analyzer.getEqJoinConjuncts(tblRef.getId()); List<TupleId> tupleList = Lists.newArrayList(); Expr.getIds(eqJoinPredicates, tupleList, null); for (TupleId tid : tupleList) { if (validTupleId.contains(tid)) { continue; } TableRef candidateTableRef = tableRefMap.get(tid); if (candidateTableRef != null) { Preconditions.checkState(tid == candidateTableRef.getId()); List<Expr> candidateEqJoinPredicates = analyzer.getEqJoinConjunctsExcludeAuxPredicates(tid); for (Expr candidateEqJoinPredicate : candidateEqJoinPredicates) { List<TupleId> candidateTupleList = Lists.newArrayList(); Expr.getIds(Lists.newArrayList(candidateEqJoinPredicate), candidateTupleList, null); int count = candidateTupleList.size(); for (TupleId tupleId : candidateTupleList) { if (validTupleId.contains(tupleId) || tid.equals(tupleId)) { count--; } } if (count == 0) { fromClause.add(candidateTableRef); validTupleId.add(tid); tableRefMap.remove(tid); break; } } } } i++; } if (0 != tableRefMap.size()) { fromClause.clear(); fromClause.addAll(tmpRefList); return false; } return true; } /** * Populates baseTblSmap_ with our combined inline view smap and creates * baseTblResultExprs. */ protected void resolveInlineViewRefs(Analyzer analyzer) throws AnalysisException { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; baseTblSmap = ExprSubstitutionMap.combine(baseTblSmap, inlineViewRef.getBaseTblSmap()); } } baseTblResultExprs = Expr.trySubstituteList(resultExprs, baseTblSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("baseTblSmap_: " + baseTblSmap.debugString()); LOG.debug("resultExprs: " + Expr.debugString(resultExprs)); LOG.debug("baseTblResultExprs: " + Expr.debugString(baseTblResultExprs)); } } /** * Expand "*" select list item. */ private void expandStar(Analyzer analyzer) throws AnalysisException { if (fromClause.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_TABLES_USED); } for (TableRef tableRef : fromClause) { if (analyzer.isSemiJoined(tableRef.getId())) { continue; } expandStar(new TableName(tableRef.getAliasAsName().getCtl(), tableRef.getAliasAsName().getDb(), tableRef.getAliasAsName().getTbl()), tableRef.getDesc()); if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { expandStar(lateralViewRef.getName(), lateralViewRef.getDesc()); } } } } /** * Expand "<tbl>.*" select list item. */ private void expandStar(Analyzer analyzer, TableName tblName) throws AnalysisException { Collection<TupleDescriptor> descs = analyzer.getDescriptor(tblName); if (descs == null || descs.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_TABLE, tblName.getTbl(), tblName.getDb()); } for (TupleDescriptor desc : descs) { expandStar(tblName, desc); } } /** * Expand "*" for a particular tuple descriptor by appending * refs for each column to selectListExprs. 
*/ private void expandStar(TableName tblName, TupleDescriptor desc) { for (Column col : desc.getTable().getBaseSchema()) { resultExprs.add(new SlotRef(tblName, col.getName())); colLabels.add(col.getName()); } } private boolean isContainInBitmap(Expr expr) { List<Expr> inPredicates = Lists.newArrayList(); expr.collect(InPredicate.class, inPredicates); return inPredicates.stream().anyMatch(e -> e.getChild(1) instanceof Subquery && ((Subquery) e.getChild(1)).getStatement().getResultExprs().get(0).getType().isBitmapType()); } /** * Analyze aggregation-relevant components of the select block (Group By clause, * select list, Order By clause), * Create the AggregationInfo, including the agg output tuple, and transform all post-agg exprs * given AggregationInfo's smap. */ /** * Build smap count_distinct->multi_count_distinct sum_distinct->multi_count_distinct * assumes that select list and having clause have been analyzed. */ private ExprSubstitutionMap createSumOrCountMultiDistinctSMap( ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { final List<FunctionCallExpr> distinctExprs = Lists.newArrayList(); for (FunctionCallExpr aggExpr : aggExprs) { if (aggExpr.isDistinct()) { distinctExprs.add(aggExpr); } } final ExprSubstitutionMap result = new ExprSubstitutionMap(); final boolean isUsingSetForDistinct = AggregateInfo.estimateIfUsingSetForDistinct(distinctExprs); if (!isUsingSetForDistinct) { return result; } for (FunctionCallExpr inputExpr : distinctExprs) { Expr replaceExpr = null; final String functionName = inputExpr.getFnName().getFunction(); if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) { final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); } else if (functionName.equalsIgnoreCase("SUM")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); } else if (functionName.equalsIgnoreCase("AVG")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final FunctionCallExpr sumExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); final FunctionCallExpr countExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); replaceExpr = new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, sumExpr, countExpr); } else { throw new AnalysisException(inputExpr.getFnName() + " can't support multi distinct."); } replaceExpr.analyze(analyzer); result.put(inputExpr, replaceExpr); } if (LOG.isDebugEnabled()) { LOG.debug("multi distinct smap: {}", result.debugString()); } return result; } /** * Create a map from COUNT([ALL]) -> zeroifnull(COUNT([ALL])) if * i) There is no GROUP-BY, and * ii) There are other distinct aggregates to be evaluated. * This transformation is necessary for COUNT to correctly return 0 for empty * input relations. 
*/ private ExprSubstitutionMap createCountAllMap( List<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { ExprSubstitutionMap scalarCountAllMap = new ExprSubstitutionMap(); if (groupByClause != null && !groupByClause.isEmpty()) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isNotDistinctPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return !expr.isDistinct(); } }; if (Iterables.all(aggExprs, isNotDistinctPred)) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isCountPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return expr.getFnName().getFunction().equals(FunctionSet.COUNT); } }; Iterable<FunctionCallExpr> countAllAggs = Iterables.filter(aggExprs, Predicates.and(isCountPred, isNotDistinctPred)); for (FunctionCallExpr countAllAgg : countAllAggs) { ArrayList<Expr> zeroIfNullParam = Lists.newArrayList(countAllAgg.clone(), new IntLiteral(0, Type.BIGINT)); FunctionCallExpr zeroIfNull = new FunctionCallExpr("ifnull", zeroIfNullParam); zeroIfNull.analyze(analyzer); scalarCountAllMap.put(countAllAgg, zeroIfNull); } return scalarCountAllMap; } /** * Create aggInfo for the given grouping and agg exprs. */ private void createAggInfo( ArrayList<Expr> groupingExprs, ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { for (int i = 0; i < aggExprs.size(); i++) { aggExprs.set(i, (FunctionCallExpr) rewriteQueryExprByMvColumnExpr(aggExprs.get(i), analyzer)); } if (selectList.isDistinct()) { Preconditions.checkState(groupingExprs.isEmpty()); Preconditions.checkState(aggExprs.isEmpty()); aggInfo = AggregateInfo.create(Expr.cloneList(resultExprs), null, null, analyzer); } else { if (CollectionUtils.isEmpty(groupingExprs) && CollectionUtils.isEmpty(aggExprs)) { return; } aggInfo = AggregateInfo.create(groupingExprs, aggExprs, null, analyzer); } } /** * If the select list contains AnalyticExprs, create AnalyticInfo and substitute * AnalyticExprs using the AnalyticInfo's smap. 
*/ private void createAnalyticInfo(Analyzer analyzer) throws AnalysisException { ArrayList<Expr> analyticExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, AnalyticExpr.class, analyticExprs); if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), AnalyticExpr.class, analyticExprs); } if (analyticExprs.isEmpty()) { return; } ExprSubstitutionMap rewriteSmap = new ExprSubstitutionMap(); for (Expr expr : analyticExprs) { AnalyticExpr toRewrite = (AnalyticExpr) expr; Expr newExpr = AnalyticExpr.rewrite(toRewrite); if (newExpr != null) { newExpr.analyze(analyzer); if (!rewriteSmap.containsMappingFor(toRewrite)) { rewriteSmap.put(toRewrite, newExpr); } } } if (rewriteSmap.size() > 0) { ArrayList<Expr> updatedAnalyticExprs = Expr.substituteList(analyticExprs, rewriteSmap, analyzer, false); analyticExprs.clear(); TreeNode.collect(updatedAnalyticExprs, AnalyticExpr.class, analyticExprs); } analyticInfo = AnalyticInfo.create(analyticExprs, analyzer); ExprSubstitutionMap smap = analyticInfo.getSmap(); if (rewriteSmap.size() > 0) { smap = ExprSubstitutionMap.compose( rewriteSmap, analyticInfo.getSmap(), analyzer); } resultExprs = Expr.substituteList(resultExprs, smap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-analytic selectListExprs: " + Expr.debugString(resultExprs)); } if (sortInfo != null) { sortInfo.substituteOrderingExprs(smap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-analytic orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } } @Override public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException { Preconditions.checkState(isAnalyzed()); rewriteSelectList(rewriter); for (TableRef ref : fromClause) { ref.rewriteExprs(rewriter, analyzer); } List<Subquery> subqueryExprs = Lists.newArrayList(); if (whereClause != null) { whereClause = rewriter.rewrite(whereClause, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewriter.rewrite(havingClause, analyzer); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().rewriteExprs(rewriter); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { rewriter.rewriteList(groupingExprs, analyzer); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { try { for (Expr expr : oriGroupingExprs) { if (!(expr instanceof SlotRef)) { expr.analyze(analyzer); } } } catch (AnalysisException ex) { } rewriter.rewriteList(oriGroupingExprs, analyzer); for (Expr expr : oriGroupingExprs) { if (!(expr instanceof SlotRef)) { expr.reset(); } } } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElements) { try { if (!(orderByElem.getExpr() instanceof SlotRef)) { orderByElem.getExpr().analyze(analyzer); } } catch (AnalysisException ex) { } orderByElem.setExpr(rewriter.rewrite(orderByElem.getExpr(), analyzer)); if (!(orderByElem.getExpr() instanceof SlotRef)) { orderByElem.getExpr().reset(); } } } } @Override public void collectExprs(Map<String, Expr> exprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } registerExprId(item.getExpr()); exprMap.put(item.getExpr().getId().toString(), item.getExpr()); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { 
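/* Collect subqueries nested in this select item; their statements are traversed recursively later in collectExprs. */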
item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { Preconditions.checkState(ref.isAnalyzed); if (ref.onClause != null) { registerExprId(ref.onClause); exprMap.put(ref.onClause.getId().toString(), ref.onClause); } if (ref instanceof InlineViewRef) { ((InlineViewRef) ref).getViewStmt().collectExprs(exprMap); } } if (whereClause != null) { registerExprId(whereClause); exprMap.put(whereClause.getId().toString(), whereClause); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { registerExprId(havingClauseAfterAnaylzed); exprMap.put(havingClauseAfterAnaylzed.getId().toString(), havingClauseAfterAnaylzed); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { registerExprId(subquery); subquery.getStatement().collectExprs(exprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { for (Expr expr : groupingExprs) { if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { for (Expr expr : oriGroupingExprs) { /* * Suppose there is a query statement: * * ``` * select * i_item_sk as b * from item * group by b * order by b desc * ``` * * where `b` is an alias for `i_item_sk`. * * When analyze is done, it becomes * * ``` * SELECT * `i_item_sk` * FROM `item` * GROUP BY `b` * ORDER BY `b` DESC * ``` * Aliases information of groupBy and orderBy clauses is recorded in `QueryStmt.aliasSMap`. * The select clause has its own alias info in `SelectListItem.alias`. * * Aliases expr in the `group by` and `order by` clauses are not analyzed, * i.e. `Expr.isAnalyzed=false`. Subsequent constant folding will analyze the unanalyzed Expr before * collecting the constant expressions, preventing the `INVALID_TYPE` expr from being sent to BE. * * But when analyzing the alias, the meta information corresponding to the slot cannot be found * in the catalog, an error will be reported. * * So the alias needs to be removed here. 
* */ if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { if (containAlias(orderByElem.getExpr())) { continue; } registerExprId(orderByElem.getExpr()); exprMap.put(orderByElem.getExpr().getId().toString(), orderByElem.getExpr()); } } } @Override public void putBackExprs(Map<String, Expr> rewrittenExprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } item.setExpr(rewrittenExprMap.get(item.getExpr().getId().toString())); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { if (ref.onClause != null) { ref.setOnClause(rewrittenExprMap.get(ref.onClause.getId().toString())); } if (ref instanceof InlineViewRef) { ((InlineViewRef) ref).getViewStmt().putBackExprs(rewrittenExprMap); } } if (whereClause != null) { setWhereClause(rewrittenExprMap.get(whereClause.getId().toString())); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewrittenExprMap.get(havingClauseAfterAnaylzed.getId().toString()); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().putBackExprs(rewrittenExprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { ArrayList<Expr> newGroupingExpr = new ArrayList<>(); for (Expr expr : groupingExprs) { if (expr.getId() == null) { newGroupingExpr.add(expr); } else { newGroupingExpr.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setGroupingExpr(newGroupingExpr); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { ArrayList<Expr> newOriGroupingExprs = new ArrayList<>(); for (Expr expr : oriGroupingExprs) { if (expr.getId() == null) { newOriGroupingExprs.add(expr); } else { newOriGroupingExprs.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setOriGroupingExprs(newOriGroupingExprs); } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { Expr expr = orderByElem.getExpr(); if (expr.getId() == null) { orderByElem.setExpr(expr); } else { orderByElem.setExpr(rewrittenExprMap.get(expr.getId().toString())); } } orderByElements = (ArrayList<OrderByElement>) orderByElementsAfterAnalyzed; } } private void rewriteSelectList(ExprRewriter rewriter) throws AnalysisException { for (SelectListItem item : selectList.getItems()) { if (item.getExpr() instanceof CaseExpr && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { rewriteSubquery(item.getExpr(), analyzer); } } selectList.rewriteExprs(rewriter, analyzer); } /** Rewrite a subquery in a case-when clause into an equivalent inline view. * A subquery in a case-when statement like * * SELECT CASE * WHEN ( * SELECT COUNT(*) / 2 * FROM t * ) > k4 THEN ( * SELECT AVG(k4) * FROM t * ) * ELSE ( * SELECT SUM(k4) * FROM t * ) * END AS kk4 * FROM t; * is equivalent to * * SELECT CASE * WHEN t1.a > k4 THEN t2.a * ELSE t3.a * END AS kk4 * FROM t, ( * SELECT COUNT(*) / 2 AS a * FROM t * ) t1, ( * SELECT AVG(k4) AS a * FROM t * ) t2, ( * SELECT SUM(k4) AS a * FROM t * ) t3; */ private Expr rewriteSubquery(Expr expr, Analyzer analyzer) throws AnalysisException { if (expr instanceof
Subquery) { if (!(((Subquery) expr).getStatement() instanceof SelectStmt)) { throw new AnalysisException("Only select subqueries are supported in a case-when clause."); } if (expr.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated subquery in case-when clause is not supported"); } SelectStmt subquery = (SelectStmt) ((Subquery) expr).getStatement(); if (subquery.resultExprs.size() != 1 || !subquery.returnsSingleRow()) { throw new AnalysisException("Subquery in case-when must return a scalar type"); } subquery.reset(); subquery.setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.EQ); String alias = getTableAliasGenerator().getNextAlias(); String colAlias = getColumnAliasGenerator().getNextAlias(); InlineViewRef inlineViewRef = new InlineViewRef(alias, subquery, Arrays.asList(colAlias)); try { inlineViewRef.analyze(analyzer); } catch (UserException e) { throw new AnalysisException(e.getMessage()); } fromClause.add(inlineViewRef); expr = new SlotRef(inlineViewRef.getAliasAsName(), colAlias); } else if (CollectionUtils.isNotEmpty(expr.getChildren())) { for (int i = 0; i < expr.getChildren().size(); ++i) { expr.setChild(i, rewriteSubquery(expr.getChild(i), analyzer)); } } return expr; } public void eliminatingSortNode() { if (ConnectContext.get() == null || !ConnectContext.get().getSessionVariable().enableEliminateSortNode) { return; } if (!evaluateOrderBy() || getSortInfo() == null || getWhereClause() == null) { return; } List<SlotRef> sortSlots = new ArrayList<>(); for (Expr expr : getSortInfo().getOrderingExprs()) { SlotRef source = expr.getSrcSlotRef(); if (source == null) { return; } sortSlots.add(source); } if (sortSlots.isEmpty()) { return; } if (checkSortNodeEliminable(getWhereClause(), sortSlots) && sortSlots.isEmpty()) { evaluateOrderBy = false; } } private boolean checkSortNodeEliminable(Expr expr, List<SlotRef> sortSlotRefs) { if (expr instanceof CompoundPredicate) { if (((CompoundPredicate) expr).getOp() != Operator.AND) { return false; } } if (expr instanceof BinaryPredicate) { Reference<SlotRef> slotRefRef = new Reference<>(); BinaryPredicate binaryPredicate = (BinaryPredicate) expr; if (binaryPredicate.isSingleColumnPredicate(slotRefRef, null)) { if (binaryPredicate.getOp() != BinaryPredicate.Operator.EQ) { return true; } sortSlotRefs.remove(slotRefRef.getRef()); } } else if (expr instanceof InPredicate) { if (((InPredicate) expr).isNotIn()) { return true; } if (expr.getChildren().size() != 2) { return true; } if (!expr.getChild(1).isConstant()) { return true; } sortSlotRefs.remove(expr.getChild(0).unwrapSlotRef()); } for (Expr child : expr.getChildren()) { if (!checkSortNodeEliminable(child, sortSlotRefs)) { return false; } } return true; } @Override public String toSql() { if (sqlString != null) { return sqlString; } StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toSql()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } ConnectContext ctx = ConnectContext.get(); if (ctx == null || ctx.getSessionVariable().internalSession || toSQLWithSelectList) { for (int i = 0; i < selectList.getItems().size(); i++) { strBuilder.append(selectList.getItems().get(i).toSql()); strBuilder.append((i + 1 != selectList.getItems().size()) ?
", " : ""); } } else { for (int i = 0; i < resultExprs.size(); ++i) { if (i != 0) { strBuilder.append(", "); } if (needToSql) { strBuilder.append(originalExpr.get(i).toSql()); } else { strBuilder.append(resultExprs.get(i).toSql()); } strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toSql()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toSql()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toSql()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toSql()); if (sortInfo != null) { strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? ", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toSql()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toSql()); } return strBuilder.toString(); } @Override public String toDigest() { StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toDigest()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } if (originalExpr == null) { originalExpr = Expr.cloneList(resultExprs); } if (resultExprs.isEmpty()) { for (int i = 0; i < selectList.getItems().size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(selectList.getItems().get(i).toDigest()); } } else { for (int i = 0; i < originalExpr.size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(originalExpr.get(i).toDigest()); strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toDigest()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toDigest()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toDigest()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toDigest()); if (sortInfo != null) { strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? ", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toDigest()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toDigest()); } return strBuilder.toString(); } /** * If the select statement has a sort/top that is evaluated, then the sort tuple * is materialized. Else, if there is aggregation then the aggregate tuple id is * materialized. Otherwise, all referenced tables are materialized as long as they are * not semi-joined. If there are analytics and no sort, then the returned tuple * ids also include the logical analytic output tuple. 
*/ @Override public void getMaterializedTupleIds(ArrayList<TupleId> tupleIdList) { if (evaluateOrderBy) { tupleIdList.add(sortInfo.getSortTupleDescriptor().getId()); } else if (aggInfo != null) { if (aggInfo.isDistinctAgg()) { tupleIdList.add(aggInfo.getSecondPhaseDistinctAggInfo().getOutputTupleId()); } else { tupleIdList.add(aggInfo.getOutputTupleId()); } } else { for (TableRef tblRef : fromClause) { tupleIdList.addAll(tblRef.getMaterializedTupleIds()); } } if (hasAnalyticInfo() && !isEvaluateOrderBy()) { tupleIdList.add(analyticInfo.getOutputTupleId()); } } @Override public void substituteSelectList(Analyzer analyzer, List<String> newColLabels) throws AnalysisException, UserException { if (hasWithClause()) { withClause.analyze(analyzer); } TableRef leftTblRef = null; for (int i = 0; i < fromClause.size(); ++i) { TableRef tblRef = fromClause.get(i); tblRef = analyzer.resolveTableRef(tblRef); Preconditions.checkNotNull(tblRef); fromClause.set(i, tblRef); tblRef.setLeftTblRef(leftTblRef); tblRef.analyze(analyzer); leftTblRef = tblRef; } for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { if (item.getExpr() instanceof AnalyticExpr) { item.getExpr().analyze(analyzer); } if (item.getAlias() != null) { SlotRef aliasRef = new SlotRef(null, item.getAlias()); SlotRef newAliasRef = new SlotRef(null, newColLabels.get(resultExprs.size())); newAliasRef.analysisDone(); aliasSMap.put(aliasRef, newAliasRef); } resultExprs.add(item.getExpr()); } } if (groupByClause != null) { substituteOrdinalsAliases(groupByClause.getGroupingExprs(), "GROUP BY", analyzer, false); } if (havingClause != null) { havingClause = havingClause.clone(aliasSMap); } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { orderByElements = OrderByElement.substitute(orderByElements, aliasSMap, analyzer); } } colLabels.clear(); colLabels.addAll(newColLabels); } public boolean hasWhereClause() { return whereClause != null; } public boolean hasAggInfo() { return aggInfo != null; } public boolean hasGroupByClause() { return groupByClause != null; } /** * Check if the stmt returns a single row. This can happen * in the following cases: * 1. select stmt with a 'limit 1' clause * 2. select stmt with an aggregate function and no group by. * 3. select stmt with no from clause. * <p> * This function may produce false negatives because the cardinality of the * result set also depends on the data a stmt is processing. 
*/ public boolean returnsSingleRow() { if (hasLimitClause() && getLimit() == 1) { return true; } if (fromClause.isEmpty()) { return true; } if (hasAggInfo() && !hasGroupByClause() && !selectList.isDistinct()) { return true; } return false; } @Override public void collectTableRefs(List<TableRef> tblRefs) { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; inlineViewRef.getViewStmt().collectTableRefs(tblRefs); } else { tblRefs.add(tblRef); } } } private boolean checkGroupingFn(Expr expr) { if (expr instanceof GroupingFunctionCallExpr) { return true; } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { if (checkGroupingFn(child)) { return true; } } } return false; } private void getAggregateFnExpr(Expr expr, ArrayList<Expr> aggFnExprList) { if (expr instanceof FunctionCallExpr && expr.fn instanceof AggregateFunction) { aggFnExprList.add(expr); } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { getAggregateFnExpr(child, aggFnExprList); } } } @Override public int hashCode() { return id.hashCode(); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof SelectStmt)) { return false; } return this.id.equals(((SelectStmt) obj).id); } }
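A minimal, self-contained sketch (illustrative only, not Doris code; the class and variable names are invented) of the arithmetic behind createSumOrCountMultiDistinctSMap above: when several DISTINCT aggregates force the set-based path, COUNT(DISTINCT x) and SUM(DISTINCT x) are computed over the distinct value set, and AVG(DISTINCT x) is rewritten as their quotient.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MultiDistinctSketch {
    public static void main(String[] args) {
        List<Integer> column = Arrays.asList(1, 2, 2, 3, 3, 3);
        // The distinct value set that the MULTI_DISTINCT_* functions conceptually maintain.
        Set<Integer> distinct = new HashSet<>(column);

        long multiDistinctCount = distinct.size();                                     // COUNT(DISTINCT x)
        long multiDistinctSum = distinct.stream().mapToLong(Integer::longValue).sum(); // SUM(DISTINCT x)
        double avgDistinct = (double) multiDistinctSum / multiDistinctCount;           // AVG(DISTINCT x)

        System.out.printf("count=%d sum=%d avg=%.2f%n",
                multiDistinctCount, multiDistinctSum, avgDistinct); // count=3 sum=6 avg=2.00
    }
}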
public List<TupleId> getTableRefIds() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { result.add(ref.getId()); } return result; } public List<TupleId> getTableRefIdsWithoutInlineView() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { continue; } result.add(ref.getId()); } return result; } public boolean hasInlineView() { for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { return true; } } return false; } @Override public List<TupleId> collectTupleIds() { List<TupleId> result = Lists.newArrayList(); resultExprs.forEach(expr -> expr.getIds(result, null)); result.addAll(getTableRefIds()); if (whereClause != null) { whereClause.getIds(result, null); } if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.getIds(result, null); } return result; } private void whereClauseRewrite() { if (whereClause instanceof IntLiteral) { if (((IntLiteral) whereClause).getLongValue() == 0) { whereClause = new BoolLiteral(false); } else { whereClause = new BoolLiteral(true); } } } /** * Generates and registers !empty() predicates to filter out empty collections directly * in the parent scan of collection table refs. This is a performance optimization to * avoid the expensive processing of empty collections inside a subplan that would * yield an empty result set. * <p> * For correctness purposes, the predicates are generated in cases where we can ensure * that they will be assigned only to the parent scan, and no other plan node. * <p> * The conditions are as follows: * - collection table ref is relative and non-correlated * - collection table ref represents the rhs of an inner/cross/semi join * - collection table ref's parent tuple is not outer joined * <p> * TODO: In some cases, it is possible to generate !empty() predicates for a correlated * table ref, but in general, that is not correct for non-trivial query blocks. * For example, if the block with the correlated ref has an aggregation then adding a * !empty() predicate would incorrectly discard rows from the final result set. * TODO: Evaluating !empty() predicates at non-scan nodes interacts poorly with our BE * projection of collection slots. For example, rows could incorrectly be filtered if * a !empty() predicate is assigned to a plan node that comes after the unnest of the * collection that also performs the projection. */ private void registerIsNotEmptyPredicates(Analyzer analyzer) throws AnalysisException { /* for (TableRef tblRef: fromClause_.getTableRefs()) { Preconditions.checkState(tblRef.isResolved()); if (!(tblRef instanceof CollectionTableRef)) continue; CollectionTableRef ref = (CollectionTableRef) tblRef; if (!ref.isRelative() || ref.isCorrelated()) continue; if (ref.getJoinOp().isOuterJoin() || ref.getJoinOp().isAntiJoin()) continue; if (analyzer.isOuterJoined(ref.getResolvedPath().getRootDesc().getId())) continue; IsNotEmptyPredicate isNotEmptyPred = new IsNotEmptyPredicate(ref.getCollectionExpr().clone()); isNotEmptyPred.analyze(analyzer); analyzer.registerOnClauseConjuncts( Lists.<Expr>newArrayList(isNotEmptyPred), ref); } */ } /** * Marks all unassigned join predicates as well as exprs in aggInfo and sortInfo. 
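 * (Marking here means the slots referenced by those exprs are materialized via Analyzer.materializeSlots, so they are retained in the physical plan.)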
*/ public void materializeRequiredSlots(Analyzer analyzer) throws AnalysisException { List<Expr> unassigned = analyzer.getUnassignedConjuncts(getTableRefIds(), true); List<Expr> unassignedJoinConjuncts = Lists.newArrayList(); for (Expr e : unassigned) { if (analyzer.evalAfterJoin(e)) { unassignedJoinConjuncts.add(e); } } List<Expr> baseTblJoinConjuncts = Expr.trySubstituteList(unassignedJoinConjuncts, baseTblSmap, analyzer, false); analyzer.materializeSlots(baseTblJoinConjuncts); List<Expr> markConjuncts = analyzer.getMarkConjuncts(); markConjuncts = Expr.trySubstituteList(markConjuncts, baseTblSmap, analyzer, false); analyzer.materializeSlots(markConjuncts); if (evaluateOrderBy) { sortInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (hasAnalyticInfo()) { ArrayList<TupleId> tids = Lists.newArrayList(); getMaterializedTupleIds(tids); List<Expr> conjuncts = analyzer.getUnassignedConjuncts(tids); analyzer.materializeSlots(conjuncts); analyticInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (aggInfo != null) { ArrayList<Expr> havingConjuncts = Lists.newArrayList(); if (havingPred != null) { havingConjuncts.add(havingPred); } havingConjuncts.addAll( analyzer.getUnassignedConjuncts(aggInfo.getResultTupleId().asList())); materializeSlots(analyzer, havingConjuncts); aggInfo.materializeRequiredSlots(analyzer, baseTblSmap); } for (TableRef tableRef : fromClause.getTableRefs()) { if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { lateralViewRef.materializeRequiredSlots(baseTblSmap, analyzer); } } } } protected void reorderTable(Analyzer analyzer) throws AnalysisException { List<Pair<TableRef, Long>> candidates = Lists.newArrayList(); ArrayList<TableRef> originOrderBackUp = Lists.newArrayList(fromClause.getTableRefs()); for (TableRef tblRef : fromClause) { if (tblRef.getJoinOp() != JoinOperator.INNER_JOIN || tblRef.hasJoinHints()) { break; } long rowCount = 0; if (tblRef.getTable().getType() == TableType.OLAP) { rowCount = ((OlapTable) (tblRef.getTable())).getRowCount(); LOG.debug("tableName={} rowCount={}", tblRef.getAlias(), rowCount); } candidates.add(Pair.of(tblRef, rowCount)); } int reorderTableCount = candidates.size(); if (reorderTableCount < originOrderBackUp.size()) { fromClause.clear(); fromClause.addAll(originOrderBackUp.subList(0, reorderTableCount)); } long last = 0; for (int i = candidates.size() - 1; i >= 0; --i) { Pair<TableRef, Long> candidate = candidates.get(i); if (candidate.first instanceof InlineViewRef) { candidate.second = last; } last = candidate.second + 1; } Collections.sort(candidates, (a, b) -> b.second.compareTo(a.second)); for (Pair<TableRef, Long> candidate : candidates) { if (reorderTable(analyzer, candidate.first)) { if (reorderTableCount < originOrderBackUp.size()) { fromClause.addAll(originOrderBackUp.subList(reorderTableCount, originOrderBackUp.size())); } return; } } fromClause.clear(); for (TableRef tableRef : originOrderBackUp) { fromClause.add(tableRef); } } protected boolean reorderTable(Analyzer analyzer, TableRef firstRef) throws AnalysisException { List<TableRef> tmpRefList = Lists.newArrayList(); Map<TupleId, TableRef> tableRefMap = Maps.newHashMap(); for (TableRef tblRef : fromClause) { tableRefMap.put(tblRef.getId(), tblRef); tmpRefList.add(tblRef); } fromClause.clear(); fromClause.add(firstRef); tableRefMap.remove(firstRef.getId()); Set<TupleId> validTupleId = Sets.newHashSet(); validTupleId.add(firstRef.getId()); int i = 0; while (i < fromClause.size()) { TableRef tblRef = 
fromClause.get(i); List<Expr> eqJoinPredicates = analyzer.getEqJoinConjuncts(tblRef.getId()); List<TupleId> tupleList = Lists.newArrayList(); Expr.getIds(eqJoinPredicates, tupleList, null); for (TupleId tid : tupleList) { if (validTupleId.contains(tid)) { continue; } TableRef candidateTableRef = tableRefMap.get(tid); if (candidateTableRef != null) { Preconditions.checkState(tid == candidateTableRef.getId()); List<Expr> candidateEqJoinPredicates = analyzer.getEqJoinConjunctsExcludeAuxPredicates(tid); for (Expr candidateEqJoinPredicate : candidateEqJoinPredicates) { List<TupleId> candidateTupleList = Lists.newArrayList(); Expr.getIds(Lists.newArrayList(candidateEqJoinPredicate), candidateTupleList, null); int count = candidateTupleList.size(); for (TupleId tupleId : candidateTupleList) { if (validTupleId.contains(tupleId) || tid.equals(tupleId)) { count--; } } if (count == 0) { fromClause.add(candidateTableRef); validTupleId.add(tid); tableRefMap.remove(tid); break; } } } } i++; } if (0 != tableRefMap.size()) { fromClause.clear(); fromClause.addAll(tmpRefList); return false; } return true; } /** * Populates baseTblSmap_ with our combined inline view smap and creates * baseTblResultExprs. */ protected void resolveInlineViewRefs(Analyzer analyzer) throws AnalysisException { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; baseTblSmap = ExprSubstitutionMap.combine(baseTblSmap, inlineViewRef.getBaseTblSmap()); } } baseTblResultExprs = Expr.trySubstituteList(resultExprs, baseTblSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("baseTblSmap_: " + baseTblSmap.debugString()); LOG.debug("resultExprs: " + Expr.debugString(resultExprs)); LOG.debug("baseTblResultExprs: " + Expr.debugString(baseTblResultExprs)); } } /** * Expand "*" select list item. */ private void expandStar(Analyzer analyzer) throws AnalysisException { if (fromClause.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_TABLES_USED); } for (TableRef tableRef : fromClause) { if (analyzer.isSemiJoined(tableRef.getId())) { continue; } expandStar(new TableName(tableRef.getAliasAsName().getCtl(), tableRef.getAliasAsName().getDb(), tableRef.getAliasAsName().getTbl()), tableRef.getDesc()); if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { expandStar(lateralViewRef.getName(), lateralViewRef.getDesc()); } } } } /** * Expand "<tbl>.*" select list item. */ private void expandStar(Analyzer analyzer, TableName tblName) throws AnalysisException { Collection<TupleDescriptor> descs = analyzer.getDescriptor(tblName); if (descs == null || descs.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_TABLE, tblName.getTbl(), tblName.getDb()); } for (TupleDescriptor desc : descs) { expandStar(tblName, desc); } } /** * Expand "*" for a particular tuple descriptor by appending * refs for each column to selectListExprs. 
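 * e.g. expanding `t1.*` over a base schema (k1, v1) appends SlotRefs t1.k1 and t1.v1 and the column labels k1, v1.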
*/ private void expandStar(TableName tblName, TupleDescriptor desc) { for (Column col : desc.getTable().getBaseSchema()) { resultExprs.add(new SlotRef(tblName, col.getName())); colLabels.add(col.getName()); } } private boolean isContainInBitmap(Expr expr) { List<Expr> inPredicates = Lists.newArrayList(); expr.collect(InPredicate.class, inPredicates); return inPredicates.stream().anyMatch(e -> e.getChild(1) instanceof Subquery && ((Subquery) e.getChild(1)).getStatement().getResultExprs().get(0).getType().isBitmapType()); } /** * Analyze aggregation-relevant components of the select block (Group By clause, * select list, Order By clause), * Create the AggregationInfo, including the agg output tuple, and transform all post-agg exprs * given AggregationInfo's smap. */ /** * Build smap count_distinct->multi_count_distinct sum_distinct->multi_count_distinct * assumes that select list and having clause have been analyzed. */ private ExprSubstitutionMap createSumOrCountMultiDistinctSMap( ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { final List<FunctionCallExpr> distinctExprs = Lists.newArrayList(); for (FunctionCallExpr aggExpr : aggExprs) { if (aggExpr.isDistinct()) { distinctExprs.add(aggExpr); } } final ExprSubstitutionMap result = new ExprSubstitutionMap(); final boolean isUsingSetForDistinct = AggregateInfo.estimateIfUsingSetForDistinct(distinctExprs); if (!isUsingSetForDistinct) { return result; } for (FunctionCallExpr inputExpr : distinctExprs) { Expr replaceExpr = null; final String functionName = inputExpr.getFnName().getFunction(); if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) { final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); } else if (functionName.equalsIgnoreCase("SUM")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); } else if (functionName.equalsIgnoreCase("AVG")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final FunctionCallExpr sumExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); final FunctionCallExpr countExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); replaceExpr = new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, sumExpr, countExpr); } else { throw new AnalysisException(inputExpr.getFnName() + " can't support multi distinct."); } replaceExpr.analyze(analyzer); result.put(inputExpr, replaceExpr); } if (LOG.isDebugEnabled()) { LOG.debug("multi distinct smap: {}", result.debugString()); } return result; } /** * Create a map from COUNT([ALL]) -> zeroifnull(COUNT([ALL])) if * i) There is no GROUP-BY, and * ii) There are other distinct aggregates to be evaluated. * This transformation is necessary for COUNT to correctly return 0 for empty * input relations. 
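 * Example: for `SELECT count(*), count(DISTINCT c1) FROM t` with no GROUP BY, count(*) is mapped to ifnull(count(*), 0) so an empty input yields 0 rather than NULL.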
*/ private ExprSubstitutionMap createCountAllMap( List<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { ExprSubstitutionMap scalarCountAllMap = new ExprSubstitutionMap(); if (groupByClause != null && !groupByClause.isEmpty()) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isNotDistinctPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return !expr.isDistinct(); } }; if (Iterables.all(aggExprs, isNotDistinctPred)) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isCountPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return expr.getFnName().getFunction().equals(FunctionSet.COUNT); } }; Iterable<FunctionCallExpr> countAllAggs = Iterables.filter(aggExprs, Predicates.and(isCountPred, isNotDistinctPred)); for (FunctionCallExpr countAllAgg : countAllAggs) { ArrayList<Expr> zeroIfNullParam = Lists.newArrayList(countAllAgg.clone(), new IntLiteral(0, Type.BIGINT)); FunctionCallExpr zeroIfNull = new FunctionCallExpr("ifnull", zeroIfNullParam); zeroIfNull.analyze(analyzer); scalarCountAllMap.put(countAllAgg, zeroIfNull); } return scalarCountAllMap; } /** * Create aggInfo for the given grouping and agg exprs. */ private void createAggInfo( ArrayList<Expr> groupingExprs, ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { for (int i = 0; i < aggExprs.size(); i++) { aggExprs.set(i, (FunctionCallExpr) rewriteQueryExprByMvColumnExpr(aggExprs.get(i), analyzer)); } if (selectList.isDistinct()) { Preconditions.checkState(groupingExprs.isEmpty()); Preconditions.checkState(aggExprs.isEmpty()); aggInfo = AggregateInfo.create(Expr.cloneList(resultExprs), null, null, analyzer); } else { if (CollectionUtils.isEmpty(groupingExprs) && CollectionUtils.isEmpty(aggExprs)) { return; } aggInfo = AggregateInfo.create(groupingExprs, aggExprs, null, analyzer); } } /** * If the select list contains AnalyticExprs, create AnalyticInfo and substitute * AnalyticExprs using the AnalyticInfo's smap. 
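 * Example: `rank() OVER (ORDER BY k1)` in the select list is collected here, rewritten via AnalyticExpr.rewrite() when applicable, and then substituted in resultExprs with a slot of the analytic output tuple.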
*/ private void createAnalyticInfo(Analyzer analyzer) throws AnalysisException { ArrayList<Expr> analyticExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, AnalyticExpr.class, analyticExprs); if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), AnalyticExpr.class, analyticExprs); } if (analyticExprs.isEmpty()) { return; } ExprSubstitutionMap rewriteSmap = new ExprSubstitutionMap(); for (Expr expr : analyticExprs) { AnalyticExpr toRewrite = (AnalyticExpr) expr; Expr newExpr = AnalyticExpr.rewrite(toRewrite); if (newExpr != null) { newExpr.analyze(analyzer); if (!rewriteSmap.containsMappingFor(toRewrite)) { rewriteSmap.put(toRewrite, newExpr); } } } if (rewriteSmap.size() > 0) { ArrayList<Expr> updatedAnalyticExprs = Expr.substituteList(analyticExprs, rewriteSmap, analyzer, false); analyticExprs.clear(); TreeNode.collect(updatedAnalyticExprs, AnalyticExpr.class, analyticExprs); } analyticInfo = AnalyticInfo.create(analyticExprs, analyzer); ExprSubstitutionMap smap = analyticInfo.getSmap(); if (rewriteSmap.size() > 0) { smap = ExprSubstitutionMap.compose( rewriteSmap, analyticInfo.getSmap(), analyzer); } resultExprs = Expr.substituteList(resultExprs, smap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-analytic selectListExprs: " + Expr.debugString(resultExprs)); } if (sortInfo != null) { sortInfo.substituteOrderingExprs(smap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-analytic orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } } @Override public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException { Preconditions.checkState(isAnalyzed()); rewriteSelectList(rewriter); for (TableRef ref : fromClause) { ref.rewriteExprs(rewriter, analyzer); } List<Subquery> subqueryExprs = Lists.newArrayList(); if (whereClause != null) { whereClause = rewriter.rewrite(whereClause, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewriter.rewrite(havingClause, analyzer); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().rewriteExprs(rewriter); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { rewriter.rewriteList(groupingExprs, analyzer); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { try { for (Expr expr : oriGroupingExprs) { if (!(expr instanceof SlotRef)) { expr.analyze(analyzer); } } } catch (AnalysisException ex) { } rewriter.rewriteList(oriGroupingExprs, analyzer); for (Expr expr : oriGroupingExprs) { if (!(expr instanceof SlotRef)) { expr.reset(); } } } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElements) { try { if (!(orderByElem.getExpr() instanceof SlotRef)) { orderByElem.getExpr().analyze(analyzer); } } catch (AnalysisException ex) { } orderByElem.setExpr(rewriter.rewrite(orderByElem.getExpr(), analyzer)); if (!(orderByElem.getExpr() instanceof SlotRef)) { orderByElem.getExpr().reset(); } } } } @Override public void collectExprs(Map<String, Expr> exprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } registerExprId(item.getExpr()); exprMap.put(item.getExpr().getId().toString(), item.getExpr()); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { 
item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { Preconditions.checkState(ref.isAnalyzed); if (ref.onClause != null) { registerExprId(ref.onClause); exprMap.put(ref.onClause.getId().toString(), ref.onClause); } if (ref instanceof InlineViewRef) { ((InlineViewRef) ref).getViewStmt().collectExprs(exprMap); } } if (whereClause != null) { registerExprId(whereClause); exprMap.put(whereClause.getId().toString(), whereClause); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { registerExprId(havingClauseAfterAnaylzed); exprMap.put(havingClauseAfterAnaylzed.getId().toString(), havingClauseAfterAnaylzed); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { registerExprId(subquery); subquery.getStatement().collectExprs(exprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { for (Expr expr : groupingExprs) { if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { for (Expr expr : oriGroupingExprs) { /* * Suppose there is a query statement: * * ``` * select * i_item_sk as b * from item * group by b * order by b desc * ``` * * where `b` is an alias for `i_item_sk`. * * When analyze is done, it becomes * * ``` * SELECT * `i_item_sk` * FROM `item` * GROUP BY `b` * ORDER BY `b` DESC * ``` * Aliases information of groupBy and orderBy clauses is recorded in `QueryStmt.aliasSMap`. * The select clause has its own alias info in `SelectListItem.alias`. * * Aliases expr in the `group by` and `order by` clauses are not analyzed, * i.e. `Expr.isAnalyzed=false`. Subsequent constant folding will analyze the unanalyzed Expr before * collecting the constant expressions, preventing the `INVALID_TYPE` expr from being sent to BE. * * But when analyzing the alias, the meta information corresponding to the slot cannot be found * in the catalog, an error will be reported. * * So the alias needs to be removed here. 
* */ if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { if (containAlias(orderByElem.getExpr())) { continue; } registerExprId(orderByElem.getExpr()); exprMap.put(orderByElem.getExpr().getId().toString(), orderByElem.getExpr()); } } } @Override public void putBackExprs(Map<String, Expr> rewrittenExprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } item.setExpr(rewrittenExprMap.get(item.getExpr().getId().toString())); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { if (ref.onClause != null) { ref.setOnClause(rewrittenExprMap.get(ref.onClause.getId().toString())); } if (ref instanceof InlineViewRef) { ((InlineViewRef) ref).getViewStmt().putBackExprs(rewrittenExprMap); } } if (whereClause != null) { setWhereClause(rewrittenExprMap.get(whereClause.getId().toString())); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewrittenExprMap.get(havingClauseAfterAnaylzed.getId().toString()); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().putBackExprs(rewrittenExprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { ArrayList<Expr> newGroupingExpr = new ArrayList<>(); for (Expr expr : groupingExprs) { if (expr.getId() == null) { newGroupingExpr.add(expr); } else { newGroupingExpr.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setGroupingExpr(newGroupingExpr); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { ArrayList<Expr> newOriGroupingExprs = new ArrayList<>(); for (Expr expr : oriGroupingExprs) { if (expr.getId() == null) { newOriGroupingExprs.add(expr); } else { newOriGroupingExprs.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setOriGroupingExprs(newOriGroupingExprs); } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { Expr expr = orderByElem.getExpr(); if (expr.getId() == null) { orderByElem.setExpr(expr); } else { orderByElem.setExpr(rewrittenExprMap.get(expr.getId().toString())); } } orderByElements = (ArrayList<OrderByElement>) orderByElementsAfterAnalyzed; } } private void rewriteSelectList(ExprRewriter rewriter) throws AnalysisException { for (SelectListItem item : selectList.getItems()) { if (item.getExpr() instanceof CaseExpr && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { rewriteSubquery(item.getExpr(), analyzer); } } selectList.rewriteExprs(rewriter, analyzer); } /** equal subquery in case when to an inline view * subquery in case when statement like * * SELECT CASE * WHEN ( * SELECT COUNT(*) / 2 * FROM t * ) > k4 THEN ( * SELECT AVG(k4) * FROM t * ) * ELSE ( * SELECT SUM(k4) * FROM t * ) * END AS kk4 * FROM t; * this statement will be equal to * * SELECT CASE * WHEN t1.a > k4 THEN t2.a * ELSE t3.a * END AS kk4 * FROM t, ( * SELECT COUNT(*) / 2 AS a * FROM t * ) t1, ( * SELECT AVG(k4) AS a * FROM t * ) t2, ( * SELECT SUM(k4) AS a * FROM t * ) t3; */ private Expr rewriteSubquery(Expr expr, Analyzer analyzer) throws AnalysisException { if (expr instanceof 
Subquery) { if (!(((Subquery) expr).getStatement() instanceof SelectStmt)) { throw new AnalysisException("Only support select subquery in case-when clause."); } if (expr.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated subquery in case-when clause is not supported"); } SelectStmt subquery = (SelectStmt) ((Subquery) expr).getStatement(); if (subquery.resultExprs.size() != 1 || !subquery.returnsSingleRow()) { throw new AnalysisException("Subquery in case-when must return scala type"); } subquery.reset(); subquery.setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.EQ); String alias = getTableAliasGenerator().getNextAlias(); String colAlias = getColumnAliasGenerator().getNextAlias(); InlineViewRef inlineViewRef = new InlineViewRef(alias, subquery, Arrays.asList(colAlias)); try { inlineViewRef.analyze(analyzer); } catch (UserException e) { throw new AnalysisException(e.getMessage()); } fromClause.add(inlineViewRef); expr = new SlotRef(inlineViewRef.getAliasAsName(), colAlias); } else if (CollectionUtils.isNotEmpty(expr.getChildren())) { for (int i = 0; i < expr.getChildren().size(); ++i) { expr.setChild(i, rewriteSubquery(expr.getChild(i), analyzer)); } } return expr; } public void eliminatingSortNode() { if (ConnectContext.get() == null || !ConnectContext.get().getSessionVariable().enableEliminateSortNode) { return; } if (!evaluateOrderBy() || getSortInfo() == null || getWhereClause() == null) { return; } List<SlotRef> sortSlots = new ArrayList<>(); for (Expr expr : getSortInfo().getOrderingExprs()) { SlotRef source = expr.getSrcSlotRef(); if (source == null) { return; } sortSlots.add(source); } if (sortSlots.isEmpty()) { return; } if (checkSortNodeEliminable(getWhereClause(), sortSlots) && sortSlots.isEmpty()) { evaluateOrderBy = false; } } private boolean checkSortNodeEliminable(Expr expr, List<SlotRef> sortSlotRefs) { if (expr instanceof CompoundPredicate) { if (((CompoundPredicate) expr).getOp() != Operator.AND) { return false; } } if (expr instanceof BinaryPredicate) { Reference<SlotRef> slotRefRef = new Reference<>(); BinaryPredicate binaryPredicate = (BinaryPredicate) expr; if (binaryPredicate.isSingleColumnPredicate(slotRefRef, null)) { if (binaryPredicate.getOp() != BinaryPredicate.Operator.EQ) { return true; } sortSlotRefs.remove(slotRefRef.getRef()); } } else if (expr instanceof InPredicate) { if (((InPredicate) expr).isNotIn()) { return true; } if (expr.getChildren().size() != 2) { return true; } if (!expr.getChild(1).isConstant()) { return true; } sortSlotRefs.remove(expr.getChild(0).unwrapSlotRef()); } for (Expr child : expr.getChildren()) { if (!checkSortNodeEliminable(child, sortSlotRefs)) { return false; } } return true; } @Override public String toSql() { if (sqlString != null) { return sqlString; } StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toSql()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } ConnectContext ctx = ConnectContext.get(); if (ctx == null || ctx.getSessionVariable().internalSession || toSQLWithSelectList) { for (int i = 0; i < selectList.getItems().size(); i++) { strBuilder.append(selectList.getItems().get(i).toSql()); strBuilder.append((i + 1 != selectList.getItems().size()) ? 
", " : ""); } } else { for (int i = 0; i < resultExprs.size(); ++i) { if (i != 0) { strBuilder.append(", "); } if (needToSql) { strBuilder.append(originalExpr.get(i).toSql()); } else { strBuilder.append(resultExprs.get(i).toSql()); } strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toSql()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toSql()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toSql()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toSql()); if (sortInfo != null) { strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? ", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toSql()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toSql()); } return strBuilder.toString(); } @Override public String toDigest() { StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toDigest()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } if (originalExpr == null) { originalExpr = Expr.cloneList(resultExprs); } if (resultExprs.isEmpty()) { for (int i = 0; i < selectList.getItems().size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(selectList.getItems().get(i).toDigest()); } } else { for (int i = 0; i < originalExpr.size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(originalExpr.get(i).toDigest()); strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toDigest()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toDigest()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toDigest()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toDigest()); if (sortInfo != null) { strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? ", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toDigest()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toDigest()); } return strBuilder.toString(); } /** * If the select statement has a sort/top that is evaluated, then the sort tuple * is materialized. Else, if there is aggregation then the aggregate tuple id is * materialized. Otherwise, all referenced tables are materialized as long as they are * not semi-joined. If there are analytics and no sort, then the returned tuple * ids also include the logical analytic output tuple. 
*/ @Override public void getMaterializedTupleIds(ArrayList<TupleId> tupleIdList) { if (evaluateOrderBy) { tupleIdList.add(sortInfo.getSortTupleDescriptor().getId()); } else if (aggInfo != null) { if (aggInfo.isDistinctAgg()) { tupleIdList.add(aggInfo.getSecondPhaseDistinctAggInfo().getOutputTupleId()); } else { tupleIdList.add(aggInfo.getOutputTupleId()); } } else { for (TableRef tblRef : fromClause) { tupleIdList.addAll(tblRef.getMaterializedTupleIds()); } } if (hasAnalyticInfo() && !isEvaluateOrderBy()) { tupleIdList.add(analyticInfo.getOutputTupleId()); } } @Override public void substituteSelectList(Analyzer analyzer, List<String> newColLabels) throws AnalysisException, UserException { if (hasWithClause()) { withClause.analyze(analyzer); } TableRef leftTblRef = null; for (int i = 0; i < fromClause.size(); ++i) { TableRef tblRef = fromClause.get(i); tblRef = analyzer.resolveTableRef(tblRef); Preconditions.checkNotNull(tblRef); fromClause.set(i, tblRef); tblRef.setLeftTblRef(leftTblRef); tblRef.analyze(analyzer); leftTblRef = tblRef; } for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { if (item.getExpr() instanceof AnalyticExpr) { item.getExpr().analyze(analyzer); } if (item.getAlias() != null) { SlotRef aliasRef = new SlotRef(null, item.getAlias()); SlotRef newAliasRef = new SlotRef(null, newColLabels.get(resultExprs.size())); newAliasRef.analysisDone(); aliasSMap.put(aliasRef, newAliasRef); } resultExprs.add(item.getExpr()); } } if (groupByClause != null) { substituteOrdinalsAliases(groupByClause.getGroupingExprs(), "GROUP BY", analyzer, false); } if (havingClause != null) { havingClause = havingClause.clone(aliasSMap); } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { orderByElements = OrderByElement.substitute(orderByElements, aliasSMap, analyzer); } } colLabels.clear(); colLabels.addAll(newColLabels); } public boolean hasWhereClause() { return whereClause != null; } public boolean hasAggInfo() { return aggInfo != null; } public boolean hasGroupByClause() { return groupByClause != null; } /** * Check if the stmt returns a single row. This can happen * in the following cases: * 1. select stmt with a 'limit 1' clause * 2. select stmt with an aggregate function and no group by. * 3. select stmt with no from clause. * <p> * This function may produce false negatives because the cardinality of the * result set also depends on the data a stmt is processing. 
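 * Example: `SELECT max(k1) FROM t` always returns a single row (case 2), while `SELECT k1 FROM t WHERE k1 = 1` may or may not, so the check stays conservative. */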
*/ public boolean returnsSingleRow() { if (hasLimitClause() && getLimit() == 1) { return true; } if (fromClause.isEmpty()) { return true; } if (hasAggInfo() && !hasGroupByClause() && !selectList.isDistinct()) { return true; } return false; } @Override public void collectTableRefs(List<TableRef> tblRefs) { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; inlineViewRef.getViewStmt().collectTableRefs(tblRefs); } else { tblRefs.add(tblRef); } } } private boolean checkGroupingFn(Expr expr) { if (expr instanceof GroupingFunctionCallExpr) { return true; } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { if (checkGroupingFn(child)) { return true; } } } return false; } private void getAggregateFnExpr(Expr expr, ArrayList<Expr> aggFnExprList) { if (expr instanceof FunctionCallExpr && expr.fn instanceof AggregateFunction) { aggFnExprList.add(expr); } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { getAggregateFnExpr(child, aggFnExprList); } } } @Override public int hashCode() { return id.hashCode(); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof SelectStmt)) { return false; } return this.id.equals(((SelectStmt) obj).id); } }
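A minimal standalone sketch of the sort-elimination idea implemented by eliminatingSortNode/checkSortNodeEliminable in the SelectStmt code above: when AND-ed equality predicates (or single-value, constant IN predicates) in the WHERE clause pin every ORDER BY column, at most one distinct sort key can survive, so the sort node is redundant. The class, method, and variable names below are illustrative stand-ins, not the Doris Expr/SlotRef machinery:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public final class SortEliminationSketch {

    // True when every sort column is pinned to a single constant, mirroring how
    // checkSortNodeEliminable removes covered slots from sortSlots until none remain.
    static boolean sortIsRedundant(Set<String> orderByColumns, Map<String, ?> pinnedColumns) {
        return pinnedColumns.keySet().containsAll(orderByColumns);
    }

    public static void main(String[] args) {
        Set<String> orderBy = new HashSet<>(Arrays.asList("k1", "k2"));
        // SELECT ... WHERE k1 = 1 AND k2 = 'x' ORDER BY k1, k2  ->  sort eliminated
        System.out.println(sortIsRedundant(orderBy, Map.of("k1", 1, "k2", "x"))); // true
        // SELECT ... WHERE k1 = 1 ORDER BY k1, k2  ->  sort still needed
        System.out.println(sortIsRedundant(orderBy, Map.of("k1", 1)));            // false
    }
}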
Please remove the useless blank line here.
public void assertGetEncryptFuzzyQueryValues() { List<Object> encryptFuzzyQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptFuzzyQueryValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (final Object value : encryptFuzzyQueryValues) { assertNull(value); } }
.getEncryptFuzzyQueryValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null));
public void assertGetEncryptFuzzyQueryValues() { List<Object> encryptFuzzyQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptFuzzyQueryValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (Object actual : encryptFuzzyQueryValues) { assertNull(actual); } }
class EncryptRuleTest { @Test public void assertNewInstanceWithAlgorithmProvidedEncryptRuleConfiguration() { EncryptColumnRuleConfiguration encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); AlgorithmProvidedEncryptRuleConfiguration ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration( Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); EncryptRule actual = new EncryptRule(ruleConfig); assertTrue(actual.findEncryptTable("t_encrypt").isPresent()); } @Test public void assertFindEncryptTable() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findEncryptTable("t_encrypt").isPresent()); } @Test public void assertFindEncryptor() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findEncryptor("t_encrypt", "pwd").isPresent()); } @Test public void assertNotFindEncryptor() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findEncryptor("t_encrypt", "other_column").isPresent()); } @Test public void assertGetEncryptValues() { List<Object> encryptAssistedQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (final Object value : encryptAssistedQueryValues) { assertNull(value); } } @Test public void assertGetCipherColumnWhenEncryptColumnExist() { assertThat(new EncryptRule(createEncryptRuleConfiguration()).getCipherColumn("t_encrypt", "pwd"), is("pwd_cipher")); } @Test(expected = NullPointerException.class) public void assertGetCipherColumnWhenNoEncryptColumn() { new EncryptRule(createEncryptRuleConfiguration()).getCipherColumn("t_encrypt", "pwd_cipher"); } @Test public void assertGetLogicAndCipherColumns() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).getLogicAndCipherColumns("t_encrypt").isEmpty()); } @Test public void assertFindAssistedQueryColumn() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findAssistedQueryColumn("t_encrypt", "pwd_cipher").isPresent()); } @Test public void assertGetEncryptAssistedQueryValues() { List<Object> encryptAssistedQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptAssistedQueryValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (final Object value : encryptAssistedQueryValues) { assertNull(value); } } @Test public void assertGetAssistedQueryColumns() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).getAssistedQueryColumns("t_encrypt").isEmpty()); } @Test public void assertFindPlainColumn() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "pwd").isPresent()); assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "credit_card".toLowerCase()).isPresent()); assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "notExistLogicColumn").isPresent()); } @Test public void assertFindFuzzyQueryColumn() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findFuzzyQueryColumn("t_encrypt", "pwd_cipher").isPresent()); } @Test public void assertGetFuzzyQueryColumns() { assertFalse(new
EncryptRule(createEncryptRuleConfiguration()).getFuzzyQueryColumns("t_encrypt").isEmpty()); } @Test public void assertIsQueryWithCipherColumn() { EncryptColumnRuleConfiguration encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); AlgorithmProvidedEncryptRuleConfiguration ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration( Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); EncryptRule actual = new EncryptRule(ruleConfig); assertTrue(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), false); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertFalse(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", true); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), false); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertTrue(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", false); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertFalse(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); } @Test public void assertGetTables() { assertThat(new EncryptRule(createEncryptRuleConfiguration()).getTables(), is(Collections.singleton("t_encrypt"))); } @Test public void assertGetTableWithLowercase() { assertThat(new EncryptRule(createEncryptRuleConfigurationWithUpperCaseLogicTable()).getTables(), is(Collections.singleton("t_encrypt"))); } @Test public void assertTheSameLogicTable() { Collection<String> logicTables = new EncryptRule(createEncryptRuleConfiguration()).getTables(); Collection<String> theSameLogicTables = new EncryptRule(createEncryptRuleConfigurationWithUpperCaseLogicTable()).getTables(); assertThat(logicTables, is(theSameLogicTables)); } @SuppressWarnings("rawtypes") @Test public void assertGetSchemaMetaData() { EncryptRule encryptRule = new EncryptRule(createEncryptRuleConfiguration()); ShardingSphereSchema schema = mock(ShardingSphereSchema.class); encryptRule.setSchemaMetaData("foo_db", Collections.singletonMap("foo_schema", schema)); Optional<EncryptAlgorithm> actual = encryptRule.findEncryptor("t_encrypt", "name"); assertTrue(actual.isPresent()); assertThat(actual.get(), 
instanceOf(CoreSchemaMetaDataAwareEncryptAlgorithmFixture.class)); assertThat(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getDatabaseName(), is("foo_db")); assertThat(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getSchemas(), is(Collections.singletonMap("foo_schema", schema))); assertFalse(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getSchemas().isEmpty()); } private EncryptRuleConfiguration createEncryptRuleConfiguration() { AlgorithmConfiguration queryAssistedEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_ASSISTED.FIXTURE", new Properties()); AlgorithmConfiguration queryFuzzyEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_FUZZY.FIXTURE", new Properties()); AlgorithmConfiguration metaDataAwareEncryptConfig = new AlgorithmConfiguration("CORE.METADATA_AWARE.FIXTURE", new Properties()); EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "pwd_assist", "pwd_fuzzy", "pwd_plain", "test_encryptor", "test_encryptor", "fuzzy_encryptor", null); EncryptColumnRuleConfiguration creditCardColumnConfig = new EncryptColumnRuleConfiguration("credit_card", "credit_card_cipher", "", "", "credit_card_plain", "test_encryptor", null); EncryptColumnRuleConfiguration nameColumnConfig = new EncryptColumnRuleConfiguration("name", "name_cipher", "", "", "name_plain", "customized_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Arrays.asList(pwdColumnConfig, creditCardColumnConfig, nameColumnConfig), null); return new EncryptRuleConfiguration(Collections.singleton(tableConfig), getEncryptors(queryAssistedEncryptConfig, queryFuzzyEncryptConfig, metaDataAwareEncryptConfig)); } @Test public void assertAssistedQueryEncryptorNameSpecified() { EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "pwd_assist", "", "pwd_plain", "test_encryptor", "assisted_query_test_encryptor", null, null); assertThat(pwdColumnConfig.getAssistedQueryEncryptorName(), is("assisted_query_test_encryptor")); } @Test public void assertFuzzyQueryEncryptorNameSpecified() { EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "", "pwd_fuzzy", "pwd_plain", "test_encryptor", "", "fuzzy_query_test_encryptor", null); assertThat(pwdColumnConfig.getFuzzyQueryEncryptorName(), is("fuzzy_query_test_encryptor")); } private EncryptRuleConfiguration createEncryptRuleConfigurationWithUpperCaseLogicTable() { AlgorithmConfiguration queryAssistedEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_ASSISTED.FIXTURE", new Properties()); AlgorithmConfiguration queryFuzzyEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_FUZZY.FIXTURE", new Properties()); AlgorithmConfiguration metaDataAwareEncryptConfig = new AlgorithmConfiguration("CORE.METADATA_AWARE.FIXTURE", new Properties()); EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "", "", "pwd_plain", "test_encryptor", null); EncryptColumnRuleConfiguration creditCardColumnConfig = new EncryptColumnRuleConfiguration("credit_card", "credit_card_cipher", "", "", "credit_card_plain", "test_encryptor", null); EncryptColumnRuleConfiguration nameColumnConfig = new EncryptColumnRuleConfiguration("name", "name_cipher", "", "", "name_plain", "customized_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("T_ENCRYPT", 
Arrays.asList(pwdColumnConfig, creditCardColumnConfig, nameColumnConfig), null); return new EncryptRuleConfiguration(Collections.singleton(tableConfig), getEncryptors(queryAssistedEncryptConfig, queryFuzzyEncryptConfig, metaDataAwareEncryptConfig)); } private Map<String, AlgorithmConfiguration> getEncryptors(final AlgorithmConfiguration queryAssistedEncryptConfig, final AlgorithmConfiguration queryFuzzyEncryptConfig, final AlgorithmConfiguration metaDataAwareEncryptConfig) { Map<String, AlgorithmConfiguration> result = new HashMap<>(2, 1); result.put("test_encryptor", queryAssistedEncryptConfig); result.put("fuzzy_encryptor", queryFuzzyEncryptConfig); result.put("customized_encryptor", metaDataAwareEncryptConfig); return result; } }
class EncryptRuleTest { @Test public void assertNewInstanceWithAlgorithmProvidedEncryptRuleConfiguration() { EncryptColumnRuleConfiguration encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); AlgorithmProvidedEncryptRuleConfiguration ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration( Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); EncryptRule actual = new EncryptRule(ruleConfig); assertTrue(actual.findEncryptTable("t_encrypt").isPresent()); } @Test public void assertFindEncryptTable() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findEncryptTable("t_encrypt").isPresent()); } @Test public void assertFindEncryptor() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findEncryptor("t_encrypt", "pwd").isPresent()); } @Test public void assertNotFindEncryptor() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findEncryptor("t_encrypt", "other_column").isPresent()); } @Test public void assertGetEncryptValues() { List<Object> encryptAssistedQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (Object actual : encryptAssistedQueryValues) { assertNull(actual); } } @Test public void assertGetCipherColumnWhenEncryptColumnExist() { assertThat(new EncryptRule(createEncryptRuleConfiguration()).getCipherColumn("t_encrypt", "pwd"), is("pwd_cipher")); } @Test(expected = NullPointerException.class) public void assertGetCipherColumnWhenNoEncryptColumn() { new EncryptRule(createEncryptRuleConfiguration()).getCipherColumn("t_encrypt", "pwd_cipher"); } @Test public void assertGetLogicAndCipherColumns() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).getLogicAndCipherColumns("t_encrypt").isEmpty()); } @Test public void assertFindAssistedQueryColumn() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findAssistedQueryColumn("t_encrypt", "pwd_cipher").isPresent()); } @Test public void assertGetEncryptAssistedQueryValues() { List<Object> encryptAssistedQueryValues = new EncryptRule(createEncryptRuleConfiguration()) .getEncryptAssistedQueryValues(DefaultDatabase.LOGIC_NAME, DefaultDatabase.LOGIC_NAME, "t_encrypt", "pwd", Collections.singletonList(null)); for (Object actual : encryptAssistedQueryValues) { assertNull(actual); } } @Test public void assertGetAssistedQueryColumns() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).getAssistedQueryColumns("t_encrypt").isEmpty()); } @Test public void assertFindPlainColumn() { assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "pwd").isPresent()); assertTrue(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "credit_card".toLowerCase()).isPresent()); assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findPlainColumn("t_encrypt", "notExistLogicColumn").isPresent()); } @Test public void assertFindFuzzyQueryColumn() { assertFalse(new EncryptRule(createEncryptRuleConfiguration()).findFuzzyQueryColumn("t_encrypt", "pwd_cipher").isPresent()); } @Test public void assertGetFuzzyQueryColumns() { assertFalse(new
EncryptRule(createEncryptRuleConfiguration()).getFuzzyQueryColumns("t_encrypt").isEmpty()); } @Test public void assertIsQueryWithCipherColumn() { EncryptColumnRuleConfiguration encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); AlgorithmProvidedEncryptRuleConfiguration ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration( Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); EncryptRule actual = new EncryptRule(ruleConfig); assertTrue(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", null); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), false); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertFalse(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", true); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), false); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertTrue(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); encryptColumnConfig = new EncryptColumnRuleConfiguration("encrypt_column", "encrypt_cipher", "", "", "", "test_encryptor", false); tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Collections.singletonList(encryptColumnConfig), null); ruleConfig = new AlgorithmProvidedEncryptRuleConfiguration(Collections.singleton(tableConfig), Collections.singletonMap("test_encryptor", new CoreEncryptAlgorithmFixture()), true); actual = new EncryptRule(ruleConfig); assertFalse(actual.isQueryWithCipherColumn("t_encrypt", "encrypt_column")); } @Test public void assertGetTables() { assertThat(new EncryptRule(createEncryptRuleConfiguration()).getTables(), is(Collections.singleton("t_encrypt"))); } @Test public void assertGetTableWithLowercase() { assertThat(new EncryptRule(createEncryptRuleConfigurationWithUpperCaseLogicTable()).getTables(), is(Collections.singleton("t_encrypt"))); } @Test public void assertTheSameLogicTable() { Collection<String> logicTables = new EncryptRule(createEncryptRuleConfiguration()).getTables(); Collection<String> theSameLogicTables = new EncryptRule(createEncryptRuleConfigurationWithUpperCaseLogicTable()).getTables(); assertThat(logicTables, is(theSameLogicTables)); } @SuppressWarnings("rawtypes") @Test public void assertGetSchemaMetaData() { EncryptRule encryptRule = new EncryptRule(createEncryptRuleConfiguration()); ShardingSphereSchema schema = mock(ShardingSphereSchema.class); encryptRule.setSchemaMetaData("foo_db", Collections.singletonMap("foo_schema", schema)); Optional<EncryptAlgorithm> actual = encryptRule.findEncryptor("t_encrypt", "name"); assertTrue(actual.isPresent()); assertThat(actual.get(), 
instanceOf(CoreSchemaMetaDataAwareEncryptAlgorithmFixture.class)); assertThat(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getDatabaseName(), is("foo_db")); assertThat(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getSchemas(), is(Collections.singletonMap("foo_schema", schema))); assertFalse(((CoreSchemaMetaDataAwareEncryptAlgorithmFixture) actual.get()).getSchemas().isEmpty()); } private EncryptRuleConfiguration createEncryptRuleConfiguration() { AlgorithmConfiguration queryAssistedEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_ASSISTED.FIXTURE", new Properties()); AlgorithmConfiguration queryFuzzyEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_FUZZY.FIXTURE", new Properties()); AlgorithmConfiguration metaDataAwareEncryptConfig = new AlgorithmConfiguration("CORE.METADATA_AWARE.FIXTURE", new Properties()); EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "pwd_assist", "pwd_fuzzy", "pwd_plain", "test_encryptor", "test_encryptor", "fuzzy_encryptor", null); EncryptColumnRuleConfiguration creditCardColumnConfig = new EncryptColumnRuleConfiguration("credit_card", "credit_card_cipher", "", "", "credit_card_plain", "test_encryptor", null); EncryptColumnRuleConfiguration nameColumnConfig = new EncryptColumnRuleConfiguration("name", "name_cipher", "", "", "name_plain", "customized_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("t_encrypt", Arrays.asList(pwdColumnConfig, creditCardColumnConfig, nameColumnConfig), null); return new EncryptRuleConfiguration(Collections.singleton(tableConfig), getEncryptors(queryAssistedEncryptConfig, queryFuzzyEncryptConfig, metaDataAwareEncryptConfig)); } @Test public void assertAssistedQueryEncryptorNameSpecified() { EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "pwd_assist", "", "pwd_plain", "test_encryptor", "assisted_query_test_encryptor", null, null); assertThat(pwdColumnConfig.getAssistedQueryEncryptorName(), is("assisted_query_test_encryptor")); } @Test public void assertFuzzyQueryEncryptorNameSpecified() { EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "", "pwd_fuzzy", "pwd_plain", "test_encryptor", "", "fuzzy_query_test_encryptor", null); assertThat(pwdColumnConfig.getFuzzyQueryEncryptorName(), is("fuzzy_query_test_encryptor")); } private EncryptRuleConfiguration createEncryptRuleConfigurationWithUpperCaseLogicTable() { AlgorithmConfiguration queryAssistedEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_ASSISTED.FIXTURE", new Properties()); AlgorithmConfiguration queryFuzzyEncryptConfig = new AlgorithmConfiguration("CORE.QUERY_FUZZY.FIXTURE", new Properties()); AlgorithmConfiguration metaDataAwareEncryptConfig = new AlgorithmConfiguration("CORE.METADATA_AWARE.FIXTURE", new Properties()); EncryptColumnRuleConfiguration pwdColumnConfig = new EncryptColumnRuleConfiguration("pwd", "pwd_cipher", "", "", "pwd_plain", "test_encryptor", null); EncryptColumnRuleConfiguration creditCardColumnConfig = new EncryptColumnRuleConfiguration("credit_card", "credit_card_cipher", "", "", "credit_card_plain", "test_encryptor", null); EncryptColumnRuleConfiguration nameColumnConfig = new EncryptColumnRuleConfiguration("name", "name_cipher", "", "", "name_plain", "customized_encryptor", null); EncryptTableRuleConfiguration tableConfig = new EncryptTableRuleConfiguration("T_ENCRYPT", 
Arrays.asList(pwdColumnConfig, creditCardColumnConfig, nameColumnConfig), null); return new EncryptRuleConfiguration(Collections.singleton(tableConfig), getEncryptors(queryAssistedEncryptConfig, queryFuzzyEncryptConfig, metaDataAwareEncryptConfig)); } private Map<String, AlgorithmConfiguration> getEncryptors(final AlgorithmConfiguration queryAssistedEncryptConfig, final AlgorithmConfiguration queryFuzzyEncryptConfig, final AlgorithmConfiguration metaDataAwareEncryptConfig) { Map<String, AlgorithmConfiguration> result = new HashMap<>(2, 1); result.put("test_encryptor", queryAssistedEncryptConfig); result.put("fuzzy_encryptor", queryFuzzyEncryptConfig); result.put("customized_encryptor", metaDataAwareEncryptConfig); return result; } }
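The rows above exercise ShardingSphere's fuzzy-query column support. As a minimal sketch of how the pieces wire together outside the test class, assuming the 5.x package layout and that the `CORE.*.FIXTURE` algorithm SPIs from the test module are on the classpath (so this compiles against the test fixtures, not production algorithms):
```
// Sketch only: package paths below are assumed from the ShardingSphere 5.x layout.
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.shardingsphere.encrypt.api.config.EncryptRuleConfiguration;
import org.apache.shardingsphere.encrypt.api.config.rule.EncryptColumnRuleConfiguration;
import org.apache.shardingsphere.encrypt.api.config.rule.EncryptTableRuleConfiguration;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration;

public final class FuzzyColumnWiringSketch {
    
    public static void main(final String[] args) {
        // Nine-arg constructor, as in the tests: logic column, cipher, assisted-query,
        // fuzzy-query, plain, encryptor, assisted-query encryptor,
        // fuzzy-query encryptor, queryWithCipherColumn.
        EncryptColumnRuleConfiguration pwd = new EncryptColumnRuleConfiguration(
                "pwd", "pwd_cipher", "pwd_assist", "pwd_fuzzy", "pwd_plain",
                "test_encryptor", "test_encryptor", "fuzzy_encryptor", null);
        EncryptTableRuleConfiguration table = new EncryptTableRuleConfiguration(
                "t_encrypt", Collections.singletonList(pwd), null);
        Map<String, AlgorithmConfiguration> encryptors = new HashMap<>(2, 1);
        encryptors.put("test_encryptor", new AlgorithmConfiguration("CORE.QUERY_ASSISTED.FIXTURE", new Properties()));
        encryptors.put("fuzzy_encryptor", new AlgorithmConfiguration("CORE.QUERY_FUZZY.FIXTURE", new Properties()));
        EncryptRule rule = new EncryptRule(new EncryptRuleConfiguration(Collections.singleton(table), encryptors));
        // Lookups are keyed by the *logic* column name, which is why
        // assertFindFuzzyQueryColumn above expects an empty Optional for "pwd_cipher".
        System.out.println(rule.findFuzzyQueryColumn("t_encrypt", "pwd"));        // expected: Optional[pwd_fuzzy]
        System.out.println(rule.findFuzzyQueryColumn("t_encrypt", "pwd_cipher")); // expected: Optional.empty
    }
}
```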
Oh my bad, `Mockito.mock()` is based on a `Class`, not an actual instance... In that case, it might be "cleaner" to unwrap the proxies before creating the spy, WDYT?
public void beforeAll(Object testInstance) { Class<?> current = testInstance.getClass(); while (current.getSuperclass() != null) { for (Field field : current.getDeclaredFields()) { InjectSpy injectSpyAnnotation = field.getAnnotation(InjectSpy.class); if (injectSpyAnnotation != null) { Object beanInstance = getBeanInstance(testInstance, field); Object spy = createSpyAndSetTestField(testInstance, field, beanInstance); MockitoMocksTracker.track(testInstance, spy, beanInstance); } } current = current.getSuperclass(); } }
while (current.getSuperclass() != null) {
public void beforeAll(Object testInstance) { Class<?> current = testInstance.getClass(); while (current.getSuperclass() != null) { for (Field field : current.getDeclaredFields()) { InjectSpy injectSpyAnnotation = field.getAnnotation(InjectSpy.class); if (injectSpyAnnotation != null) { Object beanInstance = CreateMockitoMocksCallback.getBeanInstance(testInstance, field, InjectSpy.class); Object spy = createSpyAndSetTestField(testInstance, field, beanInstance); MockitoMocksTracker.track(testInstance, spy, beanInstance); } } current = current.getSuperclass(); } }
class CreateMockitoSpiesCallback implements QuarkusTestBeforeAllCallback { @Override private Object createSpyAndSetTestField(Object testInstance, Field field, Object beanInstance) { Object spy = Mockito.spy(beanInstance); field.setAccessible(true); try { field.set(testInstance, spy); } catch (IllegalAccessException e) { throw new RuntimeException(e); } return spy; } private Object getBeanInstance(Object testInstance, Field field) { Class<?> fieldClass = field.getType(); InstanceHandle<?> instance = Arc.container().instance(fieldClass, getQualifiers(field)); if (!instance.isAvailable()) { throw new IllegalStateException("Invalid use of @InjectSpy - could not determine bean of type: " + fieldClass + ". Offending field is " + field.getName() + " of test class " + testInstance.getClass()); } return instance.get(); } private Annotation[] getQualifiers(Field fieldToSpy) { List<Annotation> qualifiers = new ArrayList<>(); Annotation[] fieldAnnotations = fieldToSpy.getDeclaredAnnotations(); for (Annotation fieldAnnotation : fieldAnnotations) { for (Annotation annotationOfFieldAnnotation : fieldAnnotation.annotationType().getAnnotations()) { if (annotationOfFieldAnnotation.annotationType().equals(Qualifier.class)) { qualifiers.add(fieldAnnotation); break; } } } return qualifiers.toArray(new Annotation[0]); } }
class CreateMockitoSpiesCallback implements QuarkusTestBeforeAllCallback { @Override private Object createSpyAndSetTestField(Object testInstance, Field field, Object beanInstance) { ClientProxyUnwrapper unwrapper = new ClientProxyUnwrapper(); Object spy = Mockito.spy(unwrapper.apply(beanInstance)); field.setAccessible(true); try { field.set(testInstance, spy); } catch (IllegalAccessException e) { throw new RuntimeException(e); } return spy; } }
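To make the review point concrete, a minimal sketch of the fix that landed in the context_after snippet: `Mockito.spy(Object)` subclasses the runtime class of the object it is given, so handing it the Arc client proxy would spy the proxy rather than the bean; unwrapping first makes the spy delegate to the actual contextual instance. The `io.quarkus.arc` package path for `ClientProxyUnwrapper` is assumed here.
```
import java.lang.reflect.Field;

import org.mockito.Mockito;

import io.quarkus.arc.ClientProxyUnwrapper; // package assumed

final class SpyOnUnwrappedBean {

    static Object createSpyAndSetTestField(final Object testInstance, final Field field, final Object beanInstance) {
        // beanInstance may be a normal-scoped client proxy; resolve the real bean first,
        // using the same unwrapper as the revised callback above.
        Object unwrapped = new ClientProxyUnwrapper().apply(beanInstance);
        Object spy = Mockito.spy(unwrapped);
        field.setAccessible(true);
        try {
            field.set(testInstance, spy);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        return spy;
    }
}
```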
Hm, I can try to improve the readability a little bit.
public CompletionStage<ResultNode> resolve(SectionResolutionContext context) { return context.resolutionContext().evaluate(iterable).thenCompose(it -> { List<CompletionStage<ResultNode>> results = new ArrayList<>(); Iterator<?> iterator; if (it instanceof Iterable) { iterator = ((Iterable<?>) it).iterator(); } else if (it instanceof Iterator) { iterator = (Iterator<?>) it; } else if (it instanceof Map) { iterator = ((Map<?, ?>) it).entrySet().iterator(); } else if (it instanceof Stream) { iterator = ((Stream<?>) it).sequential().iterator(); } else if (it instanceof Integer) { iterator = IntStream.rangeClosed(1, (Integer) it).iterator(); } else if (it != null && it.getClass().isArray()) { iterator = Arrays.stream((Object[]) it).iterator(); } else { throw new IllegalStateException( String.format("Cannot iterate over [%s] resolved for [%s] in template %s on line %s", it, iterable.toOriginalString(), iterable.origin.getTemplateId(), iterable.origin.getLine())); } int idx = 0; while (iterator.hasNext()) { results.add(nextElement(iterator.next(), idx++, iterator.hasNext(), context)); } if (results.isEmpty()) { return CompletableFuture.completedFuture(ResultNode.NOOP); } CompletableFuture<ResultNode> result = new CompletableFuture<>(); CompletableFuture<ResultNode>[] all = new CompletableFuture[results.size()]; idx = 0; for (CompletionStage<ResultNode> r : results) { all[idx++] = r.toCompletableFuture(); } CompletableFuture .allOf(all) .whenComplete((v, t) -> { if (t != null) { result.completeExceptionally(t); } else { result.complete(new MultiResultNode(all)); } }); return result; }); }
} else if (it != null && it.getClass().isArray()) {
public CompletionStage<ResultNode> resolve(SectionResolutionContext context) { return context.resolutionContext().evaluate(iterable).thenCompose(it -> { if (it == null) { throw new TemplateException(String.format( "Loop section error in template %s on line %s: [%s] resolved to [null] which is not iterable", iterable.origin.getTemplateId(), iterable.origin.getLine(), iterable.toOriginalString())); } List<CompletionStage<ResultNode>> results = new ArrayList<>(); Iterator<?> iterator = extractIterator(it); int idx = 0; while (iterator.hasNext()) { results.add(nextElement(iterator.next(), idx++, iterator.hasNext(), context)); } if (results.isEmpty()) { return CompletableFuture.completedFuture(ResultNode.NOOP); } CompletableFuture<ResultNode> result = new CompletableFuture<>(); CompletableFuture<ResultNode>[] all = new CompletableFuture[results.size()]; idx = 0; for (CompletionStage<ResultNode> r : results) { all[idx++] = r.toCompletableFuture(); } CompletableFuture .allOf(all) .whenComplete((v, t) -> { if (t != null) { result.completeExceptionally(t); } else { result.complete(new MultiResultNode(all)); } }); return result; }); }
class LoopSectionHelper implements SectionHelper { private static final String DEFAULT_ALIAS = "it"; private final String alias; private final Expression iterable; LoopSectionHelper(String alias, Expression iterable) { this.alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias; this.iterable = Objects.requireNonNull(iterable); } @SuppressWarnings("unchecked") @Override CompletionStage<ResultNode> nextElement(Object element, int index, boolean hasNext, SectionResolutionContext context) { AtomicReference<ResolutionContext> resolutionContextHolder = new AtomicReference<>(); ResolutionContext child = context.resolutionContext().createChild(new IterationElement(alias, element, index, hasNext), null); resolutionContextHolder.set(child); return context.execute(child); } public static class Factory implements SectionHelperFactory<LoopSectionHelper> { public static final String HINT = "<for-element>"; private static final String ALIAS = "alias"; private static final String IN = "in"; private static final String ITERABLE = "iterable"; @Override public List<String> getDefaultAliases() { return ImmutableList.of("for", "each"); } @Override public ParametersInfo getParameters() { return ParametersInfo.builder() .addParameter(ALIAS, EMPTY) .addParameter(IN, EMPTY) .addParameter(new Parameter(ITERABLE, null, true)) .build(); } @Override public LoopSectionHelper initialize(SectionInitContext context) { return new LoopSectionHelper(context.getParameter(ALIAS), context.getExpression(ITERABLE)); } @Override public Map<String, String> initializeBlock(Map<String, String> outerNameTypeInfos, BlockInfo block) { if (block.getLabel().equals(MAIN_BLOCK_NAME)) { String iterable = block.getParameters().get(ITERABLE); if (iterable == null) { iterable = ValueResolvers.THIS; } Expression iterableExpr = block.addExpression(ITERABLE, iterable); String alias = block.getParameters().get(ALIAS); if (iterableExpr.typeCheckInfo != null) { alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias; Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos); typeInfos.put(alias, iterableExpr.typeCheckInfo + HINT); return typeInfos; } else { Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos); typeInfos.put(alias, null); return typeInfos; } } else { return Collections.emptyMap(); } } } static class IterationElement implements Mapper { final String alias; final Object element; final int index; final boolean hasNext; public IterationElement(String alias, Object element, int index, boolean hasNext) { this.alias = alias; this.element = element; this.index = index; this.hasNext = hasNext; } @Override public Object get(String key) { if (alias.equals(key)) { return element; } switch (key) { case "count": return index + 1; case "index": return index; case "indexParity": return index % 2 != 0 ? "even" : "odd"; case "hasNext": return hasNext; case "isOdd": case "odd": return index % 2 == 0; case "isEven": case "even": return index % 2 != 0; default: return Result.NOT_FOUND; } } } }
class LoopSectionHelper implements SectionHelper { private static final String DEFAULT_ALIAS = "it"; private final String alias; private final Expression iterable; LoopSectionHelper(String alias, Expression iterable) { this.alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias; this.iterable = Objects.requireNonNull(iterable); } @SuppressWarnings("unchecked") @Override private Iterator<?> extractIterator(Object it) { if (it instanceof Iterable) { return ((Iterable<?>) it).iterator(); } else if (it instanceof Iterator) { return (Iterator<?>) it; } else if (it instanceof Map) { return ((Map<?, ?>) it).entrySet().iterator(); } else if (it instanceof Stream) { return ((Stream<?>) it).sequential().iterator(); } else if (it instanceof Integer) { return IntStream.rangeClosed(1, (Integer) it).iterator(); } else if (it.getClass().isArray()) { return Arrays.stream((Object[]) it).iterator(); } else { throw new TemplateException(String.format( "Loop section error in template %s on line %s: [%s] resolved to [%s] which is not iterable", iterable.origin.getTemplateId(), iterable.origin.getLine(), iterable.toOriginalString(), it.getClass().getName())); } } CompletionStage<ResultNode> nextElement(Object element, int index, boolean hasNext, SectionResolutionContext context) { AtomicReference<ResolutionContext> resolutionContextHolder = new AtomicReference<>(); ResolutionContext child = context.resolutionContext().createChild(new IterationElement(alias, element, index, hasNext), null); resolutionContextHolder.set(child); return context.execute(child); } public static class Factory implements SectionHelperFactory<LoopSectionHelper> { public static final String HINT = "<for-element>"; private static final String ALIAS = "alias"; private static final String IN = "in"; private static final String ITERABLE = "iterable"; @Override public List<String> getDefaultAliases() { return ImmutableList.of("for", "each"); } @Override public ParametersInfo getParameters() { return ParametersInfo.builder() .addParameter(ALIAS, EMPTY) .addParameter(IN, EMPTY) .addParameter(new Parameter(ITERABLE, null, true)) .build(); } @Override public LoopSectionHelper initialize(SectionInitContext context) { return new LoopSectionHelper(context.getParameter(ALIAS), context.getExpression(ITERABLE)); } @Override public Map<String, String> initializeBlock(Map<String, String> outerNameTypeInfos, BlockInfo block) { if (block.getLabel().equals(MAIN_BLOCK_NAME)) { String iterable = block.getParameters().get(ITERABLE); if (iterable == null) { iterable = ValueResolvers.THIS; } Expression iterableExpr = block.addExpression(ITERABLE, iterable); String alias = block.getParameters().get(ALIAS); if (iterableExpr.typeCheckInfo != null) { alias = alias.equals(Parameter.EMPTY) ? 
DEFAULT_ALIAS : alias; Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos); typeInfos.put(alias, iterableExpr.typeCheckInfo + HINT); return typeInfos; } else { Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos); typeInfos.put(alias, null); return typeInfos; } } else { return Collections.emptyMap(); } } } static class IterationElement implements Mapper { final String alias; final Object element; final int index; final boolean hasNext; public IterationElement(String alias, Object element, int index, boolean hasNext) { this.alias = alias; this.element = element; this.index = index; this.hasNext = hasNext; } @Override public Object get(String key) { if (alias.equals(key)) { return element; } switch (key) { case "count": return index + 1; case "index": return index; case "indexParity": return index % 2 != 0 ? "even" : "odd"; case "hasNext": return hasNext; case "isOdd": case "odd": return index % 2 == 0; case "isEven": case "even": return index % 2 != 0; default: return Result.NOT_FOUND; } } } }
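The refactor above pulls the `instanceof` chain out of `resolve()` into `extractIterator()` and moves the null check to the caller. For reference, the extracted helper in isolation, showing the value kinds the loop section accepts; a plain `IllegalArgumentException` stands in for Qute's `TemplateException` so the sketch is self-contained.
```
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.stream.IntStream;
import java.util.stream.Stream;

final class IteratorExtraction {

    static Iterator<?> extractIterator(final Object it) {
        if (it instanceof Iterable) {
            return ((Iterable<?>) it).iterator();
        } else if (it instanceof Iterator) {
            return (Iterator<?>) it;
        } else if (it instanceof Map) {
            return ((Map<?, ?>) it).entrySet().iterator();
        } else if (it instanceof Stream) {
            return ((Stream<?>) it).sequential().iterator();
        } else if (it instanceof Integer) {
            // An Integer n iterates 1..n inclusive.
            return IntStream.rangeClosed(1, (Integer) it).iterator();
        } else if (it.getClass().isArray()) {
            // Same limitation as the original: object arrays only, not primitive arrays.
            return Arrays.stream((Object[]) it).iterator();
        }
        throw new IllegalArgumentException(it + " is not iterable");
    }

    public static void main(final String[] args) {
        extractIterator(5).forEachRemaining(System.out::println); // prints 1..5
    }
}
```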
We skipped paging and polling operations when adding sync stack support for Key Vault, as shown [here](https://github.com/Azure/azure-sdk-for-java/blob/a90cc9a4d40f9492e76d5a4589b321e94c0aa5de/sdk/keyvault/azure-security-keyvault-secrets/src/test/java/com/azure/security/keyvault/secrets/SecretClientTest.java#L61). Has that been taken care of, @g2vinay, @samvaity?
protected HttpClient buildAssertingClient(HttpClient httpClient) { return new AssertingHttpClientBuilder(httpClient) .skipRequest((ignored1, ignored2) -> false) .assertAsync() .build(); }
.build();
protected HttpClient buildAssertingClient(HttpClient httpClient) { return new AssertingHttpClientBuilder(httpClient) .skipRequest((ignored1, ignored2) -> false) .assertAsync() .build(); }
class TableServiceAsyncClientTest extends TableServiceClientTestBase { private static final Duration TIMEOUT = Duration.ofSeconds(100); private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault(); private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest(); private TableServiceAsyncClient serviceClient; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(TIMEOUT); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode()); serviceClient = getClientBuilder(connectionString).buildAsyncClient(); } @Test public void serviceCreateTable() { String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } /** * Tests that a table and entity can be created while having a different tenant ID than the one that will be * provided in the authentication challenge. */ @Test public void serviceCreateTableWithMultipleTenants() { Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net") && serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06); String tableName = testResourceNamer.randomName("tableName", 20); final ClientSecretCredential credential = new ClientSecretCredentialBuilder() .clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId")) .clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret")) .tenantId(testResourceNamer.randomUuid()) .additionallyAllowedTenants("*") .build(); final TableServiceAsyncClient tableServiceAsyncClient = getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT", "https: StepVerifier.create(tableServiceAsyncClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); tableName = testResourceNamer.randomName("tableName", 20); StepVerifier.create(tableServiceAsyncClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } @Test public void serviceCreateTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; StepVerifier.create(serviceClient.createTableWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNotNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceCreateTableFailsIfExists() { String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTable(tableName)) .expectErrorMatches(e -> e instanceof TableServiceException && ((TableServiceException) e).getResponse().getStatusCode() == 409) .verify(); } @Test public void serviceCreateTableIfNotExists() { String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.createTableIfNotExists(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsSucceedsIfExists() { String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTableIfNotExists(tableName)) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsWithResponse() { String tableName 
= testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNotNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 409; serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceDeleteTable() { final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.deleteTable(tableName)) .expectComplete() .verify(); } @Test public void serviceDeleteNonExistingTable() { final String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.deleteTable(tableName)) .expectComplete() .verify(); } @Test public void serviceDeleteTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; serviceClient.createTable(tableName).block(); StepVerifier.create(serviceClient.deleteTableWithResponse(tableName)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void serviceDeleteNonExistingTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 404; StepVerifier.create(serviceClient.deleteTableWithResponse(tableName)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void serviceListTables() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); StepVerifier.create(serviceClient.listTables()) .expectNextCount(2) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } @Test public void serviceListTablesWithFilter() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'"); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); StepVerifier.create(serviceClient.listTables(options)) .assertNext(table -> assertEquals(tableName, table.getName())) .expectNextCount(0) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } @Test public void serviceListTablesWithTop() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); final String tableName3 = testResourceNamer.randomName("test", 20); ListTablesOptions options = new ListTablesOptions().setTop(2); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); serviceClient.createTable(tableName3).block(TIMEOUT); StepVerifier.create(serviceClient.listTables(options)) .expectNextCount(2) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } 
@Test public void serviceGetTableClient() { final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); TableAsyncClient tableClient = serviceClient.getTableClient(tableName); TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey"); } @Test public void generateAccountSasTokenWithMinimumParameters() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY; final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()); final String sas = serviceClient.generateAccountSas(sasSignatureValues); assertTrue( sas.startsWith( "sv=2019-02-02" + "&ss=t" + "&srt=o" + "&se=2021-12-12T00%3A00%3A00Z" + "&sp=r" + "&spr=https" + "&sig=" ) ); } @Test public void generateAccountSasTokenWithAllParameters() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP; final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); final TableSasIpRange ipRange = TableSasIpRange.parse("a-b"); final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()) .setStartTime(startTime) .setSasIpRange(ipRange); final String sas = serviceClient.generateAccountSas(sasSignatureValues); assertTrue( sas.startsWith( "sv=2019-02-02" + "&ss=t" + "&srt=o" + "&st=2015-01-01T00%3A00%3A00Z" + "&se=2021-12-12T00%3A00%3A00Z" + "&sp=rdau" + "&sip=a-b" + "&spr=https%2Chttp" + "&sig=" ) ); } @Test @Disabled public void canUseSasTokenToCreateValidTableClient() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY; final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()); final String sas = serviceClient.generateAccountSas(sasSignatureValues); final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); final TableClientBuilder tableClientBuilder = new TableClientBuilder() .httpLogOptions(new 
HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .endpoint(serviceClient.getServiceEndpoint()) .sasToken(sas) .tableName(tableName); if (interceptorManager.isPlaybackMode()) { tableClientBuilder.httpClient(playbackClient); } else { tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT); if (!interceptorManager.isLiveMode()) { tableClientBuilder.addPolicy(recordPolicy); } tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500), Duration.ofSeconds(100)))); } final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient(); final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20); final String rowKeyValue = testResourceNamer.randomName("rowKey", 20); final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue); final int expectedStatusCode = 204; StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void setGetProperties() { Assumptions.assumeFalse(IS_COSMOS_TEST, "Setting and getting properties is not supported on Cosmos endpoints."); TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy() .setDaysToRetain(5) .setEnabled(true); TableServiceLogging logging = new TableServiceLogging() .setReadLogged(true) .setAnalyticsVersion("1.0") .setRetentionPolicy(retentionPolicy); List<TableServiceCorsRule> corsRules = new ArrayList<>(); corsRules.add(new TableServiceCorsRule() .setAllowedMethods("GET,PUT,HEAD") .setAllowedOrigins("*") .setAllowedHeaders("x-ms-version") .setExposedHeaders("x-ms-client-request-id") .setMaxAgeInSeconds(10)); TableServiceMetrics hourMetrics = new TableServiceMetrics() .setEnabled(true) .setVersion("1.0") .setRetentionPolicy(retentionPolicy) .setIncludeApis(true); TableServiceMetrics minuteMetrics = new TableServiceMetrics() .setEnabled(true) .setVersion("1.0") .setRetentionPolicy(retentionPolicy) .setIncludeApis(true); TableServiceProperties sentProperties = new TableServiceProperties() .setLogging(logging) .setCorsRules(corsRules) .setMinuteMetrics(minuteMetrics) .setHourMetrics(hourMetrics); StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties)) .assertNext(response -> { assertNotNull(response.getHeaders().getValue("x-ms-request-id")); assertNotNull(response.getHeaders().getValue("x-ms-version")); }) .expectComplete() .verify(); sleepIfRunningAgainstService(30000); StepVerifier.create(serviceClient.getProperties()) .assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties)) .expectComplete() .verify(); } @Test public void getStatistics() throws URISyntaxException { Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints."); URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint()); String[] hostParts = primaryEndpoint.getHost().split("\\."); StringJoiner secondaryHostJoiner = new StringJoiner("."); secondaryHostJoiner.add(hostParts[0] + "-secondary"); for (int i = 1; i < hostParts.length; i++) { secondaryHostJoiner.add(hostParts[i]); } String secondaryEndpoint = primaryEndpoint.getScheme() + ": TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder() .endpoint(secondaryEndpoint) .serviceVersion(serviceClient.getServiceVersion()) .pipeline(serviceClient.getHttpPipeline()) .buildAsyncClient(); StepVerifier.create(secondaryClient.getStatistics()) .assertNext(statistics -> { 
assertNotNull(statistics); assertNotNull(statistics.getGeoReplication()); assertNotNull(statistics.getGeoReplication().getStatus()); assertNotNull(statistics.getGeoReplication().getLastSyncTime()); }) .expectComplete() .verify(); } }
class TableServiceAsyncClientTest extends TableServiceClientTestBase { private static final Duration TIMEOUT = Duration.ofSeconds(100); private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault(); private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest(); private TableServiceAsyncClient serviceClient; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(TIMEOUT); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode()); serviceClient = getClientBuilder(connectionString).buildAsyncClient(); } @Test public void serviceCreateTable() { String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } /** * Tests that a table and entity can be created while having a different tenant ID than the one that will be * provided in the authentication challenge. */ @Test public void serviceCreateTableWithMultipleTenants() { Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net") && serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06); String tableName = testResourceNamer.randomName("tableName", 20); final ClientSecretCredential credential = new ClientSecretCredentialBuilder() .clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId")) .clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret")) .tenantId(testResourceNamer.randomUuid()) .additionallyAllowedTenants("*") .build(); final TableServiceAsyncClient tableServiceAsyncClient = getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT", "https: StepVerifier.create(tableServiceAsyncClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); tableName = testResourceNamer.randomName("tableName", 20); StepVerifier.create(tableServiceAsyncClient.createTable(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } @Test public void serviceCreateTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; StepVerifier.create(serviceClient.createTableWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNotNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceCreateTableFailsIfExists() { String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTable(tableName)) .expectErrorMatches(e -> e instanceof TableServiceException && ((TableServiceException) e).getResponse().getStatusCode() == 409) .verify(); } @Test public void serviceCreateTableIfNotExists() { String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.createTableIfNotExists(tableName)) .assertNext(Assertions::assertNotNull) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsSucceedsIfExists() { String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTableIfNotExists(tableName)) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsWithResponse() { String tableName 
= testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNotNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 409; serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName)) .assertNext(response -> { assertEquals(expectedStatusCode, response.getStatusCode()); assertNull(response.getValue()); }) .expectComplete() .verify(); } @Test public void serviceDeleteTable() { final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); StepVerifier.create(serviceClient.deleteTable(tableName)) .expectComplete() .verify(); } @Test public void serviceDeleteNonExistingTable() { final String tableName = testResourceNamer.randomName("test", 20); StepVerifier.create(serviceClient.deleteTable(tableName)) .expectComplete() .verify(); } @Test public void serviceDeleteTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 204; serviceClient.createTable(tableName).block(); StepVerifier.create(serviceClient.deleteTableWithResponse(tableName)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void serviceDeleteNonExistingTableWithResponse() { String tableName = testResourceNamer.randomName("test", 20); int expectedStatusCode = 404; StepVerifier.create(serviceClient.deleteTableWithResponse(tableName)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void serviceListTables() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); StepVerifier.create(serviceClient.listTables()) .expectNextCount(2) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } @Test public void serviceListTablesWithFilter() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'"); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); StepVerifier.create(serviceClient.listTables(options)) .assertNext(table -> assertEquals(tableName, table.getName())) .expectNextCount(0) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } @Test public void serviceListTablesWithTop() { final String tableName = testResourceNamer.randomName("test", 20); final String tableName2 = testResourceNamer.randomName("test", 20); final String tableName3 = testResourceNamer.randomName("test", 20); ListTablesOptions options = new ListTablesOptions().setTop(2); serviceClient.createTable(tableName).block(TIMEOUT); serviceClient.createTable(tableName2).block(TIMEOUT); serviceClient.createTable(tableName3).block(TIMEOUT); StepVerifier.create(serviceClient.listTables(options)) .expectNextCount(2) .thenConsumeWhile(x -> true) .expectComplete() .verify(); } 
@Test public void serviceGetTableClient() { final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); TableAsyncClient tableClient = serviceClient.getTableClient(tableName); TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey"); } @Test public void generateAccountSasTokenWithMinimumParameters() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY; final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()); final String sas = serviceClient.generateAccountSas(sasSignatureValues); assertTrue( sas.startsWith( "sv=2019-02-02" + "&ss=t" + "&srt=o" + "&se=2021-12-12T00%3A00%3A00Z" + "&sp=r" + "&spr=https" + "&sig=" ) ); } @Test public void generateAccountSasTokenWithAllParameters() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP; final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); final TableSasIpRange ipRange = TableSasIpRange.parse("a-b"); final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()) .setStartTime(startTime) .setSasIpRange(ipRange); final String sas = serviceClient.generateAccountSas(sasSignatureValues); assertTrue( sas.startsWith( "sv=2019-02-02" + "&ss=t" + "&srt=o" + "&st=2015-01-01T00%3A00%3A00Z" + "&se=2021-12-12T00%3A00%3A00Z" + "&sp=rdau" + "&sip=a-b" + "&spr=https%2Chttp" + "&sig=" ) ); } @Test @Disabled public void canUseSasTokenToCreateValidTableClient() { final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC); final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a"); final TableAccountSasService services = new TableAccountSasService().setTableAccess(true); final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true); final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY; final TableAccountSasSignatureValues sasSignatureValues = new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes) .setProtocol(protocol) .setVersion(TableServiceVersion.V2019_02_02.getVersion()); final String sas = serviceClient.generateAccountSas(sasSignatureValues); final String tableName = testResourceNamer.randomName("test", 20); serviceClient.createTable(tableName).block(TIMEOUT); final TableClientBuilder tableClientBuilder = new TableClientBuilder() .httpLogOptions(new 
HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .endpoint(serviceClient.getServiceEndpoint()) .sasToken(sas) .tableName(tableName); if (interceptorManager.isPlaybackMode()) { tableClientBuilder.httpClient(playbackClient); } else { tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT); if (!interceptorManager.isLiveMode()) { tableClientBuilder.addPolicy(recordPolicy); } tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500), Duration.ofSeconds(100)))); } final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient(); final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20); final String rowKeyValue = testResourceNamer.randomName("rowKey", 20); final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue); final int expectedStatusCode = 204; StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity)) .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode())) .expectComplete() .verify(); } @Test public void setGetProperties() { Assumptions.assumeFalse(IS_COSMOS_TEST, "Setting and getting properties is not supported on Cosmos endpoints."); TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy() .setDaysToRetain(5) .setEnabled(true); TableServiceLogging logging = new TableServiceLogging() .setReadLogged(true) .setAnalyticsVersion("1.0") .setRetentionPolicy(retentionPolicy); List<TableServiceCorsRule> corsRules = new ArrayList<>(); corsRules.add(new TableServiceCorsRule() .setAllowedMethods("GET,PUT,HEAD") .setAllowedOrigins("*") .setAllowedHeaders("x-ms-version") .setExposedHeaders("x-ms-client-request-id") .setMaxAgeInSeconds(10)); TableServiceMetrics hourMetrics = new TableServiceMetrics() .setEnabled(true) .setVersion("1.0") .setRetentionPolicy(retentionPolicy) .setIncludeApis(true); TableServiceMetrics minuteMetrics = new TableServiceMetrics() .setEnabled(true) .setVersion("1.0") .setRetentionPolicy(retentionPolicy) .setIncludeApis(true); TableServiceProperties sentProperties = new TableServiceProperties() .setLogging(logging) .setCorsRules(corsRules) .setMinuteMetrics(minuteMetrics) .setHourMetrics(hourMetrics); StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties)) .assertNext(response -> { assertNotNull(response.getHeaders().getValue("x-ms-request-id")); assertNotNull(response.getHeaders().getValue("x-ms-version")); }) .expectComplete() .verify(); sleepIfRunningAgainstService(30000); StepVerifier.create(serviceClient.getProperties()) .assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties)) .expectComplete() .verify(); } @Test public void getStatistics() throws URISyntaxException { Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints."); URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint()); String[] hostParts = primaryEndpoint.getHost().split("\\."); StringJoiner secondaryHostJoiner = new StringJoiner("."); secondaryHostJoiner.add(hostParts[0] + "-secondary"); for (int i = 1; i < hostParts.length; i++) { secondaryHostJoiner.add(hostParts[i]); } String secondaryEndpoint = primaryEndpoint.getScheme() + ": TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder() .endpoint(secondaryEndpoint) .serviceVersion(serviceClient.getServiceVersion()) .pipeline(serviceClient.getHttpPipeline()) .buildAsyncClient(); StepVerifier.create(secondaryClient.getStatistics()) .assertNext(statistics -> { 
assertNotNull(statistics); assertNotNull(statistics.getGeoReplication()); assertNotNull(statistics.getGeoReplication().getStatus()); assertNotNull(statistics.getGeoReplication().getLastSyncTime()); }) .expectComplete() .verify(); } }
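Regarding the question above: the linked SecretClientTest exempts paging and polling calls from the sync-over-async assertion through the `skipRequest` predicate, since those operations still hop through async adapters internally. A hedged sketch of what a sync variant of `buildAssertingClient` could look like; the predicate below is illustrative only (the real test keys off its own request shapes), and `assertSync()` is assumed to be available alongside the `assertAsync()` shown above.
```
import com.azure.core.http.HttpClient;
import com.azure.core.test.http.AssertingHttpClientBuilder; // package assumed

final class SyncAssertingClientSketch {

    static HttpClient buildSyncAssertingClient(final HttpClient httpClient) {
        return new AssertingHttpClientBuilder(httpClient)
                // Illustrative predicate: skip list (paging) and deleted-secret (polling)
                // requests, instead of skipping nothing as the async variant does.
                .skipRequest((request, context) -> {
                    String path = request.getUrl().getPath();
                    return path.endsWith("/secrets") || path.contains("/deletedsecrets");
                })
                .assertSync()
                .build();
    }
}
```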
It's the same: the queue's `poll` may return null
```
/**
 * Retrieves and removes the head of this queue, waiting up to the
 * specified wait time if necessary for an element to become available.
 *
 * @param timeout how long to wait before giving up, in units of
 *        {@code unit}
 * @param unit a {@code TimeUnit} determining how to interpret the
 *        {@code timeout} parameter
 * @return the head of this queue, or {@code null} if the
 *         specified waiting time elapses before an element is available
 * @throws InterruptedException if interrupted while waiting
 */
E poll(long timeout, TimeUnit unit) throws InterruptedException;
```
protected void runBlocking() { client.connect(); client.subscribe(binlogPosition.getFilename(), binlogPosition.getPosition()); while (isRunning()) { List<AbstractBinlogEvent> events = client.poll(); if (null == events) { continue; } handleEvent(events); } channel.pushRecords(Collections.singletonList(new FinishedRecord(new FinishedPosition()))); }
}
protected void runBlocking() { client.connect(); client.subscribe(binlogPosition.getFilename(), binlogPosition.getPosition()); while (isRunning()) { List<AbstractBinlogEvent> events = client.poll(); if (events.isEmpty()) { continue; } handleEvents(events); } channel.pushRecords(Collections.singletonList(new FinishedRecord(new FinishedPosition()))); }
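The contract quoted in the comment is from `java.util.concurrent.BlockingQueue`: a timed `poll` returns null when the wait elapses, which is exactly why the consumer loop has to tolerate an empty hand-off (a null batch before the revision, an empty list after it). A minimal, runnable demonstration:
```
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

final class PollNullDemo {

    public static void main(final String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new LinkedBlockingQueue<>();
        // Nothing was offered, so the timed poll elapses and returns null
        // rather than blocking forever.
        String head = queue.poll(100, TimeUnit.MILLISECONDS);
        System.out.println(head); // null
        queue.offer("event");
        System.out.println(queue.poll(100, TimeUnit.MILLISECONDS)); // event
    }
}
```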
class MySQLIncrementalDumper extends AbstractLifecycleExecutor implements IncrementalDumper { private final DumperConfiguration dumperConfig; private final BinlogPosition binlogPosition; private final PipelineTableMetaDataLoader metaDataLoader; private final PipelineChannel channel; private final MySQLClient client; private final String catalog; public MySQLIncrementalDumper(final DumperConfiguration dumperConfig, final IngestPosition binlogPosition, final PipelineChannel channel, final PipelineTableMetaDataLoader metaDataLoader) { Preconditions.checkArgument(dumperConfig.getDataSourceConfig() instanceof StandardPipelineDataSourceConfiguration, "MySQLBinlogDumper only support StandardPipelineDataSourceConfiguration"); this.dumperConfig = dumperConfig; this.binlogPosition = (BinlogPosition) binlogPosition; this.channel = channel; this.metaDataLoader = metaDataLoader; YamlJdbcConfiguration jdbcConfig = ((StandardPipelineDataSourceConfiguration) dumperConfig.getDataSourceConfig()).getJdbcConfig(); log.info("incremental dump, jdbcUrl={}", jdbcConfig.getUrl()); DataSourceMetaData metaData = TypedSPILoader.getService(DatabaseType.class, "MySQL").getDataSourceMetaData(jdbcConfig.getUrl(), null); ConnectInfo connectInfo = new ConnectInfo(new Random().nextInt(), metaData.getHostname(), metaData.getPort(), jdbcConfig.getUsername(), jdbcConfig.getPassword()); client = new MySQLClient(connectInfo, dumperConfig.isDecodeWithTX()); catalog = metaData.getCatalog(); } @Override private void handleEvent(final List<AbstractBinlogEvent> events) { List<Record> dataRecords = new LinkedList<>(); for (AbstractBinlogEvent each : events) { if (!(each instanceof AbstractRowsEvent)) { dataRecords.add(createPlaceholderRecord(each)); continue; } AbstractRowsEvent rowsEvent = (AbstractRowsEvent) each; if (!rowsEvent.getDatabaseName().equals(catalog) || !dumperConfig.containsTable(rowsEvent.getTableName())) { continue; } PipelineTableMetaData tableMetaData = getPipelineTableMetaData(rowsEvent.getTableName()); if (each instanceof WriteRowsEvent) { dataRecords.addAll(handleWriteRowsEvent((WriteRowsEvent) each, tableMetaData)); continue; } if (each instanceof UpdateRowsEvent) { dataRecords.addAll(handleUpdateRowsEvent((UpdateRowsEvent) each, tableMetaData)); continue; } if (each instanceof DeleteRowsEvent) { dataRecords.addAll(handleDeleteRowsEvent((DeleteRowsEvent) each, tableMetaData)); } } if (dataRecords.isEmpty()) { return; } channel.pushRecords(dataRecords); } private PlaceholderRecord createPlaceholderRecord(final AbstractBinlogEvent event) { PlaceholderRecord result = new PlaceholderRecord(new BinlogPosition(event.getFileName(), event.getPosition(), event.getServerId())); result.setCommitTime(event.getTimestamp() * 1000L); return result; } private PipelineTableMetaData getPipelineTableMetaData(final String actualTableName) { return metaDataLoader.getTableMetaData(dumperConfig.getSchemaName(new ActualTableName(actualTableName)), actualTableName); } private List<DataRecord> handleWriteRowsEvent(final WriteRowsEvent event, final PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (Serializable[] each : event.getAfterRows()) { DataRecord dataRecord = createDataRecord(event, each.length); dataRecord.setType(IngestDataChangeType.INSERT); for (int i = 0; i < each.length; i++) { PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1); if 
(isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), true, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private boolean isColumnUnneeded(final Set<ColumnName> columnNameSet, final String columnName) { return null != columnNameSet && !columnNameSet.contains(new ColumnName(columnName)); } private List<DataRecord> handleUpdateRowsEvent(final UpdateRowsEvent event, final PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (int i = 0; i < event.getBeforeRows().size(); i++) { Serializable[] beforeValues = event.getBeforeRows().get(i); Serializable[] afterValues = event.getAfterRows().get(i); DataRecord dataRecord = createDataRecord(event, beforeValues.length); dataRecord.setType(IngestDataChangeType.UPDATE); for (int j = 0; j < beforeValues.length; j++) { Serializable oldValue = beforeValues[j]; Serializable newValue = afterValues[j]; boolean updated = !Objects.equals(newValue, oldValue); PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(j + 1); if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, oldValue), handleValue(columnMetaData, newValue), updated, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private List<DataRecord> handleDeleteRowsEvent(final DeleteRowsEvent event, final PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (Serializable[] each : event.getBeforeRows()) { DataRecord dataRecord = createDataRecord(event, each.length); dataRecord.setType(IngestDataChangeType.DELETE); for (int i = 0, length = each.length; i < length; i++) { PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1); if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), null, true, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private Serializable handleValue(final PipelineColumnMetaData columnMetaData, final Serializable value) { if (value instanceof MySQLBinaryString) { if (PipelineJdbcUtils.isBinaryColumn(columnMetaData.getDataType())) { return ((MySQLBinaryString) value).getBytes(); } return new String(((MySQLBinaryString) value).getBytes(), Charset.defaultCharset()); } Optional<MySQLDataTypeHandler> dataTypeHandler = TypedSPILoader.findService(MySQLDataTypeHandler.class, columnMetaData.getDataTypeName()); return dataTypeHandler.isPresent() ? dataTypeHandler.get().handle(value) : value; } private DataRecord createDataRecord(final AbstractRowsEvent rowsEvent, final int columnCount) { DataRecord result = new DataRecord(new BinlogPosition(rowsEvent.getFileName(), rowsEvent.getPosition(), rowsEvent.getServerId()), columnCount); result.setTableName(dumperConfig.getLogicTableName(rowsEvent.getTableName()).getOriginal()); result.setCommitTime(rowsEvent.getTimestamp() * 1000); return result; } @Override protected void doStop() { if (null != client) { client.closeChannel(); } } }
class MySQLIncrementalDumper extends AbstractLifecycleExecutor implements IncrementalDumper { private final DumperConfiguration dumperConfig; private final BinlogPosition binlogPosition; private final PipelineTableMetaDataLoader metaDataLoader; private final PipelineChannel channel; private final MySQLClient client; private final String catalog; public MySQLIncrementalDumper(final DumperConfiguration dumperConfig, final IngestPosition binlogPosition, final PipelineChannel channel, final PipelineTableMetaDataLoader metaDataLoader) { Preconditions.checkArgument(dumperConfig.getDataSourceConfig() instanceof StandardPipelineDataSourceConfiguration, "MySQLBinlogDumper only support StandardPipelineDataSourceConfiguration"); this.dumperConfig = dumperConfig; this.binlogPosition = (BinlogPosition) binlogPosition; this.channel = channel; this.metaDataLoader = metaDataLoader; YamlJdbcConfiguration jdbcConfig = ((StandardPipelineDataSourceConfiguration) dumperConfig.getDataSourceConfig()).getJdbcConfig(); log.info("incremental dump, jdbcUrl={}", jdbcConfig.getUrl()); DataSourceMetaData metaData = TypedSPILoader.getService(DatabaseType.class, "MySQL").getDataSourceMetaData(jdbcConfig.getUrl(), null); ConnectInfo connectInfo = new ConnectInfo(new Random().nextInt(), metaData.getHostname(), metaData.getPort(), jdbcConfig.getUsername(), jdbcConfig.getPassword()); client = new MySQLClient(connectInfo, dumperConfig.isDecodeWithTX()); catalog = metaData.getCatalog(); } @Override private void handleEvents(final List<AbstractBinlogEvent> events) { List<Record> dataRecords = new LinkedList<>(); for (AbstractBinlogEvent each : events) { if (!(each instanceof AbstractRowsEvent)) { dataRecords.add(createPlaceholderRecord(each)); continue; } dataRecords.addAll(handleEvent(each)); } if (dataRecords.isEmpty()) { return; } channel.pushRecords(dataRecords); } private List<? 
extends Record> handleEvent(final AbstractBinlogEvent event) { if (!(event instanceof AbstractRowsEvent)) { return Collections.singletonList(createPlaceholderRecord(event)); } AbstractRowsEvent rowsEvent = (AbstractRowsEvent) event; if (!rowsEvent.getDatabaseName().equals(catalog) || !dumperConfig.containsTable(rowsEvent.getTableName())) { return Collections.singletonList(createPlaceholderRecord(event)); } PipelineTableMetaData tableMetaData = getPipelineTableMetaData(rowsEvent.getTableName()); if (event instanceof WriteRowsEvent) { return handleWriteRowsEvent((WriteRowsEvent) event, tableMetaData); } if (event instanceof UpdateRowsEvent) { return handleUpdateRowsEvent((UpdateRowsEvent) event, tableMetaData); } if (event instanceof DeleteRowsEvent) { return handleDeleteRowsEvent((DeleteRowsEvent) event, tableMetaData); } return Collections.emptyList(); } private PlaceholderRecord createPlaceholderRecord(final AbstractBinlogEvent event) { PlaceholderRecord result = new PlaceholderRecord(new BinlogPosition(event.getFileName(), event.getPosition(), event.getServerId())); result.setCommitTime(event.getTimestamp() * 1000L); return result; } private PipelineTableMetaData getPipelineTableMetaData(final String actualTableName) { return metaDataLoader.getTableMetaData(dumperConfig.getSchemaName(new ActualTableName(actualTableName)), actualTableName); } private List<DataRecord> handleWriteRowsEvent(final WriteRowsEvent event, final PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (Serializable[] each : event.getAfterRows()) { DataRecord dataRecord = createDataRecord(event, each.length); dataRecord.setType(IngestDataChangeType.INSERT); for (int i = 0; i < each.length; i++) { PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1); if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), true, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private boolean isColumnUnneeded(final Set<ColumnName> columnNameSet, final String columnName) { return null != columnNameSet && !columnNameSet.contains(new ColumnName(columnName)); } private List<DataRecord> handleUpdateRowsEvent(final UpdateRowsEvent event, final PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (int i = 0; i < event.getBeforeRows().size(); i++) { Serializable[] beforeValues = event.getBeforeRows().get(i); Serializable[] afterValues = event.getAfterRows().get(i); DataRecord dataRecord = createDataRecord(event, beforeValues.length); dataRecord.setType(IngestDataChangeType.UPDATE); for (int j = 0; j < beforeValues.length; j++) { Serializable oldValue = beforeValues[j]; Serializable newValue = afterValues[j]; boolean updated = !Objects.equals(newValue, oldValue); PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(j + 1); if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, oldValue), handleValue(columnMetaData, newValue), updated, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private List<DataRecord> handleDeleteRowsEvent(final DeleteRowsEvent event, final 
PipelineTableMetaData tableMetaData) { Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null); List<DataRecord> result = new LinkedList<>(); for (Serializable[] each : event.getBeforeRows()) { DataRecord dataRecord = createDataRecord(event, each.length); dataRecord.setType(IngestDataChangeType.DELETE); for (int i = 0, length = each.length; i < length; i++) { PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1); if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) { continue; } dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), null, true, columnMetaData.isUniqueKey())); } result.add(dataRecord); } return result; } private Serializable handleValue(final PipelineColumnMetaData columnMetaData, final Serializable value) { if (value instanceof MySQLBinaryString) { if (PipelineJdbcUtils.isBinaryColumn(columnMetaData.getDataType())) { return ((MySQLBinaryString) value).getBytes(); } return new String(((MySQLBinaryString) value).getBytes(), Charset.defaultCharset()); } Optional<MySQLDataTypeHandler> dataTypeHandler = TypedSPILoader.findService(MySQLDataTypeHandler.class, columnMetaData.getDataTypeName()); return dataTypeHandler.isPresent() ? dataTypeHandler.get().handle(value) : value; } private DataRecord createDataRecord(final AbstractRowsEvent rowsEvent, final int columnCount) { DataRecord result = new DataRecord(new BinlogPosition(rowsEvent.getFileName(), rowsEvent.getPosition(), rowsEvent.getServerId()), columnCount); result.setTableName(dumperConfig.getLogicTableName(rowsEvent.getTableName()).getOriginal()); result.setCommitTime(rowsEvent.getTimestamp() * 1000); return result; } @Override protected void doStop() { if (null != client) { client.closeChannel(); } } }
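The column-filter check shared by all three row-event handlers above decides whether a binlog column is propagated into the data record. A minimal, self-contained sketch of its semantics, using plain `String`/`Set` stand-ins for the pipeline's `ColumnName` type:

```java
import java.util.Set;

// Sketch of the isColumnUnneeded semantics: a null set means no column filter
// was configured, so every column is kept; otherwise only listed columns pass.
final class ColumnFilterDemo {

    static boolean isColumnUnneeded(Set<String> configuredColumns, String columnName) {
        return null != configuredColumns && !configuredColumns.contains(columnName);
    }

    public static void main(String[] args) {
        System.out.println(isColumnUnneeded(null, "id"));            // false: no filter, keep all
        System.out.println(isColumnUnneeded(Set.of("id"), "id"));    // false: explicitly listed
        System.out.println(isColumnUnneeded(Set.of("id"), "name"));  // true: filtered out
    }
}
```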
Yes, and using `.single()` would force it to throw if there was more than one element in the Flux. Why not use `.next()` rather than `.last()` then? With `.last()`, if the stream had 10,000 elements, this would not complete until all of them had been emitted.
public Mono<Instant> renewMessageLock(UUID lockToken) { return renewMessageLock(new UUID[]{lockToken}) .last() .publishOn(scheduler); }
.last()
public Mono<Instant> renewMessageLock(UUID lockToken) { return renewMessageLock(new UUID[]{lockToken}) .next() .publishOn(scheduler); }
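The review comment turns on the cardinality operators of Project Reactor. A minimal sketch contrasting `next()`, `last()`, and `single()` (with `block()` used only for demonstration):

```java
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public class CardinalityDemo {
    public static void main(String[] args) {
        Flux<Integer> flux = Flux.range(1, 10_000);

        // next(): emits the first element and cancels the rest of the stream,
        // so the Mono completes as soon as one element arrives.
        Mono<Integer> first = flux.next();
        System.out.println(first.block()); // 1

        // last(): must wait for the source to complete before it can know
        // which element was last, so all 10,000 elements are consumed first.
        Mono<Integer> last = flux.last();
        System.out.println(last.block()); // 10000

        // single(): errors unless the source emits exactly one element.
        System.out.println(Flux.just(42).single().block()); // 42
        // Flux.range(1, 2).single().block() would fail with an
        // IndexOutOfBoundsException ("Source emitted more than one item").
    }
}
```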
class ManagementChannel implements ServiceBusManagementNode { private final Scheduler scheduler; private final MessageSerializer messageSerializer; private final TokenManager tokenManager; private final Duration operationTimeout; private final Mono<RequestResponseChannel> createRequestResponse; private final String fullyQualifiedNamespace; private final ClientLogger logger; private final String entityPath; private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(); private volatile boolean isDisposed; ManagementChannel(Mono<RequestResponseChannel> createRequestResponse, String fullyQualifiedNamespace, String entityPath, TokenManager tokenManager, MessageSerializer messageSerializer, Scheduler scheduler, Duration operationTimeout) { this.createRequestResponse = createRequestResponse; this.fullyQualifiedNamespace = fullyQualifiedNamespace; this.logger = new ClientLogger(String.format("%s<%s>", ManagementChannel.class, entityPath)); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.scheduler = Objects.requireNonNull(scheduler, "'scheduler' cannot be null."); this.tokenManager = Objects.requireNonNull(tokenManager, "'tokenManager' cannot be null."); this.operationTimeout = operationTimeout; } @Override public Mono<Void> updateDisposition(UUID lockToken, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) { return isAuthorized(UPDATE_DISPOSITION_OPERATION).then(createRequestResponse.flatMap(channel -> { final Message message = createDispositionMessage(new UUID[] {lockToken}, dispositionStatus, null, null, null, channel.getReceiveLinkName()); return channel.sendWithAck(message); }).flatMap(response -> { final int statusCode = RequestResponseUtils.getResponseStatusCode(response); final AmqpResponseCode responseCode = AmqpResponseCode.fromValue(statusCode); if (responseCode == AmqpResponseCode.OK) { return Mono.empty(); } else { return Mono.error(ExceptionUtil.amqpResponseCodeToException(statusCode, "", getErrorContext())); } })); } /** * {@inheritDoc} */ @Override public Mono<Instant> renewMessageLock(ServiceBusReceivedMessage messageForLockRenew) { return renewMessageLock(new UUID[]{messageForLockRenew.getLockToken()}) .last() .publishOn(scheduler); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Mono<ServiceBusReceivedMessage> peek(long fromSequenceNumber) { return peek(fromSequenceNumber, 1, null) .last() .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) { return peek(this.lastPeekedSequenceNumber.get() + 1, maxMessages, null) .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages, long fromSequenceNumber) { return peek(fromSequenceNumber, maxMessages, null) .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Mono<ServiceBusReceivedMessage> peek() { return peek(lastPeekedSequenceNumber.get() + 1); } private Flux<ServiceBusReceivedMessage> peek(long fromSequenceNumber, int maxMessages, UUID sessionId) { return isAuthorized(PEEK_OPERATION_VALUE).thenMany(createRequestResponse.flatMap(channel -> { final Message message = createManagementMessage(PEEK_OPERATION_VALUE, channel.getReceiveLinkName()); HashMap<String, Object> requestBodyMap = new HashMap<>(); 
requestBodyMap.put(REQUEST_RESPONSE_FROM_SEQUENCE_NUMBER, fromSequenceNumber); requestBodyMap.put(REQUEST_RESPONSE_MESSAGE_COUNT, maxMessages); if (!Objects.isNull(sessionId)) { requestBodyMap.put(ManagementConstants.REQUEST_RESPONSE_SESSION_ID, sessionId); } message.setBody(new AmqpValue(requestBodyMap)); return channel.sendWithAck(message); }).flatMapMany(amqpMessage -> { final List<ServiceBusReceivedMessage> messageList = messageSerializer.deserializeList(amqpMessage, ServiceBusReceivedMessage.class); if (messageList.size() > 0) { final ServiceBusReceivedMessage receivedMessage = messageList.get(messageList.size() - 1); logger.info("Setting last peeked sequence number: {}", receivedMessage.getSequenceNumber()); if (receivedMessage.getSequenceNumber() > 0) { this.lastPeekedSequenceNumber.set(receivedMessage.getSequenceNumber()); } } return Flux.fromIterable(messageList); })); } private Mono<Void> isAuthorized(String operation) { return tokenManager.getAuthorizationResults().next().flatMap(response -> { if (response != AmqpResponseCode.ACCEPTED) { return Mono.error(new AmqpException(false, String.format( "User does not have authorization to perform operation [%s] on entity [%s]", operation, entityPath), getErrorContext())); } else { return Mono.empty(); } }); } private Message createDispositionMessage(UUID[] lockTokens, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify, String linkName) { logger.verbose("Update disposition of deliveries '{}' to '{}' on entity '{}', session '{}'", Arrays.toString(lockTokens), dispositionStatus, entityPath, "n/a"); final Message message = createManagementMessage(UPDATE_DISPOSITION_OPERATION, linkName); final Map<String, Object> requestBody = new HashMap<>(); requestBody.put(ManagementConstants.LOCK_TOKENS_KEY, lockTokens); requestBody.put(ManagementConstants.DISPOSITION_STATUS_KEY, dispositionStatus.getValue()); if (deadLetterReason != null) { requestBody.put(ManagementConstants.DEADLETTER_REASON_KEY, deadLetterReason); } if (deadLetterErrorDescription != null) { requestBody.put(ManagementConstants.DEADLETTER_DESCRIPTION_KEY, deadLetterErrorDescription); } if (propertiesToModify != null && propertiesToModify.size() > 0) { requestBody.put(ManagementConstants.PROPERTIES_TO_MODIFY_KEY, propertiesToModify); } message.setBody(new AmqpValue(requestBody)); return message; } private Flux<Instant> renewMessageLock(UUID[] renewLockList) { return isAuthorized(PEEK_OPERATION_VALUE).thenMany(createRequestResponse.flatMap(channel -> { UUID[] lockTokens = Arrays.stream(renewLockList) .toArray(UUID[]::new); Message requestMessage = createManagementMessage(REQUEST_RESPONSE_RENEWLOCK_OPERATION, channel.getReceiveLinkName()); requestMessage.setBody(new AmqpValue(Collections.singletonMap(REQUEST_RESPONSE_LOCKTOKENS, lockTokens))); return channel.sendWithAck(requestMessage); }).flatMapMany(responseMessage -> { int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); List<Instant> expirationsForLocks = new ArrayList<>(); if (statusCode == REQUEST_RESPONSE_OK_STATUS_CODE) { @SuppressWarnings("unchecked") Map<String, Object> responseBody = (Map<String, Object>) ((AmqpValue) responseMessage .getBody()).getValue(); Object expirationListObj = responseBody.get(REQUEST_RESPONSE_EXPIRATIONS); if (expirationListObj instanceof Date[]) { Date[] expirations = (Date[]) expirationListObj; expirationsForLocks = Arrays.stream(expirations) .map(Date::toInstant) 
.collect(Collectors.toList()); } } return Flux.fromIterable(expirationsForLocks); })); } /** * Creates an AMQP message with the required application properties. * * @param operation Management operation to perform (ie. peek, update-disposition, etc.) * @param linkName Name of receiver link associated with operation. * * @return An AMQP message with the required headers. */ private Message createManagementMessage(String operation, String linkName) { final Duration serverTimeout = MessageUtils.adjustServerTimeout(operationTimeout); final Map<String, Object> applicationProperties = new HashMap<>(); applicationProperties.put(MANAGEMENT_OPERATION_KEY, operation); applicationProperties.put(SERVER_TIMEOUT, serverTimeout.toMillis()); if (linkName != null && !linkName.isEmpty()) { applicationProperties.put(ASSOCIATED_LINK_NAME_KEY, linkName); } final Message message = Proton.message(); message.setApplicationProperties(new ApplicationProperties(applicationProperties)); return message; } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(fullyQualifiedNamespace, entityPath); } /** * {@inheritDoc} */ @Override public void close() { if (isDisposed) { return; } isDisposed = true; tokenManager.close(); } }
class ManagementChannel implements ServiceBusManagementNode { private final Scheduler scheduler; private final MessageSerializer messageSerializer; private final TokenManager tokenManager; private final Duration operationTimeout; private final Mono<RequestResponseChannel> createRequestResponse; private final String fullyQualifiedNamespace; private final ClientLogger logger; private final String entityPath; private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(); private volatile boolean isDisposed; ManagementChannel(Mono<RequestResponseChannel> createRequestResponse, String fullyQualifiedNamespace, String entityPath, TokenManager tokenManager, MessageSerializer messageSerializer, Scheduler scheduler, Duration operationTimeout) { this.createRequestResponse = createRequestResponse; this.fullyQualifiedNamespace = fullyQualifiedNamespace; this.logger = new ClientLogger(String.format("%s<%s>", ManagementChannel.class, entityPath)); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.scheduler = Objects.requireNonNull(scheduler, "'scheduler' cannot be null."); this.tokenManager = Objects.requireNonNull(tokenManager, "'tokenManager' cannot be null."); this.operationTimeout = operationTimeout; } @Override public Mono<Void> updateDisposition(UUID lockToken, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) { return isAuthorized(UPDATE_DISPOSITION_OPERATION).then(createRequestResponse.flatMap(channel -> { final Message message = createDispositionMessage(new UUID[] {lockToken}, dispositionStatus, null, null, null, channel.getReceiveLinkName()); return channel.sendWithAck(message); }).flatMap(response -> { final int statusCode = RequestResponseUtils.getResponseStatusCode(response); final AmqpResponseCode responseCode = AmqpResponseCode.fromValue(statusCode); if (responseCode == AmqpResponseCode.OK) { return Mono.empty(); } else { return Mono.error(ExceptionUtil.amqpResponseCodeToException(statusCode, "", getErrorContext())); } })); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Mono<ServiceBusReceivedMessage> peek(long fromSequenceNumber) { return peek(fromSequenceNumber, 1, null) .last() .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) { return peek(this.lastPeekedSequenceNumber.get() + 1, maxMessages, null) .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages, long fromSequenceNumber) { return peek(fromSequenceNumber, maxMessages, null) .publishOn(scheduler); } /** * {@inheritDoc} */ @Override public Mono<ServiceBusReceivedMessage> peek() { return peek(lastPeekedSequenceNumber.get() + 1); } private Flux<ServiceBusReceivedMessage> peek(long fromSequenceNumber, int maxMessages, UUID sessionId) { return isAuthorized(PEEK_OPERATION_VALUE).thenMany(createRequestResponse.flatMap(channel -> { final Message message = createManagementMessage(PEEK_OPERATION_VALUE, channel.getReceiveLinkName()); HashMap<String, Object> requestBodyMap = new HashMap<>(); requestBodyMap.put(REQUEST_RESPONSE_FROM_SEQUENCE_NUMBER, fromSequenceNumber); requestBodyMap.put(MESSAGE_COUNT_KEY, maxMessages); if (!Objects.isNull(sessionId)) { requestBodyMap.put(ManagementConstants.REQUEST_RESPONSE_SESSION_ID, sessionId); } 
message.setBody(new AmqpValue(requestBodyMap)); return channel.sendWithAck(message); }).flatMapMany(amqpMessage -> { final List<ServiceBusReceivedMessage> messageList = messageSerializer.deserializeList(amqpMessage, ServiceBusReceivedMessage.class); if (messageList.size() > 0) { final ServiceBusReceivedMessage receivedMessage = messageList.get(messageList.size() - 1); logger.info("Setting last peeked sequence number: {}", receivedMessage.getSequenceNumber()); if (receivedMessage.getSequenceNumber() > 0) { this.lastPeekedSequenceNumber.set(receivedMessage.getSequenceNumber()); } } return Flux.fromIterable(messageList); })); } private Mono<Void> isAuthorized(String operation) { return tokenManager.getAuthorizationResults().next().flatMap(response -> { if (response != AmqpResponseCode.ACCEPTED) { return Mono.error(new AmqpException(false, String.format( "User does not have authorization to perform operation [%s] on entity [%s]", operation, entityPath), getErrorContext())); } else { return Mono.empty(); } }); } private Message createDispositionMessage(UUID[] lockTokens, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify, String linkName) { logger.verbose("Update disposition of deliveries '{}' to '{}' on entity '{}', session '{}'", Arrays.toString(lockTokens), dispositionStatus, entityPath, "n/a"); final Message message = createManagementMessage(UPDATE_DISPOSITION_OPERATION, linkName); final Map<String, Object> requestBody = new HashMap<>(); requestBody.put(ManagementConstants.LOCK_TOKENS_KEY, lockTokens); requestBody.put(ManagementConstants.DISPOSITION_STATUS_KEY, dispositionStatus.getValue()); if (deadLetterReason != null) { requestBody.put(ManagementConstants.DEADLETTER_REASON_KEY, deadLetterReason); } if (deadLetterErrorDescription != null) { requestBody.put(ManagementConstants.DEADLETTER_DESCRIPTION_KEY, deadLetterErrorDescription); } if (propertiesToModify != null && propertiesToModify.size() > 0) { requestBody.put(ManagementConstants.PROPERTIES_TO_MODIFY_KEY, propertiesToModify); } message.setBody(new AmqpValue(requestBody)); return message; } private Flux<Instant> renewMessageLock(UUID[] renewLockList) { return isAuthorized(PEEK_OPERATION_VALUE).thenMany(createRequestResponse.flatMap(channel -> { Message requestMessage = createManagementMessage(RENEW_LOCK_OPERATION, channel.getReceiveLinkName()); requestMessage.setBody(new AmqpValue(Collections.singletonMap(LOCK_TOKENS, renewLockList))); return channel.sendWithAck(requestMessage); }).flatMapMany(responseMessage -> { int statusCode = RequestResponseUtils.getResponseStatusCode(responseMessage); if (statusCode != AmqpResponseCode.OK.getValue()) { return Mono.error(ExceptionUtil.amqpResponseCodeToException(statusCode, "Could not renew the lock.", getErrorContext())); } return Flux.fromIterable(messageSerializer.deserializeList(responseMessage, Instant.class)); })); } /** * Creates an AMQP message with the required application properties. * * @param operation Management operation to perform (ie. peek, update-disposition, etc.) * @param linkName Name of receiver link associated with operation. * * @return An AMQP message with the required headers. 
*/ private Message createManagementMessage(String operation, String linkName) { final Duration serverTimeout = MessageUtils.adjustServerTimeout(operationTimeout); final Map<String, Object> applicationProperties = new HashMap<>(); applicationProperties.put(MANAGEMENT_OPERATION_KEY, operation); applicationProperties.put(SERVER_TIMEOUT, serverTimeout.toMillis()); if (linkName != null && !linkName.isEmpty()) { applicationProperties.put(ASSOCIATED_LINK_NAME_KEY, linkName); } final Message message = Proton.message(); message.setApplicationProperties(new ApplicationProperties(applicationProperties)); return message; } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(fullyQualifiedNamespace, entityPath); } /** * {@inheritDoc} */ @Override public void close() { if (isDisposed) { return; } isDisposed = true; tokenManager.close(); } }
We should be checking that the mock FnApiControlClient received a ProcessBundle InstructionRequest with the cache token specified, rather than verifying that the state request handler was invoked.
public void verifyCacheTokensAreUsedInNewBundleRequest() { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor1 = ProcessBundleDescriptor.newBuilder().setId("descriptor1").build(); Map<String, RemoteInputDestination<WindowedValue<?>>> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); BundleProcessor processor1 = sdkHarnessClient.getProcessor(descriptor1, remoteInputs); when(dataService.send(any(), any())).thenReturn(mock(CloseableFnDataReceiver.class)); StateRequestHandler stateRequestHandler = Mockito.mock(StateRequestHandler.class); List<BeamFnApi.ProcessBundleRequest.CacheToken> cacheTokens = Collections.singletonList( BeamFnApi.ProcessBundleRequest.CacheToken.newBuilder().getDefaultInstanceForType()); when(stateRequestHandler.getCacheTokens()).thenReturn(cacheTokens); processor1.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mock(RemoteOutputReceiver.class)), stateRequestHandler, BundleProgressHandler.ignored()); Mockito.verify(stateRequestHandler).getCacheTokens(); }
Mockito.verify(stateRequestHandler).getCacheTokens();
public void verifyCacheTokensAreUsedInNewBundleRequest() { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor1 = ProcessBundleDescriptor.newBuilder().setId("descriptor1").build(); Map<String, RemoteInputDestination> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); BundleProcessor processor1 = sdkHarnessClient.getProcessor(descriptor1, remoteInputs); when(dataService.send(any(), any())).thenReturn(mock(CloseableFnDataReceiver.class)); StateRequestHandler stateRequestHandler = Mockito.mock(StateRequestHandler.class); List<BeamFnApi.ProcessBundleRequest.CacheToken> cacheTokens = Collections.singletonList( BeamFnApi.ProcessBundleRequest.CacheToken.newBuilder().getDefaultInstanceForType()); when(stateRequestHandler.getCacheTokens()).thenReturn(cacheTokens); processor1.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mock(RemoteOutputReceiver.class)), stateRequestHandler, BundleProgressHandler.ignored()); ArgumentCaptor<BeamFnApi.InstructionRequest> reqCaptor = ArgumentCaptor.forClass(BeamFnApi.InstructionRequest.class); Mockito.verify(fnApiControlClient, Mockito.times(2)).handle(reqCaptor.capture()); List<BeamFnApi.InstructionRequest> requests = reqCaptor.getAllValues(); assertThat( requests.get(0).getRequestCase(), is(BeamFnApi.InstructionRequest.RequestCase.REGISTER)); assertThat( requests.get(1).getRequestCase(), is(BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE)); assertThat(requests.get(1).getProcessBundle().getCacheTokensList(), is(cacheTokens)); }
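The fixed test above captures the actual `InstructionRequest`s sent to the control client and asserts on their content instead of verifying a call on the stubbed handler. A stripped-down sketch of that capture-and-assert pattern (the `Client`/`Service` names here are hypothetical, for illustration only):

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.mockito.ArgumentCaptor;

// Hypothetical collaborator and subject under test, for illustration only.
interface Client { void handle(String request); }

class Service {
    private final Client client;
    Service(Client client) { this.client = client; }
    void run(String token) { client.handle("process-bundle:" + token); }
}

public class CaptorDemo {
    public static void main(String[] args) {
        Client client = mock(Client.class);
        new Service(client).run("cache-token-1");

        // Capture what the collaborator actually received and assert on its
        // content, rather than verifying calls on a stub the test configured.
        ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
        verify(client).handle(captor.capture());
        if (!captor.getValue().contains("cache-token-1")) {
            throw new AssertionError("expected the request to carry the cache token");
        }
    }
}
```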
class SdkHarnessClientTest { @Mock public FnApiControlClient fnApiControlClient; @Mock public FnDataService dataService; @Rule public EmbeddedSdkHarness harness = EmbeddedSdkHarness.create(); @Rule public ExpectedException thrown = ExpectedException.none(); private SdkHarnessClient sdkHarnessClient; private ProcessBundleDescriptor descriptor; private String inputPCollection; private static final String SDK_GRPC_READ_TRANSFORM = "read"; private static final String SDK_GRPC_WRITE_TRANSFORM = "write"; @Before public void setup() throws Exception { MockitoAnnotations.initMocks(this); sdkHarnessClient = SdkHarnessClient.usingFnApiClient(fnApiControlClient, dataService); Pipeline userPipeline = Pipeline.create(); TupleTag<String> outputTag = new TupleTag<>(); userPipeline .apply("create", Create.of("foo")) .apply("proc", ParDo.of(new TestFn()).withOutputTags(outputTag, TupleTagList.empty())); RunnerApi.Pipeline userProto = PipelineTranslation.toProto(userPipeline); ProcessBundleDescriptor.Builder pbdBuilder = ProcessBundleDescriptor.newBuilder() .setId("my_id") .putAllEnvironments(userProto.getComponents().getEnvironmentsMap()) .putAllWindowingStrategies(userProto.getComponents().getWindowingStrategiesMap()) .putAllCoders(userProto.getComponents().getCodersMap()); RunnerApi.Coder fullValueCoder = CoderTranslation.toProto(WindowedValue.getFullCoder(StringUtf8Coder.of(), Coder.INSTANCE)) .getCoder(); pbdBuilder.putCoders("wire_coder", fullValueCoder); PTransform targetProcessor = userProto.getComponents().getTransformsOrThrow("proc"); RemoteGrpcPort port = RemoteGrpcPort.newBuilder() .setApiServiceDescriptor(harness.dataEndpoint()) .setCoderId("wire_coder") .build(); RemoteGrpcPortRead readNode = RemoteGrpcPortRead.readFromPort( port, getOnlyElement(targetProcessor.getInputsMap().values())); RemoteGrpcPortWrite writeNode = RemoteGrpcPortWrite.writeToPort( getOnlyElement(targetProcessor.getOutputsMap().values()), port); for (String pc : targetProcessor.getInputsMap().values()) { pbdBuilder.putPcollections(pc, userProto.getComponents().getPcollectionsOrThrow(pc)); } for (String pc : targetProcessor.getOutputsMap().values()) { pbdBuilder.putPcollections(pc, userProto.getComponents().getPcollectionsOrThrow(pc)); } pbdBuilder .putTransforms("proc", targetProcessor) .putTransforms(SDK_GRPC_READ_TRANSFORM, readNode.toPTransform()) .putTransforms(SDK_GRPC_WRITE_TRANSFORM, writeNode.toPTransform()); descriptor = pbdBuilder.build(); inputPCollection = getOnlyElement(descriptor.getTransformsOrThrow("read").getOutputsMap().values()); } @Test public void testRegisterCachesBundleProcessors() throws Exception { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor1 = ProcessBundleDescriptor.newBuilder().setId("descriptor1").build(); ProcessBundleDescriptor descriptor2 = ProcessBundleDescriptor.newBuilder().setId("descriptor2").build(); Map<String, RemoteInputDestination<WindowedValue<?>>> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); BundleProcessor processor1 = sdkHarnessClient.getProcessor(descriptor1, remoteInputs); BundleProcessor processor2 = sdkHarnessClient.getProcessor(descriptor2, remoteInputs); assertNotSame(processor1, processor2); assertSame(processor1, 
sdkHarnessClient.getProcessor(descriptor1, remoteInputs)); } @Test public void testRegisterWithStateRequiresStateDelegator() throws Exception { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor = ProcessBundleDescriptor.newBuilder() .setId("test") .setStateApiServiceDescriptor(ApiServiceDescriptor.newBuilder().setUrl("foo")) .build(); Map<String, RemoteInputDestination<WindowedValue<?>>> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); thrown.expect(IllegalStateException.class); thrown.expectMessage("containing a state"); sdkHarnessClient.getProcessor(descriptor, remoteInputs); } @Test public void testNewBundleNoDataDoesNotCrash() throws Exception { CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.send(any(), eq(coder))).thenReturn(mock(CloseableFnDataReceiver.class)); try (ActiveBundle activeBundle = processor.newBundle(Collections.emptyMap(), BundleProgressHandler.ignored())) { BeamFnApi.ProcessBundleResponse response = ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } } @Test public void testNewBundleAndProcessElements() throws Exception { SdkHarnessClient client = harness.client(); BundleProcessor processor = client.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM))); Collection<WindowedValue<String>> outputs = new ArrayList<>(); try (ActiveBundle activeBundle = processor.newBundle( Collections.singletonMap( SDK_GRPC_WRITE_TRANSFORM, RemoteOutputReceiver.of( FullWindowedValueCoder.of( LengthPrefixCoder.of(StringUtf8Coder.of()), Coder.INSTANCE), outputs::add)), BundleProgressHandler.ignored())) { FnDataReceiver<WindowedValue<?>> bundleInputReceiver = Iterables.getOnlyElement(activeBundle.getInputReceivers().values()); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("foo")); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("bar")); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("baz")); } assertThat( outputs, containsInAnyOrder( WindowedValue.valueInGlobalWindow("spam"), WindowedValue.valueInGlobalWindow("ham"), WindowedValue.valueInGlobalWindow("eggs"))); } @Test public void handleCleanupWhenInputSenderFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); 
when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockInputSender).close(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockOutputReceiver); } } @Test public void handleCleanupWithStateWhenInputSenderFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockInputSender).close(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); verify(mockStateRegistration).abort(); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockStateRegistration, mockOutputReceiver); } } @Test public void handleCleanupWhenProcessingBundleFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); 
when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { processBundleResponseFuture.completeExceptionally(testException); } fail("Exception expected"); } catch (ExecutionException e) { assertEquals(testException, e.getCause()); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockOutputReceiver); } } @Test public void handleCleanupWithStateWhenProcessingBundleFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { processBundleResponseFuture.completeExceptionally(testException); } fail("Exception expected"); } catch (ExecutionException e) { assertEquals(testException, e.getCause()); verify(mockStateRegistration).abort(); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockStateRegistration, mockOutputReceiver); } } @Test public void handleCleanupWhenAwaitingOnClosingOutputReceivers() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); 
CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockOutputReceiver).awaitCompletion(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { BeamFnApi.ProcessBundleResponse response = BeamFnApi.ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); } } @Test public void handleCleanupWithStateWhenAwaitingOnClosingOutputReceivers() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockOutputReceiver).awaitCompletion(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { BeamFnApi.ProcessBundleResponse response = BeamFnApi.ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); } } @Test private static class TestFn 
extends DoFn<String, String> { @ProcessElement public void processElement(ProcessContext context) { if ("foo".equals(context.element())) { context.output("spam"); } else if ("bar".equals(context.element())) { context.output("ham"); } else { context.output("eggs"); } } } }
class SdkHarnessClientTest { @Mock public FnApiControlClient fnApiControlClient; @Mock public FnDataService dataService; @Rule public EmbeddedSdkHarness harness = EmbeddedSdkHarness.create(); @Rule public ExpectedException thrown = ExpectedException.none(); private SdkHarnessClient sdkHarnessClient; private ProcessBundleDescriptor descriptor; private String inputPCollection; private static final String SDK_GRPC_READ_TRANSFORM = "read"; private static final String SDK_GRPC_WRITE_TRANSFORM = "write"; @Before public void setup() throws Exception { MockitoAnnotations.initMocks(this); sdkHarnessClient = SdkHarnessClient.usingFnApiClient(fnApiControlClient, dataService); Pipeline userPipeline = Pipeline.create(); TupleTag<String> outputTag = new TupleTag<>(); userPipeline .apply("create", Create.of("foo")) .apply("proc", ParDo.of(new TestFn()).withOutputTags(outputTag, TupleTagList.empty())); RunnerApi.Pipeline userProto = PipelineTranslation.toProto(userPipeline); ProcessBundleDescriptor.Builder pbdBuilder = ProcessBundleDescriptor.newBuilder() .setId("my_id") .putAllEnvironments(userProto.getComponents().getEnvironmentsMap()) .putAllWindowingStrategies(userProto.getComponents().getWindowingStrategiesMap()) .putAllCoders(userProto.getComponents().getCodersMap()); RunnerApi.Coder fullValueCoder = CoderTranslation.toProto(WindowedValue.getFullCoder(StringUtf8Coder.of(), Coder.INSTANCE)) .getCoder(); pbdBuilder.putCoders("wire_coder", fullValueCoder); PTransform targetProcessor = userProto.getComponents().getTransformsOrThrow("proc"); RemoteGrpcPort port = RemoteGrpcPort.newBuilder() .setApiServiceDescriptor(harness.dataEndpoint()) .setCoderId("wire_coder") .build(); RemoteGrpcPortRead readNode = RemoteGrpcPortRead.readFromPort( port, getOnlyElement(targetProcessor.getInputsMap().values())); RemoteGrpcPortWrite writeNode = RemoteGrpcPortWrite.writeToPort( getOnlyElement(targetProcessor.getOutputsMap().values()), port); for (String pc : targetProcessor.getInputsMap().values()) { pbdBuilder.putPcollections(pc, userProto.getComponents().getPcollectionsOrThrow(pc)); } for (String pc : targetProcessor.getOutputsMap().values()) { pbdBuilder.putPcollections(pc, userProto.getComponents().getPcollectionsOrThrow(pc)); } pbdBuilder .putTransforms("proc", targetProcessor) .putTransforms(SDK_GRPC_READ_TRANSFORM, readNode.toPTransform()) .putTransforms(SDK_GRPC_WRITE_TRANSFORM, writeNode.toPTransform()); descriptor = pbdBuilder.build(); inputPCollection = getOnlyElement(descriptor.getTransformsOrThrow("read").getOutputsMap().values()); } @Test public void testRegisterCachesBundleProcessors() throws Exception { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor1 = ProcessBundleDescriptor.newBuilder().setId("descriptor1").build(); ProcessBundleDescriptor descriptor2 = ProcessBundleDescriptor.newBuilder().setId("descriptor2").build(); Map<String, RemoteInputDestination> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); BundleProcessor processor1 = sdkHarnessClient.getProcessor(descriptor1, remoteInputs); BundleProcessor processor2 = sdkHarnessClient.getProcessor(descriptor2, remoteInputs); assertNotSame(processor1, processor2); assertSame(processor1, 
sdkHarnessClient.getProcessor(descriptor1, remoteInputs)); } @Test public void testRegisterWithStateRequiresStateDelegator() throws Exception { CompletableFuture<InstructionResponse> registerResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(registerResponseFuture); ProcessBundleDescriptor descriptor = ProcessBundleDescriptor.newBuilder() .setId("test") .setStateApiServiceDescriptor(ApiServiceDescriptor.newBuilder().setUrl("foo")) .build(); Map<String, RemoteInputDestination> remoteInputs = Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); thrown.expect(IllegalStateException.class); thrown.expectMessage("containing a state"); sdkHarnessClient.getProcessor(descriptor, remoteInputs); } @Test public void testNewBundleNoDataDoesNotCrash() throws Exception { CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.send(any(), eq(coder))).thenReturn(mock(CloseableFnDataReceiver.class)); try (ActiveBundle activeBundle = processor.newBundle(Collections.emptyMap(), BundleProgressHandler.ignored())) { BeamFnApi.ProcessBundleResponse response = ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } } @Test public void testNewBundleAndProcessElements() throws Exception { SdkHarnessClient client = harness.client(); BundleProcessor processor = client.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM))); Collection<WindowedValue<String>> outputs = new ArrayList<>(); try (ActiveBundle activeBundle = processor.newBundle( Collections.singletonMap( SDK_GRPC_WRITE_TRANSFORM, RemoteOutputReceiver.of( FullWindowedValueCoder.of( LengthPrefixCoder.of(StringUtf8Coder.of()), Coder.INSTANCE), outputs::add)), BundleProgressHandler.ignored())) { FnDataReceiver<WindowedValue<?>> bundleInputReceiver = Iterables.getOnlyElement(activeBundle.getInputReceivers().values()); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("foo")); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("bar")); bundleInputReceiver.accept(WindowedValue.valueInGlobalWindow("baz")); } assertThat( outputs, containsInAnyOrder( WindowedValue.valueInGlobalWindow("spam"), WindowedValue.valueInGlobalWindow("ham"), WindowedValue.valueInGlobalWindow("eggs"))); } @Test public void handleCleanupWhenInputSenderFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); 
when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockInputSender).close(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockOutputReceiver); } } @Test public void handleCleanupWithStateWhenInputSenderFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockInputSender).close(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); verify(mockStateRegistration).abort(); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockStateRegistration, mockOutputReceiver); } } @Test public void handleCleanupWhenProcessingBundleFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); 
when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { processBundleResponseFuture.completeExceptionally(testException); } fail("Exception expected"); } catch (ExecutionException e) { assertEquals(testException, e.getCause()); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockOutputReceiver); } } @Test public void handleCleanupWithStateWhenProcessingBundleFails() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { processBundleResponseFuture.completeExceptionally(testException); } fail("Exception expected"); } catch (ExecutionException e) { assertEquals(testException, e.getCause()); verify(mockStateRegistration).abort(); verify(mockOutputReceiver).cancel(); verifyNoMoreInteractions(mockStateRegistration, mockOutputReceiver); } } @Test public void handleCleanupWhenAwaitingOnClosingOutputReceivers() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); 
CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( "inputPC", RemoteInputDestination.of( (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM))); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockOutputReceiver).awaitCompletion(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockProgressHandler)) { BeamFnApi.ProcessBundleResponse response = BeamFnApi.ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); } } @Test public void handleCleanupWithStateWhenAwaitingOnClosingOutputReceivers() throws Exception { Exception testException = new Exception(); InboundDataClient mockOutputReceiver = mock(InboundDataClient.class); CloseableFnDataReceiver mockInputSender = mock(CloseableFnDataReceiver.class); StateDelegator mockStateDelegator = mock(StateDelegator.class); StateDelegator.Registration mockStateRegistration = mock(StateDelegator.Registration.class); when(mockStateDelegator.registerForProcessBundleInstructionId(any(), any())) .thenReturn(mockStateRegistration); StateRequestHandler mockStateHandler = mock(StateRequestHandler.class); when(mockStateHandler.getCacheTokens()).thenReturn(Collections.emptyList()); BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class); CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>(); when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class))) .thenReturn(new CompletableFuture<>()) .thenReturn(processBundleResponseFuture); FullWindowedValueCoder<String> coder = FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE); BundleProcessor processor = sdkHarnessClient.getProcessor( descriptor, Collections.singletonMap( inputPCollection, RemoteInputDestination.of((FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)), mockStateDelegator); when(dataService.receive(any(), any(), any())).thenReturn(mockOutputReceiver); when(dataService.send(any(), eq(coder))).thenReturn(mockInputSender); doThrow(testException).when(mockOutputReceiver).awaitCompletion(); RemoteOutputReceiver mockRemoteOutputReceiver = mock(RemoteOutputReceiver.class); try { try (ActiveBundle activeBundle = processor.newBundle( ImmutableMap.of(SDK_GRPC_WRITE_TRANSFORM, mockRemoteOutputReceiver), mockStateHandler, mockProgressHandler)) { BeamFnApi.ProcessBundleResponse response = BeamFnApi.ProcessBundleResponse.getDefaultInstance(); processBundleResponseFuture.complete( BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response).build()); } fail("Exception expected"); } catch (Exception e) { assertEquals(testException, e); } } @Test private static class TestFn 
extends DoFn<String, String> { @ProcessElement public void processElement(ProcessContext context) { if ("foo".equals(context.element())) { context.output("spam"); } else if ("bar".equals(context.element())) { context.output("ham"); } else { context.output("eggs"); } } } }
The parameter is a lambda expression.
public static Schema toIcebergApiSchema(List<Column> columns) { List<Types.NestedField> icebergColumns = new ArrayList<>(); for (Column column : columns) { int index = icebergColumns.size(); org.apache.iceberg.types.Type type = toIcebergColumnType(column.getType()); Types.NestedField field = Types.NestedField.of( index, column.isAllowNull(), column.getName(), type, column.getComment()); icebergColumns.add(field); } org.apache.iceberg.types.Type icebergSchema = Types.StructType.of(icebergColumns); AtomicInteger nextFieldId = new AtomicInteger(1); icebergSchema = TypeUtil.assignFreshIds(icebergSchema, nextFieldId::getAndIncrement); return new Schema(icebergSchema.asStructType().fields()); }
AtomicInteger nextFieldId = new AtomicInteger(1);
public static Schema toIcebergApiSchema(List<Column> columns) { List<Types.NestedField> icebergColumns = new ArrayList<>(); for (Column column : columns) { int index = icebergColumns.size(); org.apache.iceberg.types.Type type = toIcebergColumnType(column.getType()); Types.NestedField field = Types.NestedField.of( index, column.isAllowNull(), column.getName(), type, column.getComment()); icebergColumns.add(field); } org.apache.iceberg.types.Type icebergSchema = Types.StructType.of(icebergColumns); AtomicInteger nextFieldId = new AtomicInteger(1); icebergSchema = TypeUtil.assignFreshIds(icebergSchema, nextFieldId::getAndIncrement); return new Schema(icebergSchema.asStructType().fields()); }
class IcebergApiConverter { private static final Logger LOG = LogManager.getLogger(IcebergApiConverter.class); public static final String PARTITION_NULL_VALUE = "null"; public static IcebergTable toIcebergTable(Table nativeTbl, String catalogName, String remoteDbName, String remoteTableName, String nativeCatalogType, Optional<IcebergMetricsReporter> metricsReporter) { IcebergTable.Builder tableBuilder = IcebergTable.builder() .setId(CONNECTOR_ID_GENERATOR.getNextId().asInt()) .setSrTableName(remoteTableName) .setCatalogName(catalogName) .setResourceName(toResourceName(catalogName, "iceberg")) .setRemoteDbName(remoteDbName) .setRemoteTableName(remoteTableName) .setNativeTable(nativeTbl) .setFullSchema(toFullSchemas(nativeTbl)) .setIcebergProperties(toIcebergProps(nativeCatalogType)) .setMetricsReporter(metricsReporter); return tableBuilder.build(); } public static PartitionSpec parsePartitionFields(Schema schema, List<String> fields) { PartitionSpec.Builder builder = PartitionSpec.builderFor(schema); for (String field : fields) { builder.identity(field); } return builder.build(); } public static Optional<String> getTableLocation(Map<String, String> tableProperties) { return Optional.ofNullable(tableProperties.get(LOCATION_PROPERTY)); } public static org.apache.iceberg.types.Type toIcebergColumnType(Type type) { PrimitiveType primitiveType = type.getPrimitiveType(); switch (primitiveType) { case BOOLEAN: return Types.BooleanType.get(); case INT: case BIGINT: return Types.IntegerType.get(); case FLOAT: return Types.FloatType.get(); case DOUBLE: return Types.DoubleType.get(); case DATE: return Types.DateType.get(); case DATETIME: return Types.TimestampType.withoutZone(); case VARCHAR: case CHAR: return Types.StringType.get(); case DECIMAL32: case DECIMAL64: case DECIMAL128: return Types.DecimalType.of(type.getPrecision(), (((ScalarType) type).getScalarScale())); default: throw new StarRocksConnectorException("Unsupported column type %s", primitiveType); } } public static List<Column> toFullSchemas(Table nativeTbl) { List<Column> fullSchema = Lists.newArrayList(); List<Types.NestedField> columns; try { columns = nativeTbl.schema().columns(); } catch (NullPointerException e) { throw new StarRocksConnectorException(e.getMessage()); } for (Types.NestedField field : columns) { Type srType; try { srType = fromIcebergType(field.type()); } catch (InternalError | Exception e) { LOG.error("Failed to convert iceberg type {}", field.type().toString(), e); srType = Type.UNKNOWN_TYPE; } Column column = new Column(field.name(), srType, true); fullSchema.add(column); } return fullSchema; } public static Map<String, String> toIcebergProps(String nativeCatalogType) { Map<String, String> options = new HashMap<>(); options.put(ICEBERG_CATALOG_TYPE, nativeCatalogType); return options; } public static RemoteFileInputFormat getHdfsFileFormat(FileFormat format) { switch (format) { case ORC: return RemoteFileInputFormat.ORC; case PARQUET: return RemoteFileInputFormat.PARQUET; default: throw new StarRocksConnectorException("Unexpected file format: " + format); } } public static TIcebergSchema getTIcebergSchema(Schema schema) { Types.StructType rootType = schema.asStruct(); TIcebergSchema tIcebergSchema = new TIcebergSchema(); List<TIcebergSchemaField> fields = new ArrayList<>(rootType.fields().size()); for (Types.NestedField nestedField : rootType.fields()) { fields.add(getTIcebergSchemaField(nestedField)); } tIcebergSchema.setFields(fields); return tIcebergSchema; } private static TIcebergSchemaField 
getTIcebergSchemaField(Types.NestedField nestedField) { TIcebergSchemaField tIcebergSchemaField = new TIcebergSchemaField(); tIcebergSchemaField.setField_id(nestedField.fieldId()); tIcebergSchemaField.setName(nestedField.name()); if (nestedField.type().isNestedType()) { List<TIcebergSchemaField> children = new ArrayList<>(nestedField.type().asNestedType().fields().size()); for (Types.NestedField child : nestedField.type().asNestedType().fields()) { children.add(getTIcebergSchemaField(child)); } tIcebergSchemaField.setChildren(children); } return tIcebergSchemaField; } public static Map<String, String> rebuildCreateTableProperties(Map<String, String> createProperties) { ImmutableMap.Builder<String, String> tableProperties = ImmutableMap.builder(); createProperties.entrySet().stream() .filter(entry -> createProperties.containsKey(entry.getKey())) .forEach(tableProperties::put); String fileFormat = createProperties.getOrDefault("file_format", TableProperties.DEFAULT_FILE_FORMAT_DEFAULT); if ("parquet".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "parquet"); } else if ("avro".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "avro"); } else if ("orc".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "orc"); } else if (fileFormat != null) { throw new IllegalArgumentException("Unsupported format in USING: " + fileFormat); } return tableProperties.build(); } }
class IcebergApiConverter { private static final Logger LOG = LogManager.getLogger(IcebergApiConverter.class); public static final String PARTITION_NULL_VALUE = "null"; public static IcebergTable toIcebergTable(Table nativeTbl, String catalogName, String remoteDbName, String remoteTableName, String nativeCatalogType, Optional<IcebergMetricsReporter> metricsReporter) { IcebergTable.Builder tableBuilder = IcebergTable.builder() .setId(CONNECTOR_ID_GENERATOR.getNextId().asInt()) .setSrTableName(remoteTableName) .setCatalogName(catalogName) .setResourceName(toResourceName(catalogName, "iceberg")) .setRemoteDbName(remoteDbName) .setRemoteTableName(remoteTableName) .setNativeTable(nativeTbl) .setFullSchema(toFullSchemas(nativeTbl)) .setIcebergProperties(toIcebergProps(nativeCatalogType)) .setMetricsReporter(metricsReporter); return tableBuilder.build(); } public static PartitionSpec parsePartitionFields(Schema schema, List<String> fields) { PartitionSpec.Builder builder = PartitionSpec.builderFor(schema); for (String field : fields) { builder.identity(field); } return builder.build(); } public static Optional<String> getTableLocation(Map<String, String> tableProperties) { return Optional.ofNullable(tableProperties.get(LOCATION_PROPERTY)); } public static org.apache.iceberg.types.Type toIcebergColumnType(Type type) { PrimitiveType primitiveType = type.getPrimitiveType(); switch (primitiveType) { case BOOLEAN: return Types.BooleanType.get(); case INT: return Types.IntegerType.get(); case BIGINT: return Types.LongType.get(); case FLOAT: return Types.FloatType.get(); case DOUBLE: return Types.DoubleType.get(); case DATE: return Types.DateType.get(); case DATETIME: return Types.TimestampType.withoutZone(); case VARCHAR: case CHAR: return Types.StringType.get(); case DECIMAL32: case DECIMAL64: case DECIMAL128: return Types.DecimalType.of(type.getPrecision(), (((ScalarType) type).getScalarScale())); default: throw new StarRocksConnectorException("Unsupported column type %s", primitiveType); } } public static List<Column> toFullSchemas(Table nativeTbl) { List<Column> fullSchema = Lists.newArrayList(); List<Types.NestedField> columns; try { columns = nativeTbl.schema().columns(); } catch (NullPointerException e) { throw new StarRocksConnectorException(e.getMessage()); } for (Types.NestedField field : columns) { Type srType; try { srType = fromIcebergType(field.type()); } catch (InternalError | Exception e) { LOG.error("Failed to convert iceberg type {}", field.type().toString(), e); srType = Type.UNKNOWN_TYPE; } Column column = new Column(field.name(), srType, true); fullSchema.add(column); } return fullSchema; } public static Map<String, String> toIcebergProps(String nativeCatalogType) { Map<String, String> options = new HashMap<>(); options.put(ICEBERG_CATALOG_TYPE, nativeCatalogType); return options; } public static RemoteFileInputFormat getHdfsFileFormat(FileFormat format) { switch (format) { case ORC: return RemoteFileInputFormat.ORC; case PARQUET: return RemoteFileInputFormat.PARQUET; default: throw new StarRocksConnectorException("Unexpected file format: " + format); } } public static TIcebergSchema getTIcebergSchema(Schema schema) { Types.StructType rootType = schema.asStruct(); TIcebergSchema tIcebergSchema = new TIcebergSchema(); List<TIcebergSchemaField> fields = new ArrayList<>(rootType.fields().size()); for (Types.NestedField nestedField : rootType.fields()) { fields.add(getTIcebergSchemaField(nestedField)); } tIcebergSchema.setFields(fields); return tIcebergSchema; } private static 
TIcebergSchemaField getTIcebergSchemaField(Types.NestedField nestedField) { TIcebergSchemaField tIcebergSchemaField = new TIcebergSchemaField(); tIcebergSchemaField.setField_id(nestedField.fieldId()); tIcebergSchemaField.setName(nestedField.name()); if (nestedField.type().isNestedType()) { List<TIcebergSchemaField> children = new ArrayList<>(nestedField.type().asNestedType().fields().size()); for (Types.NestedField child : nestedField.type().asNestedType().fields()) { children.add(getTIcebergSchemaField(child)); } tIcebergSchemaField.setChildren(children); } return tIcebergSchemaField; } public static Map<String, String> rebuildCreateTableProperties(Map<String, String> createProperties) { ImmutableMap.Builder<String, String> tableProperties = ImmutableMap.builder(); createProperties.entrySet().forEach(tableProperties::put); String fileFormat = createProperties.getOrDefault("file_format", TableProperties.DEFAULT_FILE_FORMAT_DEFAULT); if ("parquet".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "parquet"); } else if ("avro".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "avro"); } else if ("orc".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "orc"); } else if (fileFormat != null) { throw new IllegalArgumentException("Unsupported format in USING: " + fileFormat); } return tableProperties.build(); } }
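A minimal, standalone sketch of the point in the comment above: `nextFieldId::getAndIncrement` is method-reference shorthand for a lambda that satisfies a no-arg, int-returning functional interface such as the one `TypeUtil.assignFreshIds` consumes. The `NextID` interface below is a hypothetical stand-in, not Iceberg's actual type.

import java.util.concurrent.atomic.AtomicInteger;

public class MethodReferenceDemo {
    // Hypothetical stand-in for the functional interface that assignFreshIds consumes.
    @FunctionalInterface
    interface NextID {
        int get();
    }

    public static void main(String[] args) {
        AtomicInteger nextFieldId = new AtomicInteger(1);

        // Method reference form, as in the snippet above...
        NextID byReference = nextFieldId::getAndIncrement;
        // ...and the equivalent explicit lambda; both capture the same counter.
        NextID byLambda = () -> nextFieldId.getAndIncrement();

        System.out.println(byReference.get()); // 1
        System.out.println(byLambda.get());    // 2
        System.out.println(byReference.get()); // 3
    }
}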
Shall we extract 4 into a meaningful constant?
public void process(ServiceNode serviceNode, List<AnnotationAttachmentNode> annotations) { List<BLangFunction> resources = (List<BLangFunction>) serviceNode.getResources(); resources.forEach(res -> validate(serviceNode.getName().getValue(), res, this.diagnosticLog)); if (resourceCount != 4) { String errorMsg = "Service needs to have all 4 resources " + "[(%s (Listener) or %s (CallBackService)), %s, %s, %s]."; String msg = String.format(errorMsg, RESOURCE_ON_ACCEPT, RESOURCE_ON_CONNECT, RESOURCE_ON_READ_READY, RESOURCE_ON_CLOSE, RESOURCE_ON_ERROR); diagnosticLog.logDiagnostic(ERROR, serviceNode.getPosition(), msg); } }
if (resourceCount != 4) {
public void process(ServiceNode serviceNode, List<AnnotationAttachmentNode> annotations) { int resourceCount = 0; int mandatoryResourceCount = 4; List<BLangFunction> resources = (List<BLangFunction>) serviceNode.getResources(); for (BLangFunction resource : resources) { resourceCount += validate(serviceNode.getName().getValue(), resource, this.diagnosticLog); } if (resourceCount != mandatoryResourceCount) { String errorMsg = "Service needs to have all 4 resources " + "[(%s (Listener) or %s (CallBackService)), %s, %s, %s]."; String msg = String.format(errorMsg, RESOURCE_ON_ACCEPT, RESOURCE_ON_CONNECT, RESOURCE_ON_READ_READY, RESOURCE_ON_CLOSE, RESOURCE_ON_ERROR); diagnosticLog.logDiagnostic(ERROR, serviceNode.getPosition(), msg); } }
class SocketCompilerPlugin extends AbstractCompilerPlugin { private static final String INVALID_RESOURCE_SIGNATURE = "Invalid resource signature for %s in service %s. "; private DiagnosticLog diagnosticLog = null; private int resourceCount = 0; @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; } @Override private void validate(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { switch (resource.getName().getValue()) { case RESOURCE_ON_CONNECT: case RESOURCE_ON_ACCEPT: validateOnAccept(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_READ_READY: validateOnReadReady(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_CLOSE: validateOnClose(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_ERROR: validateOnError(serviceName, resource, diagnosticLog); resourceCount++; break; default: } } private void validateOnError(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> readReadyParams = resource.getParameters(); if (readReadyParams.size() != 2) { String msg = String .format(INVALID_RESOURCE_SIGNATURE + "Parameters should be a 'socket:Caller' and 'error'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = readReadyParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } BType error = readReadyParams.get(1).getTypeNode().type; if (RECORD.equals(error.getKind()) && error instanceof BRecordType) { if (!"error".equals(error.tsymbol.toString())) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The second parameter should be an 'error'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } } private void validateOnReadReady(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> readReadyParams = resource.getParameters(); if (readReadyParams.size() != 2) { String msg = String .format(INVALID_RESOURCE_SIGNATURE + "Parameters should be a 'socket:Caller' and 'byte[]'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = readReadyParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } BType content = readReadyParams.get(1).getTypeNode().type; if (ARRAY.equals(content.getKind()) && content instanceof BArrayType) { if (!"byte".equals(((BArrayType) content).eType.tsymbol.toString())) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "Second parameter should be a byte[]", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } } private void validateOnClose(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { validateOnAccept(serviceName, resource, diagnosticLog); } private void validateOnAccept(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> acceptParams = resource.getParameters(); if (acceptParams.size() != 1) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The parameter should be a 'socket:Caller'", 
resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = acceptParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } } private void validateEndpointCaller(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog, BStructureType event) { String eventType = event.tsymbol.toString(); if (!("ballerina/socket:Listener".equals(eventType) || "ballerina/socket:Client".equals(eventType))) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The parameter should be a 'socket:Caller'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } }
class SocketCompilerPlugin extends AbstractCompilerPlugin { private static final String INVALID_RESOURCE_SIGNATURE = "Invalid resource signature for %s in service %s. "; private DiagnosticLog diagnosticLog = null; @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; } @Override private int validate(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { int resourceCount = 0; switch (resource.getName().getValue()) { case RESOURCE_ON_CONNECT: case RESOURCE_ON_ACCEPT: validateOnAccept(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_READ_READY: validateOnReadReady(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_CLOSE: validateOnClose(serviceName, resource, diagnosticLog); resourceCount++; break; case RESOURCE_ON_ERROR: validateOnError(serviceName, resource, diagnosticLog); resourceCount++; break; default: } return resourceCount; } private void validateOnError(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> readReadyParams = resource.getParameters(); if (readReadyParams.size() != 2) { String msg = String .format(INVALID_RESOURCE_SIGNATURE + "Parameters should be a 'socket:Caller' and 'error'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = readReadyParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } BType error = readReadyParams.get(1).getTypeNode().type; if (RECORD.equals(error.getKind()) && error instanceof BRecordType) { if (!"error".equals(error.tsymbol.toString())) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The second parameter should be an 'error'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } } private void validateOnReadReady(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> readReadyParams = resource.getParameters(); if (readReadyParams.size() != 2) { String msg = String .format(INVALID_RESOURCE_SIGNATURE + "Parameters should be a 'socket:Caller' and 'byte[]'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = readReadyParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } BType content = readReadyParams.get(1).getTypeNode().type; if (ARRAY.equals(content.getKind()) && content instanceof BArrayType) { if (!"byte".equals(((BArrayType) content).eType.tsymbol.toString())) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "Second parameter should be a byte[]", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } } private void validateOnClose(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { validateOnAccept(serviceName, resource, diagnosticLog); } private void validateOnAccept(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog) { final List<BLangSimpleVariable> acceptParams = resource.getParameters(); if (acceptParams.size() != 1) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The parameter should be a 
'socket:Caller'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); return; } BType caller = acceptParams.get(0).type; if (OBJECT.equals(caller.getKind()) && caller instanceof BStructureType) { validateEndpointCaller(serviceName, resource, diagnosticLog, (BStructureType) caller); } } private void validateEndpointCaller(String serviceName, BLangFunction resource, DiagnosticLog diagnosticLog, BStructureType event) { String eventType = event.tsymbol.toString(); if (!("ballerina/socket:Listener".equals(eventType) || "ballerina/socket:Client".equals(eventType))) { String msg = String.format(INVALID_RESOURCE_SIGNATURE + "The parameter should be a 'socket:Caller'", resource.getName().getValue(), serviceName); diagnosticLog.logDiagnostic(ERROR, resource.getPosition(), msg); } } }
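A minimal sketch of the refactor the reviewer asks for above: lift the magic number 4 into a named constant so the check and its error message cannot drift apart. The constant name is illustrative, not taken from the Ballerina sources (the revision above uses a local `mandatoryResourceCount` instead).

public class ResourceCountCheck {
    // Hypothetical constant name, for illustration only.
    private static final int MANDATORY_RESOURCE_COUNT = 4;

    static void validateResourceCount(int resourceCount) {
        if (resourceCount != MANDATORY_RESOURCE_COUNT) {
            throw new IllegalStateException(
                    "Service needs to have all " + MANDATORY_RESOURCE_COUNT + " resources.");
        }
    }

    public static void main(String[] args) {
        validateResourceCount(4); // passes
        validateResourceCount(3); // throws IllegalStateException
    }
}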
Based on the above suggestion, I am making it non-nullable.
public RequestIdPolicy(Supplier<HttpHeaders> requestIdSupplier) { this.requestIdSupplier = requestIdSupplier; }
this.requestIdSupplier = requestIdSupplier;
public RequestIdPolicy(Supplier<HttpHeaders> requestIdSupplier) { this.requestIdSupplier = Objects.requireNonNull(requestIdSupplier, "'requestIdSupplier' must not be null"); }
class RequestIdPolicy implements HttpPipelinePolicy { private static final String REQUEST_ID_HEADER = "x-ms-client-request-id"; private final Supplier<HttpHeaders> requestIdSupplier; /** * Creates default {@link RequestIdPolicy}. */ public RequestIdPolicy() { requestIdSupplier = null; } /** * Creates {@link RequestIdPolicy} with provided {@link Supplier} to dynamically generate request id for each * {@link HttpRequest}. * * @param requestIdSupplier to dynamically generate to request id for each {@link HttpRequest}. {@code null} is * valid value. It is suggested that this {@link Supplier} should provide unique value every time * it is called. Example of these headers are 'x-ms-client-request-id', 'x-ms-correlation-request-id'. */ @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { if (Objects.nonNull(requestIdSupplier)) { HttpHeaders httpHeaders = requestIdSupplier.get(); if (Objects.nonNull(httpHeaders) && httpHeaders.getSize() > 0) { httpHeaders.stream().forEach(httpHeader -> { String requestIdHeaderValue = context.getHttpRequest().getHeaders().getValue(httpHeader.getName()); if (requestIdHeaderValue == null) { context.getHttpRequest().getHeaders().put(httpHeader.getName(), httpHeader.getValue()); } }); return next.process(); } } String requestId = context.getHttpRequest().getHeaders().getValue(REQUEST_ID_HEADER); if (requestId == null) { context.getHttpRequest().getHeaders().put(REQUEST_ID_HEADER, UUID.randomUUID().toString()); } return next.process(); } }
class RequestIdPolicy implements HttpPipelinePolicy { private static final String REQUEST_ID_HEADER = "x-ms-client-request-id"; private final Supplier<HttpHeaders> requestIdSupplier; /** * Creates default {@link RequestIdPolicy}. */ public RequestIdPolicy() { requestIdSupplier = () -> new HttpHeaders().put(REQUEST_ID_HEADER, UUID.randomUUID().toString()); } /** * Creates {@link RequestIdPolicy} with provided {@link Supplier} to dynamically generate request id for each * {@link HttpRequest}. * * @param requestIdSupplier to dynamically generate to request id for each {@link HttpRequest}. It is suggested * that this {@link Supplier} provides unique value every time it is called. * Example of these headers are 'x-ms-client-request-id', 'x-ms-correlation-request-id'. * * @throws NullPointerException when {@code requestIdSupplier} is {@code null}. */ @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { HttpHeaders httpHeaders = requestIdSupplier.get(); if (Objects.nonNull(httpHeaders) && httpHeaders.getSize() > 0) { for (HttpHeader header : httpHeaders) { String requestIdHeaderValue = context.getHttpRequest().getHeaders().getValue(header.getName()); if (requestIdHeaderValue == null) { context.getHttpRequest().getHeaders().put(header.getName(), header.getValue()); } } return next.process(); } String requestId = context.getHttpRequest().getHeaders().getValue(REQUEST_ID_HEADER); if (requestId == null) { context.getHttpRequest().getHeaders().put(REQUEST_ID_HEADER, UUID.randomUUID().toString()); } return next.process(); } }
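A standalone sketch, assuming only the JDK, of the fail-fast pattern adopted above: `Objects.requireNonNull` rejects a null supplier at construction time instead of letting a NullPointerException surface later inside `process`.

import java.util.Objects;
import java.util.function.Supplier;

public class NonNullSupplierDemo {
    private final Supplier<String> requestIdSupplier;

    NonNullSupplierDemo(Supplier<String> requestIdSupplier) {
        // Throws NullPointerException with this message when the argument is null.
        this.requestIdSupplier = Objects.requireNonNull(
                requestIdSupplier, "'requestIdSupplier' must not be null");
    }

    String nextRequestId() {
        return requestIdSupplier.get();
    }

    public static void main(String[] args) {
        System.out.println(new NonNullSupplierDemo(() -> "abc-123").nextRequestId()); // abc-123
        try {
            new NonNullSupplierDemo(null); // fails fast in the constructor
        } catch (NullPointerException e) {
            System.out.println(e.getMessage()); // 'requestIdSupplier' must not be null
        }
    }
}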
There's a CheckUtil method you used in the same file that gives you the modifier. `final AccessModifier accessModifier = CheckUtil.getAccessModifierFromModifiersToken(modifiersToken);`
public void visitToken(DetailAST token) { switch (token.getType()) { case TokenTypes.IMPORT: final String importClassPath = FullIdent.createFullIdentBelow(token).getText(); final String className = importClassPath.substring(importClassPath.lastIndexOf(".") + 1); simpleClassNameToQualifiedNameMap.put(className, importClassPath); break; case TokenTypes.CLASS_DEF: final DetailAST modifiersToken = token.findFirstToken(TokenTypes.MODIFIERS); isPublicClass = modifiersToken.branchContains(TokenTypes.LITERAL_PUBLIC); break; case TokenTypes.METHOD_DEF: if (!isPublicClass) { return; } checkNoExternalDependencyExposed(token); break; default: break; } }
final DetailAST modifiersToken = token.findFirstToken(TokenTypes.MODIFIERS);
public void visitToken(DetailAST token) { switch (token.getType()) { case TokenTypes.IMPORT: final String importClassPath = FullIdent.createFullIdentBelow(token).getText(); final String className = importClassPath.substring(importClassPath.lastIndexOf(".") + 1); simpleClassNameToQualifiedNameMap.put(className, importClassPath); break; case TokenTypes.CLASS_DEF: final AccessModifier accessModifier = CheckUtil.getAccessModifierFromModifiersToken( token.findFirstToken(TokenTypes.MODIFIERS)); isPublicClass = accessModifier.equals(AccessModifier.PUBLIC); break; case TokenTypes.METHOD_DEF: if (!isPublicClass) { return; } checkNoExternalDependencyExposed(token); break; default: break; } }
class from external dependency. You should not use it as a return or method argument type."; private static final Set<String> VALID_DEPENDENCY_SET = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( "java", "com.azure", "reactor", "io.netty.buffer.ByteBuf" ))); private final Map<String, String> simpleClassNameToQualifiedNameMap = new HashMap<>(); private boolean isPublicClass; @Override public void beginTree(DetailAST rootAST) { simpleClassNameToQualifiedNameMap.clear(); }
class from external dependency. You should not use it as a return or method argument type."; private static final Set<String> VALID_DEPENDENCY_SET = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( "java", "com.azure", "reactor", "io.netty.buffer.ByteBuf" ))); private final Map<String, String> simpleClassNameToQualifiedNameMap = new HashMap<>(); private boolean isPublicClass; @Override public void beginTree(DetailAST rootAST) { simpleClassNameToQualifiedNameMap.clear(); }
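A generic, hypothetical sketch of the pattern behind the suggestion above: a helper maps the modifier set to an enum once, and call sites compare enum values instead of asking the token whether it contains a PUBLIC literal. The names are illustrative; the real helper is Checkstyle's `CheckUtil.getAccessModifierFromModifiersToken`.

import java.util.Set;

public class AccessModifierDemo {
    enum AccessModifier { PUBLIC, PROTECTED, PACKAGE_PRIVATE, PRIVATE }

    // Illustrative stand-in for CheckUtil.getAccessModifierFromModifiersToken(...).
    static AccessModifier fromModifiers(Set<String> modifiers) {
        if (modifiers.contains("public")) {
            return AccessModifier.PUBLIC;
        }
        if (modifiers.contains("protected")) {
            return AccessModifier.PROTECTED;
        }
        if (modifiers.contains("private")) {
            return AccessModifier.PRIVATE;
        }
        return AccessModifier.PACKAGE_PRIVATE;
    }

    public static void main(String[] args) {
        boolean isPublicClass =
                fromModifiers(Set.of("public", "final")) == AccessModifier.PUBLIC;
        System.out.println(isPublicClass); // true
    }
}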
(Probably not directly, as there's no `ParameterContext` here, but ... somehow keep the two places in sync?)
QuarkusComponentTestConfiguration update(Class<?> testClass) { Map<String, String> configProperties = new HashMap<>(this.configProperties); List<Class<?>> componentClasses = new ArrayList<>(this.componentClasses); boolean useDefaultConfigProperties = this.useDefaultConfigProperties; boolean addNestedClassesAsComponents = this.addNestedClassesAsComponents; int configSourceOrdinal = this.configSourceOrdinal; List<AnnotationsTransformer> annotationsTransformers = new ArrayList<>(this.annotationsTransformers); QuarkusComponentTest testAnnotation = testClass.getAnnotation(QuarkusComponentTest.class); if (testAnnotation != null) { Collections.addAll(componentClasses, testAnnotation.value()); useDefaultConfigProperties = testAnnotation.useDefaultConfigProperties(); addNestedClassesAsComponents = testAnnotation.addNestedClassesAsComponents(); configSourceOrdinal = testAnnotation.configSourceOrdinal(); Class<? extends AnnotationsTransformer>[] transformers = testAnnotation.annotationsTransformers(); if (transformers.length > 0) { for (Class<? extends AnnotationsTransformer> transformerClass : transformers) { try { annotationsTransformers.add(transformerClass.getDeclaredConstructor().newInstance()); } catch (Exception e) { LOG.errorf("Unable to instantiate %s", transformerClass); } } } } Class<?> current = testClass; while (current != null && current != Object.class) { for (Field field : current.getDeclaredFields()) { if (field.isAnnotationPresent(Inject.class) && !resolvesToBuiltinBean(field.getType())) { componentClasses.add(field.getType()); } } if (addNestedClassesAsComponents) { for (Class<?> declaredClass : current.getDeclaredClasses()) { if (Modifier.isStatic(declaredClass.getModifiers())) { componentClasses.add(declaredClass); } } } for (Method method : current.getDeclaredMethods()) { if (method.isAnnotationPresent(Test.class)) { for (Parameter param : method.getParameters()) { if (param.getType() == TestInfo.class || param.isAnnotationPresent(InjectMock.class) || param.isAnnotationPresent(SkipInject.class)) { continue; } componentClasses.add(param.getType()); } } } current = current.getSuperclass(); } List<TestConfigProperty> testConfigProperties = new ArrayList<>(); Collections.addAll(testConfigProperties, testClass.getAnnotationsByType(TestConfigProperty.class)); for (TestConfigProperty testConfigProperty : testConfigProperties) { configProperties.put(testConfigProperty.key(), testConfigProperty.value()); } return new QuarkusComponentTestConfiguration(Map.copyOf(configProperties), List.copyOf(componentClasses), this.mockConfigurators, useDefaultConfigProperties, addNestedClassesAsComponents, configSourceOrdinal, List.copyOf(annotationsTransformers)); }
if (param.getType() == TestInfo.class
QuarkusComponentTestConfiguration update(Class<?> testClass) { Map<String, String> configProperties = new HashMap<>(this.configProperties); List<Class<?>> componentClasses = new ArrayList<>(this.componentClasses); boolean useDefaultConfigProperties = this.useDefaultConfigProperties; boolean addNestedClassesAsComponents = this.addNestedClassesAsComponents; int configSourceOrdinal = this.configSourceOrdinal; List<AnnotationsTransformer> annotationsTransformers = new ArrayList<>(this.annotationsTransformers); QuarkusComponentTest testAnnotation = testClass.getAnnotation(QuarkusComponentTest.class); if (testAnnotation != null) { Collections.addAll(componentClasses, testAnnotation.value()); useDefaultConfigProperties = testAnnotation.useDefaultConfigProperties(); addNestedClassesAsComponents = testAnnotation.addNestedClassesAsComponents(); configSourceOrdinal = testAnnotation.configSourceOrdinal(); Class<? extends AnnotationsTransformer>[] transformers = testAnnotation.annotationsTransformers(); if (transformers.length > 0) { for (Class<? extends AnnotationsTransformer> transformerClass : transformers) { try { annotationsTransformers.add(transformerClass.getDeclaredConstructor().newInstance()); } catch (Exception e) { LOG.errorf("Unable to instantiate %s", transformerClass); } } } } Class<?> current = testClass; while (current != null && current != Object.class) { for (Field field : current.getDeclaredFields()) { if (field.isAnnotationPresent(Inject.class) && !resolvesToBuiltinBean(field.getType())) { componentClasses.add(field.getType()); } } if (addNestedClassesAsComponents) { for (Class<?> declaredClass : current.getDeclaredClasses()) { if (Modifier.isStatic(declaredClass.getModifiers())) { componentClasses.add(declaredClass); } } } for (Method method : current.getDeclaredMethods()) { if (method.isAnnotationPresent(Test.class)) { for (Parameter param : method.getParameters()) { if (QuarkusComponentTestExtension.BUILTIN_PARAMETER.test(param) || param.isAnnotationPresent(InjectMock.class) || param.isAnnotationPresent(SkipInject.class)) { continue; } componentClasses.add(param.getType()); } } } current = current.getSuperclass(); } List<TestConfigProperty> testConfigProperties = new ArrayList<>(); Collections.addAll(testConfigProperties, testClass.getAnnotationsByType(TestConfigProperty.class)); for (TestConfigProperty testConfigProperty : testConfigProperties) { configProperties.put(testConfigProperty.key(), testConfigProperty.value()); } return new QuarkusComponentTestConfiguration(Map.copyOf(configProperties), List.copyOf(componentClasses), this.mockConfigurators, useDefaultConfigProperties, addNestedClassesAsComponents, configSourceOrdinal, List.copyOf(annotationsTransformers)); }
class QuarkusComponentTestConfiguration { static final QuarkusComponentTestConfiguration DEFAULT = new QuarkusComponentTestConfiguration(Map.of(), List.of(), List.of(), false, true, QuarkusComponentTestExtensionBuilder.DEFAULT_CONFIG_SOURCE_ORDINAL, List.of()); private static final Logger LOG = Logger.getLogger(QuarkusComponentTestConfiguration.class); final Map<String, String> configProperties; final List<Class<?>> componentClasses; final List<MockBeanConfiguratorImpl<?>> mockConfigurators; final boolean useDefaultConfigProperties; final boolean addNestedClassesAsComponents; final int configSourceOrdinal; final List<AnnotationsTransformer> annotationsTransformers; QuarkusComponentTestConfiguration(Map<String, String> configProperties, List<Class<?>> componentClasses, List<MockBeanConfiguratorImpl<?>> mockConfigurators, boolean useDefaultConfigProperties, boolean addNestedClassesAsComponents, int configSourceOrdinal, List<AnnotationsTransformer> annotationsTransformers) { this.configProperties = configProperties; this.componentClasses = componentClasses; this.mockConfigurators = mockConfigurators; this.useDefaultConfigProperties = useDefaultConfigProperties; this.addNestedClassesAsComponents = addNestedClassesAsComponents; this.configSourceOrdinal = configSourceOrdinal; this.annotationsTransformers = annotationsTransformers; } QuarkusComponentTestConfiguration update(Method testMethod) { Map<String, String> configProperties = new HashMap<>(this.configProperties); List<TestConfigProperty> testConfigProperties = new ArrayList<>(); Collections.addAll(testConfigProperties, testMethod.getAnnotationsByType(TestConfigProperty.class)); for (TestConfigProperty testConfigProperty : testConfigProperties) { configProperties.put(testConfigProperty.key(), testConfigProperty.value()); } return new QuarkusComponentTestConfiguration(configProperties, componentClasses, mockConfigurators, useDefaultConfigProperties, addNestedClassesAsComponents, configSourceOrdinal, annotationsTransformers); } private static boolean resolvesToBuiltinBean(Class<?> rawType) { return Provider.class.equals(rawType) || Instance.class.equals(rawType) || InjectableInstance.class.equals(rawType) || Event.class.equals(rawType) || BeanContainer.class.equals(rawType) || BeanManager.class.equals(rawType); } }
class QuarkusComponentTestConfiguration { static final QuarkusComponentTestConfiguration DEFAULT = new QuarkusComponentTestConfiguration(Map.of(), List.of(), List.of(), false, true, QuarkusComponentTestExtensionBuilder.DEFAULT_CONFIG_SOURCE_ORDINAL, List.of()); private static final Logger LOG = Logger.getLogger(QuarkusComponentTestConfiguration.class); final Map<String, String> configProperties; final List<Class<?>> componentClasses; final List<MockBeanConfiguratorImpl<?>> mockConfigurators; final boolean useDefaultConfigProperties; final boolean addNestedClassesAsComponents; final int configSourceOrdinal; final List<AnnotationsTransformer> annotationsTransformers; QuarkusComponentTestConfiguration(Map<String, String> configProperties, List<Class<?>> componentClasses, List<MockBeanConfiguratorImpl<?>> mockConfigurators, boolean useDefaultConfigProperties, boolean addNestedClassesAsComponents, int configSourceOrdinal, List<AnnotationsTransformer> annotationsTransformers) { this.configProperties = configProperties; this.componentClasses = componentClasses; this.mockConfigurators = mockConfigurators; this.useDefaultConfigProperties = useDefaultConfigProperties; this.addNestedClassesAsComponents = addNestedClassesAsComponents; this.configSourceOrdinal = configSourceOrdinal; this.annotationsTransformers = annotationsTransformers; } QuarkusComponentTestConfiguration update(Method testMethod) { Map<String, String> configProperties = new HashMap<>(this.configProperties); List<TestConfigProperty> testConfigProperties = new ArrayList<>(); Collections.addAll(testConfigProperties, testMethod.getAnnotationsByType(TestConfigProperty.class)); for (TestConfigProperty testConfigProperty : testConfigProperties) { configProperties.put(testConfigProperty.key(), testConfigProperty.value()); } return new QuarkusComponentTestConfiguration(configProperties, componentClasses, mockConfigurators, useDefaultConfigProperties, addNestedClassesAsComponents, configSourceOrdinal, annotationsTransformers); } private static boolean resolvesToBuiltinBean(Class<?> rawType) { return Provider.class.equals(rawType) || Instance.class.equals(rawType) || InjectableInstance.class.equals(rawType) || Event.class.equals(rawType) || BeanContainer.class.equals(rawType) || BeanManager.class.equals(rawType); } }
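A small sketch of the "keep the two places in sync" idea that `QuarkusComponentTestExtension.BUILTIN_PARAMETER` realizes: both call sites consult one shared `Predicate<Parameter>`, so a new built-in parameter type is added in exactly one place. The predicate body below is an assumption for illustration, not copied from Quarkus.

import java.lang.reflect.Parameter;
import java.util.function.Predicate;

public class SharedPredicateDemo {
    // Single shared definition; hypothetical body -- the real predicate may cover more types.
    static final Predicate<Parameter> BUILTIN_PARAMETER =
            param -> "TestInfo".equals(param.getType().getSimpleName());

    // Call site 1: skip built-ins when collecting component classes.
    static boolean registerAsComponent(Parameter param) {
        return !BUILTIN_PARAMETER.test(param);
    }

    // Call site 2: let the test extension resolve built-ins itself.
    static boolean resolvedByExtension(Parameter param) {
        return BUILTIN_PARAMETER.test(param);
    }
}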
> Should be covered with one test case. Sure, will add one later.
public static Properties flatten(Properties config) { final Properties flattenProperties = new Properties(); Collections.list(config.propertyNames()).stream().forEach( name -> { Preconditions.checkArgument(name instanceof String); flattenProperties.setProperty((String) name, config.getProperty((String) name)); } ); return flattenProperties; }
final Properties flattenProperties = new Properties();
public static Properties flatten(Properties config) { final Properties flattenProperties = new Properties(); Collections.list(config.propertyNames()).stream().forEach( name -> { Preconditions.checkArgument(name instanceof String); flattenProperties.setProperty((String) name, config.getProperty((String) name)); } ); return flattenProperties; }
class PropertiesUtil { /** * Get integer from properties. * This method throws an exception if the integer is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static int getInt(Properties config, String key, int defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { try { return Integer.parseInt(val); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Value for configuration key='" + key + "' is not set correctly. " + "Entered value='" + val + "'. Default value='" + defaultValue + "'"); } } } /** * Get long from properties. * This method throws an exception if the long is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static long getLong(Properties config, String key, long defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { try { return Long.parseLong(val); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Value for configuration key='" + key + "' is not set correctly. " + "Entered value='" + val + "'. Default value='" + defaultValue + "'"); } } } /** * Get long from properties. * This method only logs if the long is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static long getLong(Properties config, String key, long defaultValue, Logger logger) { try { return getLong(config, key, defaultValue); } catch (IllegalArgumentException iae) { logger.warn(iae.getMessage()); return defaultValue; } } /** * Get boolean from properties. * This method returns {@code true} iff the parsed value is "true". * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static boolean getBoolean(Properties config, String key, boolean defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { return Boolean.parseBoolean(val); } } /** * Flatten a recursive {@link Properties} to a first level property map. * In some cases, {KafkaProducer * without considering its default properties. * * @param config Properties to be flatten * @return Properties without defaults; all properties are put in the first-level */ /** Private default constructor to prevent instantiation. */ private PropertiesUtil() {} }
class PropertiesUtil { /** * Get integer from properties. * This method throws an exception if the integer is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static int getInt(Properties config, String key, int defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { try { return Integer.parseInt(val); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Value for configuration key='" + key + "' is not set correctly. " + "Entered value='" + val + "'. Default value='" + defaultValue + "'"); } } } /** * Get long from properties. * This method throws an exception if the long is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static long getLong(Properties config, String key, long defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { try { return Long.parseLong(val); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Value for configuration key='" + key + "' is not set correctly. " + "Entered value='" + val + "'. Default value='" + defaultValue + "'"); } } } /** * Get long from properties. * This method only logs if the long is not valid. * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static long getLong(Properties config, String key, long defaultValue, Logger logger) { try { return getLong(config, key, defaultValue); } catch (IllegalArgumentException iae) { logger.warn(iae.getMessage()); return defaultValue; } } /** * Get boolean from properties. * This method returns {@code true} iff the parsed value is "true". * * @param config Properties * @param key key in Properties * @param defaultValue default value if value is not set * @return default or value of key */ public static boolean getBoolean(Properties config, String key, boolean defaultValue) { String val = config.getProperty(key); if (val == null) { return defaultValue; } else { return Boolean.parseBoolean(val); } } /** * Flatten a recursive {@link Properties} to a first level property map. * * <p>In some cases, {@code KafkaProducer * without considering its default properties. * * @param config Properties to be flattened * @return Properties without defaults; all properties are put in the first-level */ /** Private default constructor to prevent instantiation. */ private PropertiesUtil() {} }
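A sketch of the single test case promised in the reply above, assuming only the JDK plus the `flatten` method shown: defaults handed to `new Properties(defaults)` live in a nested chain that raw `Hashtable` access ignores but `propertyNames()` exposes, so `flatten` pulls them up to the first level.

import java.util.Properties;

public class FlattenDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("bootstrap.servers", "localhost:9092");

        Properties config = new Properties(defaults); // defaults sit in a nested chain
        config.setProperty("group.id", "my-group");

        System.out.println(config.get("bootstrap.servers"));         // null: raw Hashtable view skips defaults
        System.out.println(config.getProperty("bootstrap.servers")); // localhost:9092

        Properties flat = PropertiesUtil.flatten(config);
        System.out.println(flat.get("bootstrap.servers"));           // localhost:9092: now first-level
        System.out.println(flat.size());                             // 2
    }
}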
No, this would not happen. If the finally block throws an exception, it is either thrown directly (when the try block has not thrown an exception) or added as a suppressed exception to the try block's exception.
protected void runTest(RunnableWithException test) { Throwable testFailure = null; try { test.run(); } catch (Throwable t) { testFailure = t; } try { Deadline deadline = Deadline.now().plus(Duration.ofSeconds(10)); boolean isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); while (deadline.hasTimeLeft() && isAnyJobRunning) { try { Thread.sleep(500); } catch (InterruptedException e) { Assert.fail("Should not happen"); } isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); } if (isAnyJobRunning) { final List<String> runningApps = yarnClient.getApplications().stream() .filter(YarnTestBase::isApplicationRunning) .map(app -> "App " + app.getApplicationId() + " is in state " + app.getYarnApplicationState() + '.') .collect(Collectors.toList()); if (!runningApps.isEmpty()) { Assert.fail("There is at least one application on the cluster that is not finished." + runningApps); } } } catch (Throwable t) { throw new AssertionError(ExceptionUtils.firstOrSuppressed(t, testFailure)); } }
testFailure = t;
protected void runTest(RunnableWithException test) throws Exception { try (final CleanupYarnApplication ignored = new CleanupYarnApplication()) { test.run(); } }
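A self-contained sketch of the semantics described in the comment above, using try-with-resources as the revised `runTest` does: when both the try block and `close()` throw, the try block's exception propagates and the close failure is attached via `getSuppressed()`; when only `close()` throws, that exception propagates directly.

public class SuppressedExceptionDemo {
    static class FailingResource implements AutoCloseable {
        @Override
        public void close() throws Exception {
            throw new Exception("from close (the implicit finally)");
        }
    }

    public static void main(String[] args) {
        try (FailingResource r = new FailingResource()) {
            throw new Exception("from the try block");
        } catch (Exception e) {
            System.out.println(e.getMessage());                    // from the try block
            System.out.println(e.getSuppressed()[0].getMessage()); // from close (the implicit finally)
        }

        try (FailingResource r = new FailingResource()) {
            System.out.println("try block completes normally");
        } catch (Exception e) {
            System.out.println(e.getMessage());                    // from close (the implicit finally)
        }
    }
}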
class YarnTestBase extends TestLogger { private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class); protected static final PrintStream ORIGINAL_STDOUT = System.out; protected static final PrintStream ORIGINAL_STDERR = System.err; private static final InputStream ORIGINAL_STDIN = System.in; protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name"; protected static final int NUM_NODEMANAGERS = 2; /** The tests are scanning for these strings in the final output. */ protected static final String[] PROHIBITED_STRINGS = { "Exception", "Started SelectChannelConnector@0.0.0.0:8081" }; /** These strings are white-listed, overriding the prohibited strings. */ protected static final String[] WHITELISTED_STRINGS = { "akka.remote.RemoteTransportExceptionNoStackTrace", "java.lang.InterruptedException", "Remote connection to [null] failed with java.net.ConnectException: Connection refused", "Remote connection to [null] failed with java.nio.channels.NotYetConnectedException", "java.io.IOException: Connection reset by peer", "java.util.concurrent.RejectedExecutionException: Worker has already been shutdown", "org.apache.flink.util.FlinkException: Stopping JobMaster", "org.apache.flink.util.FlinkException: JobManager is shutting down.", "lost the leadership." }; @ClassRule public static TemporaryFolder tmp = new TemporaryFolder(); protected static MiniYARNCluster yarnCluster = null; /** * Uberjar (fat jar) file of Flink. */ protected static File flinkUberjar; protected static final YarnConfiguration YARN_CONFIGURATION; /** * lib/ folder of the flink distribution. */ protected static File flinkLibFolder; /** * Temporary folder where Flink configurations will be kept for secure run. */ protected static File tempConfPathForSecureRun = null; protected static File flinkShadedHadoopDir; protected static File yarnSiteXML = null; private YarnClient yarnClient = null; private static org.apache.flink.configuration.Configuration globalConfiguration; protected org.apache.flink.configuration.Configuration flinkConfiguration; static { YARN_CONFIGURATION = new YarnConfiguration(); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 32); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); YARN_CONFIGURATION.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4); YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600); YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false); YARN_CONFIGURATION.setInt(YarnConfiguration.NM_VCORES, 666); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000); } public static void populateYarnSecureConfigurations(Configuration conf, String principal, String keytab) { conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); conf.set(YarnConfiguration.RM_KEYTAB, keytab); conf.set(YarnConfiguration.RM_PRINCIPAL, principal); conf.set(YarnConfiguration.NM_KEYTAB, keytab); conf.set(YarnConfiguration.NM_PRINCIPAL, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); 
conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set("hadoop.security.auth_to_local", "RULE:[1:$1] RULE:[2:$1]"); } @Before public void setupYarnClient() { if (yarnClient == null) { yarnClient = YarnClient.createYarnClient(); yarnClient.init(getYarnConfiguration()); yarnClient.start(); } flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration); } /** * Sleep a bit between the tests (we are re-using the YARN cluster for the tests). */ @After public void shutdownYarnClient() { yarnClient.stop(); } private static boolean isApplicationRunning(ApplicationReport app) { final YarnApplicationState yarnApplicationState = app.getYarnApplicationState(); return yarnApplicationState != YarnApplicationState.FINISHED && app.getYarnApplicationState() != YarnApplicationState.KILLED && app.getYarnApplicationState() != YarnApplicationState.FAILED; } @Nullable protected YarnClient getYarnClient() { return yarnClient; } protected static YarnConfiguration getYarnConfiguration() { return YARN_CONFIGURATION; } /** * Locate a file or directory. */ public static File findFile(String startAt, FilenameFilter fnf) { File root = new File(startAt); String[] files = root.list(); if (files == null) { return null; } for (String file : files) { File f = new File(startAt + File.separator + file); if (f.isDirectory()) { File r = findFile(f.getAbsolutePath(), fnf); if (r != null) { return r; } } else if (fnf.accept(f.getParentFile(), f.getName())) { return f; } } return null; } @Nonnull YarnClusterDescriptor createYarnClusterDescriptor(org.apache.flink.configuration.Configuration flinkConfiguration) { final YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor( flinkConfiguration, YARN_CONFIGURATION, CliFrontend.getConfigurationDirectoryFromEnv(), yarnClient, true); yarnClusterDescriptor.setLocalJarPath(new Path(flinkUberjar.toURI())); yarnClusterDescriptor.addShipFiles(Collections.singletonList(flinkLibFolder)); return yarnClusterDescriptor; } /** * Filter to find root dir of the flink-yarn dist. */ public static class RootDirFilenameFilter implements FilenameFilter { @Override public boolean accept(File dir, String name) { return name.startsWith("flink-dist") && name.endsWith(".jar") && dir.toString().contains("/lib"); } } /** * A simple {@link FilenameFilter} that only accepts files if their name contains every string in the array passed * to the constructor. */ public static class ContainsName implements FilenameFilter { private String[] names; private String excludeInPath = null; /** * @param names which have to be included in the filename. */ public ContainsName(String[] names) { this.names = names; } public ContainsName(String[] names, String excludeInPath) { this.names = names; this.excludeInPath = excludeInPath; } @Override public boolean accept(File dir, String name) { if (excludeInPath == null) { for (String n: names) { if (!name.contains(n)) { return false; } } return true; } else { for (String n: names) { if (!name.contains(n)) { return false; } } return !dir.toString().contains(excludeInPath); } } } public static void writeYarnSiteConfigXML(Configuration yarnConf, File targetFolder) throws IOException { yarnSiteXML = new File(targetFolder, "/yarn-site.xml"); try (FileWriter writer = new FileWriter(yarnSiteXML)) { yarnConf.writeXml(writer); writer.flush(); } } /** * This method checks the written TaskManager and JobManager log files * for exceptions. 
* * <p>WARN: Please make sure the tool doesn't find old logfiles from previous test runs. * So always run "mvn clean" before running the tests here. * */ public static void ensureNoProhibitedStringInLogFiles(final String[] prohibited, final String[] whitelisted) { File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to exist", cwd.exists()); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to be a directory", cwd.isDirectory()); List<String> prohibitedExcerpts = new ArrayList<>(); File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { File f = new File(dir.getAbsolutePath() + "/" + name); try { BufferingScanner scanner = new BufferingScanner(new Scanner(f), 10); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String aProhibited : prohibited) { if (lineFromFile.contains(aProhibited)) { boolean whitelistedFound = false; for (String white : whitelisted) { if (lineFromFile.contains(white)) { whitelistedFound = true; break; } } if (!whitelistedFound) { Marker fatal = MarkerFactory.getMarker("FATAL"); LOG.error(fatal, "Prohibited String '{}' in line '{}'", aProhibited, lineFromFile); StringBuilder logExcerpt = new StringBuilder(); logExcerpt.append(System.lineSeparator()); for (String previousLine : scanner.getPreviousLines()) { logExcerpt.append(previousLine); logExcerpt.append(System.lineSeparator()); } logExcerpt.append(lineFromFile); logExcerpt.append(System.lineSeparator()); while (scanner.hasNextLine()) { String line = scanner.nextLine(); logExcerpt.append(line); logExcerpt.append(System.lineSeparator()); if (line.isEmpty() || (!Character.isWhitespace(line.charAt(0)) && !line.startsWith("Caused by"))) { for (int x = 0; x < 10 && scanner.hasNextLine(); x++) { logExcerpt.append(scanner.nextLine()); logExcerpt.append(System.lineSeparator()); } break; } } prohibitedExcerpts.add(logExcerpt.toString()); return true; } } } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { Scanner scanner = null; try { scanner = new Scanner(foundFile); } catch (FileNotFoundException e) { Assert.fail("Unable to locate file: " + e.getMessage() + " file: " + foundFile.getAbsolutePath()); } LOG.warn("Found a file with a prohibited string. Printing contents:"); while (scanner.hasNextLine()) { LOG.warn("LINE: " + scanner.nextLine()); } Assert.fail( "Found a file " + foundFile + " with a prohibited string (one of " + Arrays.toString(prohibited) + "). 
" + "Excerpts:" + System.lineSeparator() + prohibitedExcerpts); } } public static boolean verifyStringsInNamedLogFiles( final String[] mustHave, final String fileName) { List<String> mustHaveList = Arrays.asList(mustHave); File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!cwd.exists() || !cwd.isDirectory()) { return false; } File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (fileName != null && !name.equals(fileName)) { return false; } File f = new File(dir.getAbsolutePath() + "/" + name); LOG.info("Searching in {}", f.getAbsolutePath()); try { Set<String> foundSet = new HashSet<>(mustHave.length); Scanner scanner = new Scanner(f); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String str : mustHave) { if (lineFromFile.contains(str)) { foundSet.add(str); } } if (foundSet.containsAll(mustHaveList)) { return true; } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { LOG.info("Found string {} in {}.", Arrays.toString(mustHave), foundFile.getAbsolutePath()); return true; } else { return false; } } public static void sleep(int time) { try { Thread.sleep(time); } catch (InterruptedException e) { LOG.warn("Interruped", e); } } public static int getRunningContainers() { int count = 0; for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) { NodeManager nm = yarnCluster.getNodeManager(nmId); ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers(); count += containers.size(); } return count; } public static void startYARNSecureMode(YarnConfiguration conf, String principal, String keytab) { start(conf, principal, keytab); } public static void startYARNWithConfig(YarnConfiguration conf) { start(conf, null, null); } private static void start(YarnConfiguration conf, String principal, String keytab) { File homeDir = null; try { homeDir = tmp.newFolder(); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } System.setProperty("user.home", homeDir.getAbsolutePath()); String uberjarStartLoc = ".."; LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc).getAbsolutePath()); flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter()); Assert.assertNotNull("Flink uberjar not found", flinkUberjar); String flinkDistRootDir = flinkUberjar.getParentFile().getParent(); flinkLibFolder = flinkUberjar.getParentFile(); flinkShadedHadoopDir = Paths.get("target/shaded-hadoop").toFile(); Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder); Assert.assertTrue("lib folder not found", flinkLibFolder.exists()); Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory()); if (!flinkUberjar.exists()) { Assert.fail("Unable to locate yarn-uberjar.jar"); } try { LOG.info("Starting up MiniYARNCluster"); if (yarnCluster == null) { final String testName = conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY); yarnCluster = new MiniYARNCluster( testName == null ? 
"YarnTest_" + UUID.randomUUID() : testName, NUM_NODEMANAGERS, 1, 1); yarnCluster.init(conf); yarnCluster.start(); } Map<String, String> map = new HashMap<String, String>(System.getenv()); File flinkConfDirPath = findFile(flinkDistRootDir, new ContainsName(new String[]{"flink-conf.yaml"})); Assert.assertNotNull(flinkConfDirPath); final String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath(); globalConfiguration = GlobalConfiguration.loadConfiguration(confDirPath); tempConfPathForSecureRun = tmp.newFolder("conf"); FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun); BootstrapTools.writeConfiguration( globalConfiguration, new File(tempConfPathForSecureRun, "flink-conf.yaml")); String configDir = tempConfPathForSecureRun.getAbsolutePath(); LOG.info("Temporary Flink configuration directory to be used for secure test: {}", configDir); Assert.assertNotNull(configDir); map.put(ConfigConstants.ENV_FLINK_CONF_DIR, configDir); File targetTestClassesFolder = new File("target/test-classes"); writeYarnSiteConfigXML(conf, targetTestClassesFolder); map.put("IN_TESTS", "yes we are in tests"); map.put("YARN_CONF_DIR", targetTestClassesFolder.getAbsolutePath()); TestBaseUtils.setEnv(map); Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED); while (!yarnCluster.waitForNodeManagersToConnect(500)) { LOG.info("Waiting for Nodemanagers to connect"); } } catch (Exception ex) { ex.printStackTrace(); LOG.error("setup failure", ex); Assert.fail(); } } /** * Default @BeforeClass impl. Overwrite this for passing a different configuration */ @BeforeClass public static void setup() throws Exception { startYARNWithConfig(YARN_CONFIGURATION); } protected static ByteArrayOutputStream outContent; protected static ByteArrayOutputStream errContent; enum RunTypes { YARN_SESSION, CLI_FRONTEND } /** * This method returns once the "startedAfterString" has been seen. 
*/ protected Runner startWithArgs(String[] args, String startedAfterString, RunTypes type) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 60; Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, 0, stdinPrintStream); runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs())."); runner.start(); for (int second = 0; second < startTimeoutSeconds; second++) { sleep(1000); if (outContent.toString().contains(startedAfterString) || errContent.toString().contains(startedAfterString)) { LOG.info("Found expected output in redirected streams"); return runner; } if (!runner.isAlive()) { resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed with exception.", runner.getRunnerError()); } Assert.fail("Runner thread died before the test was finished."); } } resetStreamsAndSendOutput(); Assert.fail("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string did not show up"); return null; } protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnStrings, RunTypes type, int returnCode) throws IOException { runWithArgs(args, terminateAfterString, failOnStrings, type, returnCode, false); } /** * The test has been passed once the "terminateAfterString" has been seen. * @param args Command line arguments for the runner * @param terminateAfterString the runner is searching the stdout and stderr for this string. as soon as it appears, the test has passed * @param failOnPatterns The runner is searching stdout and stderr for the pattern (regexp) specified here. If one appears, the test has failed * @param type Set the type of the runner * @param expectedReturnValue Expected return code from the runner. 
* @param checkLogForTerminateString If true, the runner checks also the log4j logger for the terminate string */ protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnPatterns, RunTypes type, int expectedReturnValue, boolean checkLogForTerminateString) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 180; final long deadline = System.currentTimeMillis() + (startTimeoutSeconds * 1000); Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, expectedReturnValue, stdinPrintStream); runner.start(); boolean expectedStringSeen = false; boolean testPassedFromLog4j = false; long shutdownTimeout = 30000L; do { sleep(1000); String outContentString = outContent.toString(); String errContentString = errContent.toString(); if (failOnPatterns != null) { for (String failOnString : failOnPatterns) { Pattern pattern = Pattern.compile(failOnString); if (pattern.matcher(outContentString).find() || pattern.matcher(errContentString).find()) { LOG.warn("Failing test. Output contained illegal string '" + failOnString + "'"); resetStreamsAndSendOutput(); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } Assert.fail("Output contained illegal string '" + failOnString + "'"); } } } if (checkLogForTerminateString) { LoggingEvent matchedEvent = UtilsTest.getEventContainingString(terminateAfterString); if (matchedEvent != null) { testPassedFromLog4j = true; LOG.info("Found expected output in logging event {}", matchedEvent); } } if (outContentString.contains(terminateAfterString) || errContentString.contains(terminateAfterString) || testPassedFromLog4j) { expectedStringSeen = true; LOG.info("Found expected output in redirected streams"); LOG.info("RunWithArgs: request runner to stop"); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } LOG.warn("RunWithArgs runner stopped."); } else { if (!runner.isAlive()) { break; } } } while (runner.getRunnerError() == null && !expectedStringSeen && System.currentTimeMillis() < deadline); resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed", runner.getRunnerError()); } Assert.assertTrue("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string \"" + terminateAfterString + "\" did not show up.", expectedStringSeen); LOG.info("Test was successful"); } protected static void resetStreamsAndSendOutput() { System.setOut(ORIGINAL_STDOUT); System.setErr(ORIGINAL_STDERR); System.setIn(ORIGINAL_STDIN); LOG.info("Sending stdout content through logger: \n\n{}\n\n", outContent.toString()); LOG.info("Sending stderr content through logger: \n\n{}\n\n", errContent.toString()); } /** * Utility class to run yarn jobs. 
*/ protected static class Runner extends Thread { private final String[] args; private final org.apache.flink.configuration.Configuration configuration; private final String configurationDirectory; private final int expectedReturnValue; private final PrintStream stdinPrintStream; private RunTypes type; private FlinkYarnSessionCli yCli; private Throwable runnerError; public Runner( String[] args, org.apache.flink.configuration.Configuration configuration, String configurationDirectory, RunTypes type, int expectedReturnValue, PrintStream stdinPrintStream) { this.args = args; this.configuration = Preconditions.checkNotNull(configuration); this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory); this.type = type; this.expectedReturnValue = expectedReturnValue; this.stdinPrintStream = Preconditions.checkNotNull(stdinPrintStream); } @Override public void run() { try { int returnValue; switch (type) { case YARN_SESSION: yCli = new FlinkYarnSessionCli( configuration, configurationDirectory, "", "", true); returnValue = yCli.run(args); break; case CLI_FRONTEND: try { CliFrontend cli = new CliFrontend( configuration, CliFrontend.loadCustomCommandLines(configuration, configurationDirectory)); returnValue = cli.parseParameters(args); } catch (Exception e) { throw new RuntimeException("Failed to execute the following args with CliFrontend: " + Arrays.toString(args), e); } break; default: throw new RuntimeException("Unknown type " + type); } if (returnValue != this.expectedReturnValue) { Assert.fail("The YARN session returned with unexpected value=" + returnValue + " expected=" + expectedReturnValue); } } catch (Throwable t) { LOG.info("Runner stopped with exception", t); this.runnerError = t; } } /** Stops the Yarn session. */ public void sendStop() { stdinPrintStream.println("stop"); } public Throwable getRunnerError() { return runnerError; } } @AfterClass public static void teardown() throws Exception { LOG.info("Stopping MiniYarn Cluster"); yarnCluster.stop(); Map<String, String> map = new HashMap<>(System.getenv()); map.remove(ConfigConstants.ENV_FLINK_CONF_DIR); map.remove("YARN_CONF_DIR"); map.remove("IN_TESTS"); TestBaseUtils.setEnv(map); if (tempConfPathForSecureRun != null) { FileUtil.fullyDelete(tempConfPathForSecureRun); tempConfPathForSecureRun = null; } if (yarnSiteXML != null) { yarnSiteXML.delete(); } if (isOnTravis()) { File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!target.mkdirs()) { LOG.warn("Error creating dirs to {}", target); } File src = tmp.getRoot(); LOG.info("copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath()); try { FileUtils.copyDirectoryToDirectory(src, target); } catch (IOException e) { LOG.warn("Error copying the final files from {} to {}: msg: {}", src.getAbsolutePath(), target.getAbsolutePath(), e.getMessage(), e); } } } public static boolean isOnTravis() { return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true"); } /** * Wrapper around a {@link Scanner} that buffers the last N lines read. 
*/ private static class BufferingScanner { private final Scanner scanner; private final int numLinesBuffered; private final List<String> bufferedLines; BufferingScanner(Scanner scanner, int numLinesBuffered) { this.scanner = scanner; this.numLinesBuffered = numLinesBuffered; this.bufferedLines = new ArrayList<>(numLinesBuffered); } public boolean hasNextLine() { return scanner.hasNextLine(); } public String nextLine() { if (bufferedLines.size() == numLinesBuffered) { bufferedLines.remove(0); } String line = scanner.nextLine(); bufferedLines.add(line); return line; } public List<String> getPreviousLines() { return new ArrayList<>(bufferedLines); } } }
class YarnTestBase extends TestLogger { private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class); protected static final PrintStream ORIGINAL_STDOUT = System.out; protected static final PrintStream ORIGINAL_STDERR = System.err; private static final InputStream ORIGINAL_STDIN = System.in; protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name"; protected static final int NUM_NODEMANAGERS = 2; /** The tests are scanning for these strings in the final output. */ protected static final String[] PROHIBITED_STRINGS = { "Exception", "Started SelectChannelConnector@0.0.0.0:8081" }; /** These strings are white-listed, overriding the prohibited strings. */ protected static final String[] WHITELISTED_STRINGS = { "akka.remote.RemoteTransportExceptionNoStackTrace", "java.lang.InterruptedException", "Remote connection to [null] failed with java.net.ConnectException: Connection refused", "Remote connection to [null] failed with java.nio.channels.NotYetConnectedException", "java.io.IOException: Connection reset by peer", "java.util.concurrent.RejectedExecutionException: Worker has already been shutdown", "org.apache.flink.util.FlinkException: Stopping JobMaster", "org.apache.flink.util.FlinkException: JobManager is shutting down.", "lost the leadership." }; @ClassRule public static TemporaryFolder tmp = new TemporaryFolder(); protected static MiniYARNCluster yarnCluster = null; /** * Uberjar (fat jar) file of Flink. */ protected static File flinkUberjar; protected static final YarnConfiguration YARN_CONFIGURATION; /** * lib/ folder of the flink distribution. */ protected static File flinkLibFolder; /** * Temporary folder where Flink configurations will be kept for secure run. */ protected static File tempConfPathForSecureRun = null; protected static File flinkShadedHadoopDir; protected static File yarnSiteXML = null; private YarnClient yarnClient = null; private static org.apache.flink.configuration.Configuration globalConfiguration; protected org.apache.flink.configuration.Configuration flinkConfiguration; static { YARN_CONFIGURATION = new YarnConfiguration(); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 32); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); YARN_CONFIGURATION.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4); YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600); YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false); YARN_CONFIGURATION.setInt(YarnConfiguration.NM_VCORES, 666); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000); } public static void populateYarnSecureConfigurations(Configuration conf, String principal, String keytab) { conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); conf.set(YarnConfiguration.RM_KEYTAB, keytab); conf.set(YarnConfiguration.RM_PRINCIPAL, principal); conf.set(YarnConfiguration.NM_KEYTAB, keytab); conf.set(YarnConfiguration.NM_PRINCIPAL, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); 
conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set("hadoop.security.auth_to_local", "RULE:[1:$1] RULE:[2:$1]"); } @Before public void setupYarnClient() { if (yarnClient == null) { yarnClient = YarnClient.createYarnClient(); yarnClient.init(getYarnConfiguration()); yarnClient.start(); } flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration); } /** * Sleep a bit between the tests (we are re-using the YARN cluster for the tests). */ @After public void shutdownYarnClient() { yarnClient.stop(); } private class CleanupYarnApplication implements AutoCloseable { @Override public void close() throws Exception { Deadline deadline = Deadline.now().plus(Duration.ofSeconds(10)); boolean isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); while (deadline.hasTimeLeft() && isAnyJobRunning) { try { Thread.sleep(500); } catch (InterruptedException e) { Assert.fail("Should not happen"); } isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); } if (isAnyJobRunning) { final List<String> runningApps = yarnClient.getApplications().stream() .filter(YarnTestBase::isApplicationRunning) .map(app -> "App " + app.getApplicationId() + " is in state " + app.getYarnApplicationState() + '.') .collect(Collectors.toList()); if (!runningApps.isEmpty()) { Assert.fail("There is at least one application on the cluster that is not finished." + runningApps); } } } } private static boolean isApplicationRunning(ApplicationReport app) { final YarnApplicationState yarnApplicationState = app.getYarnApplicationState(); return yarnApplicationState != YarnApplicationState.FINISHED && app.getYarnApplicationState() != YarnApplicationState.KILLED && app.getYarnApplicationState() != YarnApplicationState.FAILED; } @Nullable protected YarnClient getYarnClient() { return yarnClient; } protected static YarnConfiguration getYarnConfiguration() { return YARN_CONFIGURATION; } /** * Locate a file or directory. */ public static File findFile(String startAt, FilenameFilter fnf) { File root = new File(startAt); String[] files = root.list(); if (files == null) { return null; } for (String file : files) { File f = new File(startAt + File.separator + file); if (f.isDirectory()) { File r = findFile(f.getAbsolutePath(), fnf); if (r != null) { return r; } } else if (fnf.accept(f.getParentFile(), f.getName())) { return f; } } return null; } @Nonnull YarnClusterDescriptor createYarnClusterDescriptor(org.apache.flink.configuration.Configuration flinkConfiguration) { final YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor( flinkConfiguration, YARN_CONFIGURATION, CliFrontend.getConfigurationDirectoryFromEnv(), yarnClient, true); yarnClusterDescriptor.setLocalJarPath(new Path(flinkUberjar.toURI())); yarnClusterDescriptor.addShipFiles(Collections.singletonList(flinkLibFolder)); return yarnClusterDescriptor; } /** * Filter to find root dir of the flink-yarn dist. */ public static class RootDirFilenameFilter implements FilenameFilter { @Override public boolean accept(File dir, String name) { return name.startsWith("flink-dist") && name.endsWith(".jar") && dir.toString().contains("/lib"); } } /** * A simple {@link FilenameFilter} that only accepts files if their name contains every string in the array passed * to the constructor. 
*/ public static class ContainsName implements FilenameFilter { private String[] names; private String excludeInPath = null; /** * @param names which have to be included in the filename. */ public ContainsName(String[] names) { this.names = names; } public ContainsName(String[] names, String excludeInPath) { this.names = names; this.excludeInPath = excludeInPath; } @Override public boolean accept(File dir, String name) { if (excludeInPath == null) { for (String n: names) { if (!name.contains(n)) { return false; } } return true; } else { for (String n: names) { if (!name.contains(n)) { return false; } } return !dir.toString().contains(excludeInPath); } } } public static void writeYarnSiteConfigXML(Configuration yarnConf, File targetFolder) throws IOException { yarnSiteXML = new File(targetFolder, "/yarn-site.xml"); try (FileWriter writer = new FileWriter(yarnSiteXML)) { yarnConf.writeXml(writer); writer.flush(); } } /** * This method checks the written TaskManager and JobManager log files * for exceptions. * * <p>WARN: Please make sure the tool doesn't find old logfiles from previous test runs. * So always run "mvn clean" before running the tests here. * */ public static void ensureNoProhibitedStringInLogFiles(final String[] prohibited, final String[] whitelisted) { File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to exist", cwd.exists()); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to be a directory", cwd.isDirectory()); List<String> prohibitedExcerpts = new ArrayList<>(); File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { File f = new File(dir.getAbsolutePath() + "/" + name); try { BufferingScanner scanner = new BufferingScanner(new Scanner(f), 10); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String aProhibited : prohibited) { if (lineFromFile.contains(aProhibited)) { boolean whitelistedFound = false; for (String white : whitelisted) { if (lineFromFile.contains(white)) { whitelistedFound = true; break; } } if (!whitelistedFound) { Marker fatal = MarkerFactory.getMarker("FATAL"); LOG.error(fatal, "Prohibited String '{}' in line '{}'", aProhibited, lineFromFile); StringBuilder logExcerpt = new StringBuilder(); logExcerpt.append(System.lineSeparator()); for (String previousLine : scanner.getPreviousLines()) { logExcerpt.append(previousLine); logExcerpt.append(System.lineSeparator()); } logExcerpt.append(lineFromFile); logExcerpt.append(System.lineSeparator()); while (scanner.hasNextLine()) { String line = scanner.nextLine(); logExcerpt.append(line); logExcerpt.append(System.lineSeparator()); if (line.isEmpty() || (!Character.isWhitespace(line.charAt(0)) && !line.startsWith("Caused by"))) { for (int x = 0; x < 10 && scanner.hasNextLine(); x++) { logExcerpt.append(scanner.nextLine()); logExcerpt.append(System.lineSeparator()); } break; } } prohibitedExcerpts.add(logExcerpt.toString()); return true; } } } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { Scanner scanner = null; try { scanner = new Scanner(foundFile); } catch (FileNotFoundException e) { Assert.fail("Unable to locate file: " + e.getMessage() + " file: " + foundFile.getAbsolutePath()); } LOG.warn("Found a file with a prohibited string. 
Printing contents:"); while (scanner.hasNextLine()) { LOG.warn("LINE: " + scanner.nextLine()); } Assert.fail( "Found a file " + foundFile + " with a prohibited string (one of " + Arrays.toString(prohibited) + "). " + "Excerpts:" + System.lineSeparator() + prohibitedExcerpts); } } public static boolean verifyStringsInNamedLogFiles( final String[] mustHave, final String fileName) { List<String> mustHaveList = Arrays.asList(mustHave); File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!cwd.exists() || !cwd.isDirectory()) { return false; } File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (fileName != null && !name.equals(fileName)) { return false; } File f = new File(dir.getAbsolutePath() + "/" + name); LOG.info("Searching in {}", f.getAbsolutePath()); try (Scanner scanner = new Scanner(f)) { Set<String> foundSet = new HashSet<>(mustHave.length); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String str : mustHave) { if (lineFromFile.contains(str)) { foundSet.add(str); } } if (foundSet.containsAll(mustHaveList)) { return true; } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { LOG.info("Found string {} in {}.", Arrays.toString(mustHave), foundFile.getAbsolutePath()); return true; } else { return false; } } public static boolean verifyTokenKindInContainerCredentials(final Collection<String> tokens, final String containerId) throws IOException { File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!cwd.exists() || !cwd.isDirectory()) { return false; } File containerTokens = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.equals(containerId + ".tokens"); } }); if (containerTokens != null) { LOG.info("Verifying tokens in {}", containerTokens.getAbsolutePath()); Credentials tmCredentials = Credentials.readTokenStorageFile(containerTokens, new Configuration()); Collection<Token<? extends TokenIdentifier>> userTokens = tmCredentials.getAllTokens(); Set<String> tokenKinds = new HashSet<>(4); for (Token<? 
extends TokenIdentifier> token : userTokens) { tokenKinds.add(token.getKind().toString()); } return tokenKinds.containsAll(tokens); } else { LOG.warn("Unable to find credential file for container {}", containerId); return false; } } public static String getContainerIdByLogName(String logName) { File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); File containerLog = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.equals(logName); } }); if (containerLog != null) { return containerLog.getParentFile().getName(); } else { throw new IllegalStateException("No container has log named " + logName); } } public static void sleep(int time) { try { Thread.sleep(time); } catch (InterruptedException e) { LOG.warn("Interrupted", e); } } public static int getRunningContainers() { int count = 0; for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) { NodeManager nm = yarnCluster.getNodeManager(nmId); ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers(); count += containers.size(); } return count; } public static void startYARNSecureMode(YarnConfiguration conf, String principal, String keytab) { start(conf, principal, keytab); } public static void startYARNWithConfig(YarnConfiguration conf) { start(conf, null, null); } private static void start(YarnConfiguration conf, String principal, String keytab) { File homeDir = null; try { homeDir = tmp.newFolder(); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } System.setProperty("user.home", homeDir.getAbsolutePath()); String uberjarStartLoc = ".."; LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc).getAbsolutePath()); flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter()); Assert.assertNotNull("Flink uberjar not found", flinkUberjar); String flinkDistRootDir = flinkUberjar.getParentFile().getParent(); flinkLibFolder = flinkUberjar.getParentFile(); flinkShadedHadoopDir = Paths.get("target/shaded-hadoop").toFile(); Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder); Assert.assertTrue("lib folder not found", flinkLibFolder.exists()); Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory()); if (!flinkUberjar.exists()) { Assert.fail("Unable to locate yarn-uberjar.jar"); } try { LOG.info("Starting up MiniYARNCluster"); if (yarnCluster == null) { final String testName = conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY); yarnCluster = new MiniYARNCluster( testName == null ? 
"YarnTest_" + UUID.randomUUID() : testName, NUM_NODEMANAGERS, 1, 1); yarnCluster.init(conf); yarnCluster.start(); } Map<String, String> map = new HashMap<String, String>(System.getenv()); File flinkConfDirPath = findFile(flinkDistRootDir, new ContainsName(new String[]{"flink-conf.yaml"})); Assert.assertNotNull(flinkConfDirPath); final String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath(); globalConfiguration = GlobalConfiguration.loadConfiguration(confDirPath); tempConfPathForSecureRun = tmp.newFolder("conf"); FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun); BootstrapTools.writeConfiguration( globalConfiguration, new File(tempConfPathForSecureRun, "flink-conf.yaml")); String configDir = tempConfPathForSecureRun.getAbsolutePath(); LOG.info("Temporary Flink configuration directory to be used for secure test: {}", configDir); Assert.assertNotNull(configDir); map.put(ConfigConstants.ENV_FLINK_CONF_DIR, configDir); File targetTestClassesFolder = new File("target/test-classes"); writeYarnSiteConfigXML(conf, targetTestClassesFolder); map.put("IN_TESTS", "yes we are in tests"); map.put("YARN_CONF_DIR", targetTestClassesFolder.getAbsolutePath()); TestBaseUtils.setEnv(map); Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED); while (!yarnCluster.waitForNodeManagersToConnect(500)) { LOG.info("Waiting for Nodemanagers to connect"); } } catch (Exception ex) { ex.printStackTrace(); LOG.error("setup failure", ex); Assert.fail(); } } /** * Default @BeforeClass impl. Overwrite this for passing a different configuration */ @BeforeClass public static void setup() throws Exception { startYARNWithConfig(YARN_CONFIGURATION); } protected static ByteArrayOutputStream outContent; protected static ByteArrayOutputStream errContent; enum RunTypes { YARN_SESSION, CLI_FRONTEND } /** * This method returns once the "startedAfterString" has been seen. 
*/ protected Runner startWithArgs(String[] args, String startedAfterString, RunTypes type) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 60; Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, 0, stdinPrintStream); runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs())."); runner.start(); for (int second = 0; second < startTimeoutSeconds; second++) { sleep(1000); if (outContent.toString().contains(startedAfterString) || errContent.toString().contains(startedAfterString)) { LOG.info("Found expected output in redirected streams"); return runner; } if (!runner.isAlive()) { resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed with exception.", runner.getRunnerError()); } Assert.fail("Runner thread died before the test was finished."); } } resetStreamsAndSendOutput(); Assert.fail("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string did not show up"); return null; } protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnStrings, RunTypes type, int returnCode) throws IOException { runWithArgs(args, terminateAfterString, failOnStrings, type, returnCode, false); } /** * The test has been passed once the "terminateAfterString" has been seen. * @param args Command line arguments for the runner * @param terminateAfterString the runner is searching the stdout and stderr for this string. as soon as it appears, the test has passed * @param failOnPatterns The runner is searching stdout and stderr for the pattern (regexp) specified here. If one appears, the test has failed * @param type Set the type of the runner * @param expectedReturnValue Expected return code from the runner. 
* @param checkLogForTerminateString If true, the runner checks also the log4j logger for the terminate string */ protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnPatterns, RunTypes type, int expectedReturnValue, boolean checkLogForTerminateString) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 180; final long deadline = System.currentTimeMillis() + (startTimeoutSeconds * 1000); Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, expectedReturnValue, stdinPrintStream); runner.start(); boolean expectedStringSeen = false; boolean testPassedFromLog4j = false; long shutdownTimeout = 30000L; do { sleep(1000); String outContentString = outContent.toString(); String errContentString = errContent.toString(); if (failOnPatterns != null) { for (String failOnString : failOnPatterns) { Pattern pattern = Pattern.compile(failOnString); if (pattern.matcher(outContentString).find() || pattern.matcher(errContentString).find()) { LOG.warn("Failing test. Output contained illegal string '" + failOnString + "'"); resetStreamsAndSendOutput(); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } Assert.fail("Output contained illegal string '" + failOnString + "'"); } } } if (checkLogForTerminateString) { LoggingEvent matchedEvent = UtilsTest.getEventContainingString(terminateAfterString); if (matchedEvent != null) { testPassedFromLog4j = true; LOG.info("Found expected output in logging event {}", matchedEvent); } } if (outContentString.contains(terminateAfterString) || errContentString.contains(terminateAfterString) || testPassedFromLog4j) { expectedStringSeen = true; LOG.info("Found expected output in redirected streams"); LOG.info("RunWithArgs: request runner to stop"); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } LOG.warn("RunWithArgs runner stopped."); } else { if (!runner.isAlive()) { break; } } } while (runner.getRunnerError() == null && !expectedStringSeen && System.currentTimeMillis() < deadline); resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed", runner.getRunnerError()); } Assert.assertTrue("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string \"" + terminateAfterString + "\" did not show up.", expectedStringSeen); LOG.info("Test was successful"); } protected static void resetStreamsAndSendOutput() { System.setOut(ORIGINAL_STDOUT); System.setErr(ORIGINAL_STDERR); System.setIn(ORIGINAL_STDIN); LOG.info("Sending stdout content through logger: \n\n{}\n\n", outContent.toString()); LOG.info("Sending stderr content through logger: \n\n{}\n\n", errContent.toString()); } /** * Utility class to run yarn jobs. 
*/ protected static class Runner extends Thread { private final String[] args; private final org.apache.flink.configuration.Configuration configuration; private final String configurationDirectory; private final int expectedReturnValue; private final PrintStream stdinPrintStream; private RunTypes type; private FlinkYarnSessionCli yCli; private Throwable runnerError; public Runner( String[] args, org.apache.flink.configuration.Configuration configuration, String configurationDirectory, RunTypes type, int expectedReturnValue, PrintStream stdinPrintStream) { this.args = args; this.configuration = Preconditions.checkNotNull(configuration); this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory); this.type = type; this.expectedReturnValue = expectedReturnValue; this.stdinPrintStream = Preconditions.checkNotNull(stdinPrintStream); } @Override public void run() { try { int returnValue; switch (type) { case YARN_SESSION: yCli = new FlinkYarnSessionCli( configuration, configurationDirectory, "", "", true); returnValue = yCli.run(args); break; case CLI_FRONTEND: try { CliFrontend cli = new CliFrontend( configuration, CliFrontend.loadCustomCommandLines(configuration, configurationDirectory)); returnValue = cli.parseParameters(args); } catch (Exception e) { throw new RuntimeException("Failed to execute the following args with CliFrontend: " + Arrays.toString(args), e); } break; default: throw new RuntimeException("Unknown type " + type); } if (returnValue != this.expectedReturnValue) { Assert.fail("The YARN session returned with unexpected value=" + returnValue + " expected=" + expectedReturnValue); } } catch (Throwable t) { LOG.info("Runner stopped with exception", t); this.runnerError = t; } } /** Stops the Yarn session. */ public void sendStop() { stdinPrintStream.println("stop"); } public Throwable getRunnerError() { return runnerError; } } @AfterClass public static void teardown() throws Exception { LOG.info("Stopping MiniYarn Cluster"); yarnCluster.stop(); Map<String, String> map = new HashMap<>(System.getenv()); map.remove(ConfigConstants.ENV_FLINK_CONF_DIR); map.remove("YARN_CONF_DIR"); map.remove("IN_TESTS"); TestBaseUtils.setEnv(map); if (tempConfPathForSecureRun != null) { FileUtil.fullyDelete(tempConfPathForSecureRun); tempConfPathForSecureRun = null; } if (yarnSiteXML != null) { yarnSiteXML.delete(); } if (isOnTravis()) { File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!target.mkdirs()) { LOG.warn("Error creating dirs to {}", target); } File src = tmp.getRoot(); LOG.info("copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath()); try { FileUtils.copyDirectoryToDirectory(src, target); } catch (IOException e) { LOG.warn("Error copying the final files from {} to {}: msg: {}", src.getAbsolutePath(), target.getAbsolutePath(), e.getMessage(), e); } } } public static boolean isOnTravis() { return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true"); } /** * Wrapper around a {@link Scanner} that buffers the last N lines read. 
*/ private static class BufferingScanner { private final Scanner scanner; private final int numLinesBuffered; private final List<String> bufferedLines; BufferingScanner(Scanner scanner, int numLinesBuffered) { this.scanner = scanner; this.numLinesBuffered = numLinesBuffered; this.bufferedLines = new ArrayList<>(numLinesBuffered); } public boolean hasNextLine() { return scanner.hasNextLine(); } public String nextLine() { if (bufferedLines.size() == numLinesBuffered) { bufferedLines.remove(0); } String line = scanner.nextLine(); bufferedLines.add(line); return line; } public List<String> getPreviousLines() { return new ArrayList<>(bufferedLines); } } }
Added a const value with https://github.com/ballerina-platform/ballerina-lang/pull/14163/commits/b58dcf2630d4aacb53767112899c20e0e564323c.
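For context, here is a minimal sketch of the refactor this comment refers to: the bare "*" literal in the equals check is replaced by a named constant. The constant name OPEN_SEALED_ARRAY mirrors its use in the revised method body below; the declaring class and its Javadoc here are assumptions for illustration, not the actual code of the linked commit.

// Illustrative sketch only: the real constant was introduced by the linked
// commit; the declaring class used here is hypothetical.
public final class ArrayTypeIndicators {

    // Names the "*" wildcard that marks an open sealed array dimension,
    // e.g. int[*], so the bare literal is not scattered through the parser.
    public static final String OPEN_SEALED_ARRAY = "*";

    private ArrayTypeIndicators() {
        // utility holder, not meant to be instantiated
    }
}

With such a constant in scope, the check in exitArrayTypeNameLabel becomes children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY), which is exactly the difference between the two method bodies below.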
public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (isInErrorState) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1).getText().equals("*")) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 1; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 1; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); }
} else if (children.get(index + 1).getText().equals("*")) {
public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (isInErrorState) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY)) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 1; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 1; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); }
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; private boolean enableExperimentalFeatures; private boolean isSiddhiRuntimeEnabled; BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); this.enableExperimentalFeatures = Boolean.parseBoolean( CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED)); this.isSiddhiRuntimeEnabled = Boolean.parseBoolean( CompilerOptions.getInstance(context).get(CompilerOptionName.SIDDHI_RUNTIME_ENABLED)); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitSimpleParameter(BallerinaParser.SimpleParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (isInErrorState) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().Identifier().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = ctx.Identifier() != null ? ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPosFromIdentifier(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.EXTERN() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean privateFunc = ctx.PRIVATE() != null; if (ctx.Identifier() != null) { this.pkgBuilder .endObjectOuterFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, privateFunc, remoteFunc, nativeFunc, bodyExists, ctx.Identifier().getText()); return; } boolean isReceiverAttached = ctx.typeName() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, remoteFunc, nativeFunc, privateFunc, bodyExists, isReceiverAttached, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, 
ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract, isClient, false); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, ctx.annotationAttachment().size(), false, isOptional); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); 
Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, deprecatedDocExists, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean nativeFunc = ctx.EXTERN() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, isPrivate, remoteFunc, resourceFunc, nativeFunc, bodyExists, markdownDocExists, deprecatedDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.typeName() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicAnnotation, isTypeAttached); } /** * {@inheritDoc} */ @Override public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isTypeAvailable = ctx.typeName() != null; this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isPublic, isTypeAvailable); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; boolean isListenerVar = ctx.LISTENER() != null; this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable, isListenerVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAttachPoint(AttachPoint.getAttachmentPoint(ctx.getText()), getWS(ctx)); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWorker(diagnosticSrc.pkgID); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } String 
workerName = null; if (ctx.workerDefinition() != null) { workerName = ctx.workerDefinition().Identifier().getText(); } boolean retParamsAvail = ctx.workerDefinition().returnParameter() != null; this.pkgBuilder.addWorker(getCurrentPos(ctx), getWS(ctx), workerName, retParamsAvail); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.typeName().size()); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void exitRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; boolean sealed = hasRestField ? 
ctx.recordRestFieldDefinition().sealedLiteral() != null : false; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, sealed, hasRestField); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.functionTypeName() != null) { return; } if (ctx.errorTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); checkTypeValidity(typeName, pos); if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName); } } @Override public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } boolean isReasonTypeExists = !ctx.typeName().isEmpty(); boolean isDetailsTypeExists = ctx.typeName().size() > 1; this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), isReasonTypeExists, isDetailsTypeExists); } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (isInErrorState) { return; } boolean paramsAvail = false, paramsTypeOnly = false, retParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; paramsTypeOnly = true; } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } String detailIdentifier = null; if (ctx.Identifier(1) != null) { detailIdentifier = ctx.Identifier(1).getText(); } this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier(0).getText(), detailIdentifier, ctx.recordBindingPattern() != null); } @Override public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean recordBindingPattern = false; if (ctx.recordRefBindingPattern() != null || ctx.variableReference().size() == 2) { recordBindingPattern = true; } this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx), 
recordBindingPattern); } @Override public void exitTupleBindingPattern(BallerinaParser.TupleBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size()); } @Override public void exitTupleRefBindingPattern(BallerinaParser.TupleRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx), ctx.bindingRefPattern().size()); } @Override public void enterEntryBindingPattern(BallerinaParser.EntryBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableList(); } @Override public void exitEntryBindingPattern(BallerinaParser.EntryBindingPatternContext ctx) { if (isInErrorState) { return; } RestBindingPatternState restBindingPattern = (ctx.restBindingPattern() == null) ? NO_BINDING_PATTERN : ((ctx.restBindingPattern().sealedLiteral() != null) ? CLOSED_REST_BINDING_PATTERN : OPEN_REST_BINDING_PATTERN); this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), restBindingPattern); } @Override public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordBindingWS(getWS(ctx)); } @Override public void enterEntryRefBindingPattern(BallerinaParser.EntryRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableReferenceList(); } @Override public void exitEntryRefBindingPattern(BallerinaParser.EntryRefBindingPatternContext ctx) { if (isInErrorState) { return; } RestBindingPatternState restRefBindingPattern = (ctx.restRefBindingPattern() == null) ? NO_BINDING_PATTERN : ((ctx.restRefBindingPattern().sealedLiteral() != null) ? CLOSED_REST_BINDING_PATTERN : OPEN_REST_BINDING_PATTERN); this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), restRefBindingPattern); } @Override public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) { if (isInErrorState) { return; } if ((ctx.Identifier() != null) && ((ctx.parent instanceof BallerinaParser.TupleBindingPatternContext) || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext) || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } else if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx)); } } @Override public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldBindingMemberVar( getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingPattern() != null); } @Override public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingRefPattern() != null); } @Override public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (isInErrorState) { return; } boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean 
isExpressionAvailable = ctx.expression() != null; if (ctx.Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else { this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx)); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableColumnDefinition(getWS(ctx)); } @Override public void exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (isInErrorState) { return; } String columnName; int childCount = ctx.getChildCount(); if (childCount == 2) { boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText()); if (keyColumn) { columnName = ctx.getChild(1).getText(); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { DiagnosticPos pos = getCurrentPos(ctx); dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED); } } else { columnName = ctx.getChild(0).getText(); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); } } @Override public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataArray(getWS(ctx)); } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (isInErrorState) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(getWS(ctx)); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } 
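// builds the table literal node from the column definitions and data rows collected by the table callbacks above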
this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitArrayLiteral(BallerinaParser.ArrayLiteralContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.expressionList() != null; this.pkgBuilder.addArrayInitExpr(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) { if (isInErrorState) { return; } String initName = ctx.NEW().getText(); boolean typeAvailable = ctx.userDefineTypeName() != null; boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable, argsAvailable); } @Override public void exitErrorConstructorExpr(BallerinaParser.ErrorConstructorExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorConstructor(getCurrentPos(ctx), getWS(ctx), ctx.COMMA() != null); } @Override public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = null; final DiagnosticPos varPos = serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true); } @Override public void exitChannelType(BallerinaParser.ChannelTypeContext ctx) { if (isInErrorState) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); checkTypeValidity(typeName, pos); this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } /** * {@inheritDoc} */ @Override public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleDestructuringStatement(BallerinaParser.TupleDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) { if (isInErrorState) { return; } String compoundOperatorText = ctx.compoundOperator().getText(); String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1); this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator); } /** * {@inheritDoc} */ @Override public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addCompoundOperator(getWS(ctx)); } @Override public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } /** * {@inheritDoc} */ @Override public void 
enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endIfElseNode(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfClause(BallerinaParser.IfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createMatchNode(getCurrentPos(ctx)); } @Override public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMatchStmtPattern(); } @Override public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } if (ctx.bindingPattern() != null) { boolean isTypeGuardPresent = ctx.IF() != null; this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx), isTypeGuardPresent); return; } this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeachStatement(); } @Override public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } } @Override public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx), ctx.LEFT_PARENTHESIS() == 
null, ctx.RIGHT_PARENTHESIS() == null, ctx.expression(1) == null); } /** * {@inheritDoc} */ @Override public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhileStmt(); } /** * {@inheritDoc} */ @Override public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForkJoinStmt(); } @Override public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); } @Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); } @Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCatchClause(); } @Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); } @Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startFinallyBlock(); } @Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); } @Override public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx .expression() != null); } @Override public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) { if (isInErrorState) { return; } String workerName = ctx.Identifier() != null ? 
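// the worker name is optional in a flush expression; when absent, null is passed to addWorkerFlushExpr (presumably meaning a flush with no explicit target worker)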
ctx.Identifier().getText() : null; this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName); } @Override public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.expression().size() > 1); } @Override public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } @Override public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.waitForCollection() != null) { this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx)); } else { this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx)); } } @Override public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWaitForAll(); } @Override public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) { if (isInErrorState) { return; } boolean containsExpr = ctx.expression() != null; this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), containsExpr); } /** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { if (isInErrorState) { return; } boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); } @Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } @Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (isInErrorState) { return; } FieldContext field = ctx.field(); String fieldName; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldType, ctx.field().NOT() != null); } @Override public void 
exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (isInErrorState) { return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } @Override public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTransactionStmt(); } /** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS.value, pos); this.pkgBuilder.endTransactionStmt(pos, getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } 
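// opens a lock statement scope; exitLockStatement below completes the node with its position and whitespace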
this.pkgBuilder.startLockStmt(); } /** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOnretryBlock(); } /** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCommittedBlock(); } /** * {@inheritDoc} */ @Override public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAbortedBlock(); } /** * {@inheritDoc} */ @Override public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryCountExpression(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { } @Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (isInErrorState) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? 
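// the prefix is optional in an xmlns declaration; a null prefix presumably denotes the default namespace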
ctx.Identifier().getText() : null; this.pkgBuilder.addXMLNSDeclaration(getCurrentPos(ctx), getWS(ctx), namespaceUri, prefix, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (isInErrorState) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void 
exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleLiteral(BallerinaParser.TupleLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBracedOrTupleExpression(getCurrentPos(ctx), getWS(ctx), ctx.expression().size()); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = 
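// the parent rule decides whether this parameter list closes a function type signature or a callable definition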
ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getNodeValue(ctx, node), node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.emptyTupleLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } else if ((node = ctx.SymbolicStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length()); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } } /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); endingString = endingString.substring(0, endingString.length() - 3); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public 
void exitElement(BallerinaParser.ElementContext ctx) { if (isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } if (ctx.expression() != null) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { 
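// a prefixed qualified name (e.g. ns:element) yields two XMLQName tokens: the prefix, then the local name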
prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void exitTableQueryExpression(BallerinaParser.TableQueryExpressionContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.TABLE_QUERIES.value, pos); this.pkgBuilder.addTableQueryExpression(pos, getWS(ctx)); } @Override public void enterOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOrderByClauseNode(getCurrentPos(ctx)); } @Override public void exitOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endOrderByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterLimitClause(BallerinaParser.LimitClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLimitClauseNode(getCurrentPos(ctx)); } @Override public void exitLimitClause(BallerinaParser.LimitClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endLimitClauseNode(getCurrentPos(ctx), getWS(ctx), ctx.DecimalIntegerLiteral().getText()); } @Override public void enterOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOrderByVariableNode(getCurrentPos(ctx)); } @Override public void exitOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (isInErrorState) { return; } boolean isAscending = ctx.orderByType() != null && ctx.orderByType().ASCENDING() != null; boolean isDescending = ctx.orderByType() != null && ctx.orderByType().DESCENDING() != null; this.pkgBuilder.endOrderByVariableNode(getCurrentPos(ctx), getWS(ctx), isAscending, isDescending); } @Override public void enterGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startGroupByClauseNode(getCurrentPos(ctx)); } @Override public void exitGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endGroupByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterHavingClause(BallerinaParser.HavingClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startHavingClauseNode(getCurrentPos(ctx)); } @Override public void exitHavingClause(BallerinaParser.HavingClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endHavingClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectExpressionNode(getCurrentPos(ctx)); } @Override public void exitSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (isInErrorState) { 
return; } String identifier = ctx.Identifier() == null ? null : ctx.Identifier().getText(); this.pkgBuilder.endSelectExpressionNode(identifier, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectClauseNode(getCurrentPos(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } boolean isSelectAll = ctx.MUL() != null; boolean isGroupByClauseAvailable = ctx.groupByClause() != null; boolean isHavingClauseAvailable = ctx.havingClause() != null; this.pkgBuilder.endSelectClauseNode(isSelectAll, isGroupByClauseAvailable, isHavingClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectExpressionList(); } @Override public void exitSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endSelectExpressionList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void enterWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhereClauseNode(getCurrentPos(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endWhereClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamActionNode(getCurrentPos(ctx), diagnosticSrc.pkgID); } @Override public void exitStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endStreamActionNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternStreamingEdgeInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? 
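// the alias of a pattern streaming edge input is optional; null presumably means the stream is referenced by its own name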
ctx.Identifier().getText() : null; this.pkgBuilder.endPatternStreamingEdgeInputNode(getCurrentPos(ctx), getWS(ctx), alias); } @Override public void enterWindowClause(BallerinaParser.WindowClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWindowClauseNode(getCurrentPos(ctx)); } @Override public void exitWindowClause(BallerinaParser.WindowClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endWindowsClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterWithinClause(BallerinaParser.WithinClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWithinClause(getCurrentPos(ctx)); } @Override public void exitWithinClause(BallerinaParser.WithinClauseContext ctx) { if (isInErrorState) { return; } String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endWithinClause(getCurrentPos(ctx), getWS(ctx), timeDurationValue, timeScale); } @Override public void enterPatternClause(BallerinaParser.PatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternClause(getCurrentPos(ctx)); } @Override public void exitPatternClause(BallerinaParser.PatternClauseContext ctx) { if (isInErrorState) { return; } boolean isForAllEvents = ctx.EVERY() != null; boolean isWithinClauseAvailable = ctx.withinClause() != null; this.pkgBuilder.endPatternClause(isForAllEvents, isWithinClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (isInErrorState) { return; } boolean followedByAvailable = ctx.FOLLOWED() != null && ctx.BY() != null; boolean enclosedInParenthesis = ctx.LEFT_PARENTHESIS() != null && ctx.RIGHT_PARENTHESIS() != null; boolean andWithNotAvailable = ctx.NOT() != null && ctx.AND() != null; boolean forWithNotAvailable = ctx.timeScale() != null; boolean onlyAndAvailable = ctx.AND() != null && ctx.NOT() == null && ctx.FOR() == null; boolean onlyOrAvailable = ctx.OR() != null && ctx.NOT() == null && ctx.FOR() == null; boolean commaSeparated = ctx.COMMA() != null; String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endPatternStreamingInputNode(getCurrentPos(ctx), getWS(ctx), followedByAvailable, enclosedInParenthesis, andWithNotAvailable, forWithNotAvailable, onlyAndAvailable, onlyOrAvailable, commaSeparated, timeDurationValue, timeScale); } @Override public void enterStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (isInErrorState) { return; } String alias = null; if (ctx.alias != null) { alias = ctx.alias.getText(); } this.pkgBuilder.endStreamingInputNode(alias, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startJoinStreamingInputNode(getCurrentPos(ctx)); } @Override public void 
exitJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (isInErrorState) { return; } boolean unidirectionalJoin = ctx.UNIDIRECTIONAL() != null; if (!unidirectionalJoin) { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, false, joinType); } else { if (ctx.getChild(0).getText().equals("unidirectional")) { String joinType = (ctx).children.get(1).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), true, false, joinType); } else { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, true, joinType); } } } /** * {@inheritDoc} */ @Override public void exitJoinType(BallerinaParser.JoinTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endJoinType(getWS(ctx)); } @Override public void enterOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOutputRateLimitNode(getCurrentPos(ctx)); } @Override public void exitOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (isInErrorState) { return; } boolean isSnapshotOutputRateLimit = false; boolean isFirst = false; boolean isLast = false; boolean isAll = false; if (ctx.SNAPSHOT() != null) { isSnapshotOutputRateLimit = true; } else { if (ctx.LAST() != null) { isLast = true; } else if (ctx.FIRST() != null) { isFirst = true; } else if (ctx.ALL() != null) { isAll = true; } } String timeScale = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); } this.pkgBuilder.endOutputRateLimitNode(getCurrentPos(ctx), getWS(ctx), isSnapshotOutputRateLimit, isFirst, isLast, isAll, timeScale, ctx.DecimalIntegerLiteral().getText()); } @Override public void enterTableQuery(BallerinaParser.TableQueryContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableQueryNode(getCurrentPos(ctx)); } @Override public void exitTableQuery(BallerinaParser.TableQueryContext ctx) { if (isInErrorState) { return; } boolean isSelectClauseAvailable = ctx.selectClause() != null; boolean isOrderByClauseAvailable = ctx.orderByClause() != null; boolean isJoinClauseAvailable = ctx.joinStreamingInput() != null; boolean isLimitClauseAvailable = ctx.limitClause() != null; this.pkgBuilder.endTableQueryNode(isJoinClauseAvailable, isSelectClauseAvailable, isOrderByClauseAvailable, isLimitClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamingQueryStatementNode(getCurrentPos(ctx)); } @Override public void exitStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endStreamingQueryStatementNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeverNode(getCurrentPos(ctx), isSiddhiRuntimeEnabled); } @Override public void exitForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMING_QUERIES.value, pos); this.pkgBuilder.endForeverNode(pos, getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if
(isInErrorState) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitDeprecatedAttachment(BallerinaParser.DeprecatedAttachmentContext ctx) { if (isInErrorState) { return; } String contentText = ctx.deprecatedText() != null ? 
ctx.deprecatedText().getText() : ""; this.pkgBuilder.createDeprecatedNode(getCurrentPos(ctx), getWS(ctx), contentText); } @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPosFromIdentifier(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? 
null : node.getText(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } private void checkTypeValidity(String typeName, DiagnosticPos pos) { if (enableExperimentalFeatures) { return; } if (ExperimentalFeatures.STREAMS.value.equals(typeName) || ExperimentalFeatures.CHANNEL.value.equals(typeName)) { dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, typeName); } } private void checkExperimentalFeatureValidity(String constructName, DiagnosticPos pos) { if (enableExperimentalFeatures) { return; } dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName); } private enum ExperimentalFeatures { STREAMS("stream"), CHANNEL("channel"), TABLE_QUERIES("table queries"), STREAMING_QUERIES("streaming queries"), TRANSACTIONS("transaction"), CHECKPOINTING("checkpoint"); private String value; private ExperimentalFeatures(String value) { this.value = value; } @Override public String toString() { return value; } } /** * Mark that this listener is in error state. */ public void setErrorState() { this.isInErrorState = true; } /** * Mark that this listener is not in an error state. */ public void unsetErrorState() { this.isInErrorState = false; } }
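// --- Illustrative sketch (not part of the original source) ---
// A minimal, self-contained example of the position arithmetic used by
// getCurrentPos(...) above, assuming only plain ints rather than ANTLR tokens:
// ANTLR reports zero-based character positions, so the listener shifts both
// ends to one-based columns and derives the end column from the stop token's
// startIndex/stopIndex span.
final class DiagnosticPosMathSketch {
    // Hypothetical helper, named here for illustration only.
    // Returns {startCol, endCol}, both one-based, for a token that starts at
    // zero-based column charPositionInLine and covers startIndex..stopIndex.
    static int[] oneBasedColumns(int charPositionInLine, int startIndex, int stopIndex) {
        int startCol = charPositionInLine + 1;
        int endCol = charPositionInLine + (stopIndex - startIndex + 1) + 1;
        return new int[] {startCol, endCol};
    }
}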
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; private boolean enableExperimentalFeatures; private boolean isSiddhiRuntimeEnabled; BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); this.enableExperimentalFeatures = Boolean.parseBoolean( CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED)); this.isSiddhiRuntimeEnabled = Boolean.parseBoolean( CompilerOptions.getInstance(context).get(CompilerOptionName.SIDDHI_RUNTIME_ENABLED)); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitSimpleParameter(BallerinaParser.SimpleParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (isInErrorState) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().Identifier().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = ctx.Identifier() != null ? ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPosFromIdentifier(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.EXTERN() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean privateFunc = ctx.PRIVATE() != null; if (ctx.Identifier() != null) { this.pkgBuilder .endObjectOuterFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, privateFunc, remoteFunc, nativeFunc, bodyExists, ctx.Identifier().getText()); return; } boolean isReceiverAttached = ctx.typeName() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, remoteFunc, nativeFunc, privateFunc, bodyExists, isReceiverAttached, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, 
ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract, isClient, false); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, ctx.annotationAttachment().size(), false, isOptional); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); 
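// Gather the object field's metadata (name, default expression, deprecated docs,
// annotation count, and visibility modifiers) before handing it to the package
// builder below.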
Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, deprecatedDocExists, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean nativeFunc = ctx.EXTERN() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, isPrivate, remoteFunc, resourceFunc, nativeFunc, bodyExists, markdownDocExists, deprecatedDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.typeName() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicAnnotation, isTypeAttached); } /** * {@inheritDoc} */ @Override public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isTypeAvailable = ctx.typeName() != null; this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isPublic, isTypeAvailable); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; boolean isListenerVar = ctx.LISTENER() != null; this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable, isListenerVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAttachPoint(AttachPoint.getAttachmentPoint(ctx.getText()), getWS(ctx)); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWorker(diagnosticSrc.pkgID); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } String 
workerName = null; if (ctx.workerDefinition() != null) { workerName = ctx.workerDefinition().Identifier().getText(); } boolean retParamsAvail = ctx.workerDefinition() != null && ctx.workerDefinition().returnParameter() != null; this.pkgBuilder.addWorker(getCurrentPos(ctx), getWS(ctx), workerName, retParamsAvail); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.typeName().size()); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void exitRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; boolean sealed = hasRestField ? 
ctx.recordRestFieldDefinition().sealedLiteral() != null : false; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, sealed, hasRestField); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.functionTypeName() != null) { return; } if (ctx.errorTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); checkTypeValidity(typeName, pos); if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName); } } @Override public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } boolean isReasonTypeExists = !ctx.typeName().isEmpty(); boolean isDetailsTypeExists = ctx.typeName().size() > 1; this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), isReasonTypeExists, isDetailsTypeExists); } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (isInErrorState) { return; } boolean paramsAvail = false, paramsTypeOnly = false, retParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; paramsTypeOnly = true; } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } String detailIdentifier = null; if (ctx.Identifier(1) != null) { detailIdentifier = ctx.Identifier(1).getText(); } this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier(0).getText(), detailIdentifier, ctx.recordBindingPattern() != null); } @Override public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean recordBindingPattern = false; if (ctx.recordRefBindingPattern() != null || ctx.variableReference().size() == 2) { recordBindingPattern = true; } this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx), 
recordBindingPattern); } @Override public void exitTupleBindingPattern(BallerinaParser.TupleBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size()); } @Override public void exitTupleRefBindingPattern(BallerinaParser.TupleRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx), ctx.bindingRefPattern().size()); } @Override public void enterEntryBindingPattern(BallerinaParser.EntryBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableList(); } @Override public void exitEntryBindingPattern(BallerinaParser.EntryBindingPatternContext ctx) { if (isInErrorState) { return; } RestBindingPatternState restBindingPattern = (ctx.restBindingPattern() == null) ? NO_BINDING_PATTERN : ((ctx.restBindingPattern().sealedLiteral() != null) ? CLOSED_REST_BINDING_PATTERN : OPEN_REST_BINDING_PATTERN); this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), restBindingPattern); } @Override public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordBindingWS(getWS(ctx)); } @Override public void enterEntryRefBindingPattern(BallerinaParser.EntryRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableReferenceList(); } @Override public void exitEntryRefBindingPattern(BallerinaParser.EntryRefBindingPatternContext ctx) { if (isInErrorState) { return; } RestBindingPatternState restRefBindingPattern = (ctx.restRefBindingPattern() == null) ? NO_BINDING_PATTERN : ((ctx.restRefBindingPattern().sealedLiteral() != null) ? CLOSED_REST_BINDING_PATTERN : OPEN_REST_BINDING_PATTERN); this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), restRefBindingPattern); } @Override public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) { if (isInErrorState) { return; } if ((ctx.Identifier() != null) && ((ctx.parent instanceof BallerinaParser.TupleBindingPatternContext) || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext) || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } else if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx)); } } @Override public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldBindingMemberVar( getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingPattern() != null); } @Override public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingRefPattern() != null); } @Override public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (isInErrorState) { return; } boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean 
isExpressionAvailable = ctx.expression() != null; if (ctx.Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else { this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx)); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableColumnDefinition(getWS(ctx)); } @Override public void exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (isInErrorState) { return; } String columnName; int childCount = ctx.getChildCount(); if (childCount == 2) { boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText()); if (keyColumn) { columnName = ctx.getChild(1).getText(); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { DiagnosticPos pos = getCurrentPos(ctx); dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED); } } else { columnName = ctx.getChild(0).getText(); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); } } @Override public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataArray(getWS(ctx)); } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (isInErrorState) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(getWS(ctx)); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } 
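// The column definitions and data rows were already pushed by the child exit
// callbacks above (exitTableColumn, exitTableData, ...); this call assembles
// them into a single table literal node.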
this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitArrayLiteral(BallerinaParser.ArrayLiteralContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.expressionList() != null; this.pkgBuilder.addArrayInitExpr(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) { if (isInErrorState) { return; } String initName = ctx.NEW().getText(); boolean typeAvailable = ctx.userDefineTypeName() != null; boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable, argsAvailable); } @Override public void exitErrorConstructorExpr(BallerinaParser.ErrorConstructorExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorConstructor(getCurrentPos(ctx), getWS(ctx), ctx.COMMA() != null); } @Override public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = null; final DiagnosticPos varPos = serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true); } @Override public void exitChannelType(BallerinaParser.ChannelTypeContext ctx) { if (isInErrorState) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); checkTypeValidity(typeName, pos); this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } /** * {@inheritDoc} */ @Override public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleDestructuringStatement(BallerinaParser.TupleDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) { if (isInErrorState) { return; } String compoundOperatorText = ctx.compoundOperator().getText(); String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1); this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator); } /** * {@inheritDoc} */ @Override public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addCompoundOperator(getWS(ctx)); } @Override public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } /** * {@inheritDoc} */ @Override public void 
enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endIfElseNode(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfClause(BallerinaParser.IfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createMatchNode(getCurrentPos(ctx)); } @Override public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMatchStmtPattern(); } @Override public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } if (ctx.bindingPattern() != null) { boolean isTypeGuardPresent = ctx.IF() != null; this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx), isTypeGuardPresent); return; } this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeachStatement(); } @Override public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } } @Override public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx), ctx.LEFT_PARENTHESIS() == 
null, ctx.RIGHT_PARENTHESIS() == null, ctx.expression(1) == null); } /** * {@inheritDoc} */ @Override public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhileStmt(); } /** * {@inheritDoc} */ @Override public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForkJoinStmt(); } @Override public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); } @Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); } @Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCatchClause(); } @Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); } @Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startFinallyBlock(); } @Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); } @Override public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx .expression() != null); } @Override public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) { if (isInErrorState) { return; } String workerName = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName); } @Override public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.expression().size() > 1); } @Override public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } @Override public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.waitForCollection() != null) { this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx)); } else { this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx)); } } @Override public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWaitForAll(); } @Override public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) { if (isInErrorState) { return; } boolean containsExpr = ctx.expression() != null; this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), containsExpr); } /** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { if (isInErrorState) { return; } boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); } @Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } @Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (isInErrorState) { return; } FieldContext field = ctx.field(); String fieldName; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldType, ctx.field().NOT() != null); } @Override public void 
exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (isInErrorState) { return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } @Override public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTransactionStmt(); } /** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS.value, pos); this.pkgBuilder.endTransactionStmt(pos, getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } 
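// Begin collecting the lock body; the matching exitLockStatement(...) below
// wraps the collected block into a lock statement node via addLockStmt.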
this.pkgBuilder.startLockStmt(); } /** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOnretryBlock(); } /** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCommittedBlock(); } /** * {@inheritDoc} */ @Override public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAbortedBlock(); } /** * {@inheritDoc} */ @Override public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryCountExpression(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { } @Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (isInErrorState) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? 
ctx.Identifier().getText() : null; this.pkgBuilder.addXMLNSDeclaration(getCurrentPos(ctx), getWS(ctx), namespaceUri, prefix, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (isInErrorState) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void 
exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleLiteral(BallerinaParser.TupleLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBracedOrTupleExpression(getCurrentPos(ctx), getWS(ctx), ctx.expression().size()); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = 
ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getNodeValue(ctx, node), node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.emptyTupleLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } else if ((node = ctx.SymbolicStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length()); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } } /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); endingString = endingString.substring(0, endingString.length() - 3); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public 
void exitElement(BallerinaParser.ElementContext ctx) { if (isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } if (ctx.expression() != null) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { 
prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void exitTableQueryExpression(BallerinaParser.TableQueryExpressionContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.TABLE_QUERIES.value, pos); this.pkgBuilder.addTableQueryExpression(pos, getWS(ctx)); } @Override public void enterOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOrderByClauseNode(getCurrentPos(ctx)); } @Override public void exitOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endOrderByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterLimitClause(BallerinaParser.LimitClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLimitClauseNode(getCurrentPos(ctx)); } @Override public void exitLimitClause(BallerinaParser.LimitClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endLimitClauseNode(getCurrentPos(ctx), getWS(ctx), ctx.DecimalIntegerLiteral().getText()); } @Override public void enterOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOrderByVariableNode(getCurrentPos(ctx)); } @Override public void exitOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (isInErrorState) { return; } boolean isAscending = ctx.orderByType() != null && ctx.orderByType().ASCENDING() != null; boolean isDescending = ctx.orderByType() != null && ctx.orderByType().DESCENDING() != null; this.pkgBuilder.endOrderByVariableNode(getCurrentPos(ctx), getWS(ctx), isAscending, isDescending); } @Override public void enterGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startGroupByClauseNode(getCurrentPos(ctx)); } @Override public void exitGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endGroupByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterHavingClause(BallerinaParser.HavingClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startHavingClauseNode(getCurrentPos(ctx)); } @Override public void exitHavingClause(BallerinaParser.HavingClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endHavingClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectExpressionNode(getCurrentPos(ctx)); } @Override public void exitSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (isInErrorState) { 
return; } String identifier = ctx.Identifier() == null ? null : ctx.Identifier().getText(); this.pkgBuilder.endSelectExpressionNode(identifier, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectClauseNode(getCurrentPos(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } boolean isSelectAll = ctx.MUL() != null; boolean isGroupByClauseAvailable = ctx.groupByClause() != null; boolean isHavingClauseAvailable = ctx.havingClause() != null; this.pkgBuilder.endSelectClauseNode(isSelectAll, isGroupByClauseAvailable, isHavingClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startSelectExpressionList(); } @Override public void exitSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endSelectExpressionList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void enterWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhereClauseNode(getCurrentPos(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endWhereClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamActionNode(getCurrentPos(ctx), diagnosticSrc.pkgID); } @Override public void exitStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endStreamActionNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternStreamingEdgeInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.endPatternStreamingEdgeInputNode(getCurrentPos(ctx), getWS(ctx), alias); } @Override public void enterWindowClause(BallerinaParser.WindowClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWindowClauseNode(getCurrentPos(ctx)); } @Override public void exitWindowClause(BallerinaParser.WindowClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endWindowsClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterWithinClause(BallerinaParser.WithinClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWithinClause(getCurrentPos(ctx)); } @Override public void exitWithinClause(BallerinaParser.WithinClauseContext ctx) { if (isInErrorState) { return; } String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endWithinClause(getCurrentPos(ctx), getWS(ctx), timeDurationValue, timeScale); } @Override public void enterPatternClause(BallerinaParser.PatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternClause(getCurrentPos(ctx)); } @Override public void exitPatternClause(BallerinaParser.PatternClauseContext ctx) { if (isInErrorState) { return; } boolean isForAllEvents = ctx.EVERY() != null; boolean isWithinClauseAvailable = ctx.withinClause() != null; this.pkgBuilder.endPatternClause(isForAllEvents, isWithinClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startPatternStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (isInErrorState) { return; } boolean followedByAvailable = ctx.FOLLOWED() != null && ctx.BY() != null; boolean enclosedInParenthesis = ctx.LEFT_PARENTHESIS() != null && ctx.RIGHT_PARENTHESIS() != null; boolean andWithNotAvailable = ctx.NOT() != null && ctx.AND() != null; boolean forWithNotAvailable = ctx.timeScale() != null; boolean onlyAndAvailable = ctx.AND() != null && ctx.NOT() == null && ctx.FOR() == null; boolean onlyOrAvailable = ctx.OR() != null && ctx.NOT() == null && ctx.FOR() == null; boolean commaSeparated = ctx.COMMA() != null; String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endPatternStreamingInputNode(getCurrentPos(ctx), getWS(ctx), followedByAvailable, enclosedInParenthesis, andWithNotAvailable, forWithNotAvailable, onlyAndAvailable, onlyOrAvailable, commaSeparated, timeDurationValue, timeScale); } @Override public void enterStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (isInErrorState) { return; } String alias = null; if (ctx.alias != null) { alias = ctx.alias.getText(); } this.pkgBuilder.endStreamingInputNode(alias, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startJoinStreamingInputNode(getCurrentPos(ctx)); } @Override public void 
exitJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (isInErrorState) { return; } boolean unidirectionalJoin = ctx.UNIDIRECTIONAL() != null; if (!unidirectionalJoin) { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, false, joinType); } else { if (ctx.getChild(0).getText().equals("unidirectional")) { String joinType = (ctx).children.get(1).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), true, false, joinType); } else { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, true, joinType); } } } /** * {@inheritDoc} */ @Override public void exitJoinType(BallerinaParser.JoinTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endJoinType(getWS(ctx)); } @Override public void enterOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOutputRateLimitNode(getCurrentPos(ctx)); } @Override public void exitOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (isInErrorState) { return; } boolean isSnapshotOutputRateLimit = false; boolean isFirst = false; boolean isLast = false; boolean isAll = false; if (ctx.SNAPSHOT() != null) { isSnapshotOutputRateLimit = true; } else { if (ctx.LAST() != null) { isLast = true; } else if (ctx.FIRST() != null) { isFirst = true; } else if (ctx.LAST() != null) { isAll = true; } } String timeScale = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); } this.pkgBuilder.endOutputRateLimitNode(getCurrentPos(ctx), getWS(ctx), isSnapshotOutputRateLimit, isFirst, isLast, isAll, timeScale, ctx.DecimalIntegerLiteral().getText()); } @Override public void enterTableQuery(BallerinaParser.TableQueryContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableQueryNode(getCurrentPos(ctx)); } @Override public void exitTableQuery(BallerinaParser.TableQueryContext ctx) { if (isInErrorState) { return; } boolean isSelectClauseAvailable = ctx.selectClause() != null; boolean isOrderByClauseAvailable = ctx.orderByClause() != null; boolean isJoinClauseAvailable = ctx.joinStreamingInput() != null; boolean isLimitClauseAvailable = ctx.limitClause() != null; this.pkgBuilder.endTableQueryNode(isJoinClauseAvailable, isSelectClauseAvailable, isOrderByClauseAvailable, isLimitClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startStreamingQueryStatementNode(getCurrentPos(ctx)); } @Override public void exitStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endStreamingQueryStatementNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeverNode(getCurrentPos(ctx), isSiddhiRuntimeEnabled); } @Override public void exitForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMING_QUERIES.value, pos); this.pkgBuilder.endForeverNode(pos, getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if 
(isInErrorState) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitDeprecatedAttachment(BallerinaParser.DeprecatedAttachmentContext ctx) { if (isInErrorState) { return; } String contentText = ctx.deprecatedText() != null ? 
ctx.deprecatedText().getText() : ""; this.pkgBuilder.createDeprecatedNode(getCurrentPos(ctx), getWS(ctx), contentText); } @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPosFromIdentifier(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? 
null : node.getText(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } private void checkTypeValidity(String typeName, DiagnosticPos pos) { if (enableExperimentalFeatures) { return; } if (ExperimentalFeatures.STREAMS.value.equals(typeName) || ExperimentalFeatures.CHANNEL.value.equals(typeName)) { dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, typeName); } } private void checkExperimentalFeatureValidity(String constructName, DiagnosticPos pos) { if (enableExperimentalFeatures) { return; } dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName); } private enum ExperimentalFeatures { STREAMS("stream"), CHANNEL("channel"), TABLE_QUERIES("table queries"), STREAMING_QUERIES("streaming queries"), TRANSACTIONS("transaction"), CHECKPOINTING("checkpoint"); private String value; private ExperimentalFeatures(String value) { this.value = value; } @Override public String toString() { return value; } } /** * Mark that this listener is in error state. */ public void setErrorState() { this.isInErrorState = true; } /** * Mark that this listener is not in an error state. */ public void unsetErrorState() { this.isInErrorState = false; } }
Can this ever be null? From the current code, it doesn't look like it can.
public String getUserAgent() { String userAgent = null; if (this.feedResponseDiagnostics != null) { userAgent = this.feedResponseDiagnostics.getUserAgent(); } else if (this.clientSideRequestStatistics != null) { userAgent = this.clientSideRequestStatistics.getUserAgent(); } if (userAgent != null) { return userAgent; } return ""; }
if (userAgent != null) {
public String getUserAgent() { if (this.feedResponseDiagnostics != null) { return this.feedResponseDiagnostics.getUserAgent(); } return this.clientSideRequestStatistics.getUserAgent(); }
class CosmosDiagnostics { private static final Logger LOGGER = LoggerFactory.getLogger(CosmosDiagnostics.class); static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final String COSMOS_DIAGNOSTICS_KEY = "cosmosDiagnostics"; private ClientSideRequestStatistics clientSideRequestStatistics; private FeedResponseDiagnostics feedResponseDiagnostics; private CosmosDiagnosticsContext diagnosticsContext; private double samplingRateSnapshot; private final AtomicBoolean diagnosticsCapturedInPagedFlux; static final String USER_AGENT_KEY = "userAgent"; static final String SAMPLING_RATE_SNAPSHOT_KEY = "samplingRateSnapshot"; CosmosDiagnostics(DiagnosticsClientContext diagnosticsClientContext) { this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(false); this.clientSideRequestStatistics = new ClientSideRequestStatistics(diagnosticsClientContext); this.samplingRateSnapshot = 1; } CosmosDiagnostics(FeedResponseDiagnostics feedResponseDiagnostics) { this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(false); this.feedResponseDiagnostics = feedResponseDiagnostics; this.samplingRateSnapshot = 1; } CosmosDiagnostics(CosmosDiagnostics toBeCloned) { if (toBeCloned.feedResponseDiagnostics != null) { this.feedResponseDiagnostics = new FeedResponseDiagnostics(toBeCloned.feedResponseDiagnostics); } if (toBeCloned.clientSideRequestStatistics != null) { this.clientSideRequestStatistics = new ClientSideRequestStatistics(toBeCloned.clientSideRequestStatistics); } this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(toBeCloned.diagnosticsCapturedInPagedFlux.get()); this.samplingRateSnapshot = toBeCloned.samplingRateSnapshot; } ClientSideRequestStatistics clientSideRequestStatistics() { return clientSideRequestStatistics; } /** * Retrieves Response Diagnostic String * * @return Response Diagnostic String */ @Override public String toString() { StringBuilder stringBuilder = new StringBuilder(); fillCosmosDiagnostics(null, stringBuilder); return stringBuilder.toString(); } /** * Returns the associated CosmosDiagnosticsContext or null if not associated with any context yet. * @return the associated CosmosDiagnosticsContext or null if not associated with any context yet. */ public CosmosDiagnosticsContext getDiagnosticsContext() { return this.diagnosticsContext; } void setDiagnosticsContext(CosmosDiagnosticsContext ctx) { checkNotNull("ctx", "Argument 'ctx' must not be null."); this.diagnosticsContext = ctx; } /** * Retrieves duration related to the completion of the request. * This represents end to end duration of an operation including all the retries. * This is meant for point operation only, for query please use toString() to get full query diagnostics. 
* * @return request completion duration */ public Duration getDuration() { if (this.feedResponseDiagnostics != null) { Collection<ClientSideRequestStatistics> statistics = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (statistics == null) { return Duration.ZERO; } Instant min = Instant.MAX; Instant max = Instant.MIN; for (ClientSideRequestStatistics s: statistics) { if (s.getRequestStartTimeUTC() != null && s.getRequestStartTimeUTC().isBefore(min)) { min = s.getRequestStartTimeUTC(); } if (s.getRequestEndTimeUTC() != null && s.getRequestEndTimeUTC().isAfter(max)) { max = s.getRequestEndTimeUTC(); } } if (max.isBefore(min)) { return null; } if (min == max) { return Duration.ZERO; } return Duration.between(min, max); } return this.clientSideRequestStatistics.getDuration(); } /** * Regions contacted for this request * * @return set of regions contacted for this request */ @Beta(value = Beta.SinceVersion.V4_9_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING) @Deprecated public Set<URI> getRegionsContacted() { if (this.feedResponseDiagnostics != null) { return null; } return this.clientSideRequestStatistics.getLocationEndpointsContacted(); } /** * Regions contacted for this request * * @return set of regions contacted for this request */ public Set<String> getContactedRegionNames() { if (this.feedResponseDiagnostics != null) { Set<String> aggregatedRegionsContacted = Collections.synchronizedSet(new HashSet<>()); if (this.clientSideRequestStatistics != null) { Set<String> temp = this.clientSideRequestStatistics.getContactedRegionNames(); if (temp != null && temp.size() > 0) { aggregatedRegionsContacted.addAll(temp); } } Collection<ClientSideRequestStatistics> clientStatisticCollection = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (clientStatisticCollection != null) { for (ClientSideRequestStatistics clientStatistics : clientStatisticCollection) { Set<String> temp = clientStatistics.getContactedRegionNames(); if (temp != null && temp.size() > 0) { aggregatedRegionsContacted.addAll(temp); } } } return aggregatedRegionsContacted; } return this.clientSideRequestStatistics.getContactedRegionNames(); } /** * Gets the UserAgent header value used by the client issueing this operation * @return the UserAgent header value used for the client that issued this operation */ FeedResponseDiagnostics getFeedResponseDiagnostics() { return feedResponseDiagnostics; } /** * Retrieves payload size of the request in bytes * This is meant for point operation only, for query and feed operations the request payload is always 0. 
* * @return request payload size in bytes */ int getRequestPayloadSizeInBytes() { if (this.feedResponseDiagnostics != null) { return 0; } return this.clientSideRequestStatistics.getRequestPayloadSizeInBytes(); } /** * Retrieves payload size of the response in bytes * * @return response payload size in bytes */ int getTotalResponsePayloadSizeInBytes() { if (this.feedResponseDiagnostics != null) { int totalResponsePayloadSizeInBytes = 0; Collection<ClientSideRequestStatistics> clientStatisticCollection = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (clientStatisticCollection != null) { for (ClientSideRequestStatistics clientStatistics : clientStatisticCollection) { totalResponsePayloadSizeInBytes += clientStatistics.getMaxResponsePayloadSizeInBytes(); } } return totalResponsePayloadSizeInBytes; } return this.clientSideRequestStatistics.getMaxResponsePayloadSizeInBytes(); } ClientSideRequestStatistics getClientSideRequestStatisticsRaw() { return this.clientSideRequestStatistics; } Collection<ClientSideRequestStatistics> getClientSideRequestStatistics() { if (this.feedResponseDiagnostics != null) { return this.feedResponseDiagnostics.getClientSideRequestStatistics(); } return ImmutableList.of(this.clientSideRequestStatistics); } Collection<ClientSideRequestStatistics> getClientSideRequestStatisticsForQueryPipelineAggregations() { List<ClientSideRequestStatistics> combinedStatistics = new ArrayList<>(); combinedStatistics .addAll(this.feedResponseDiagnostics.getClientSideRequestStatistics()); if (this.clientSideRequestStatistics != null) { combinedStatistics.add(this.clientSideRequestStatistics); } return combinedStatistics; } double getSamplingRateSnapshot() { return this.samplingRateSnapshot; } void fillCosmosDiagnostics(ObjectNode parentNode, StringBuilder stringBuilder) { if (this.feedResponseDiagnostics != null) { feedResponseDiagnostics.setSamplingRateSnapshot(this.samplingRateSnapshot); if (parentNode != null) { parentNode.put(USER_AGENT_KEY, this.feedResponseDiagnostics.getUserAgent()); parentNode.putPOJO(COSMOS_DIAGNOSTICS_KEY, feedResponseDiagnostics); } if (stringBuilder != null) { stringBuilder.append(USER_AGENT_KEY + "=").append(this.feedResponseDiagnostics.getUserAgent()).append(System.lineSeparator()); stringBuilder.append(feedResponseDiagnostics); } } else { clientSideRequestStatistics.setSamplingRateSnapshot(this.samplingRateSnapshot); if (parentNode != null) { parentNode.putPOJO(COSMOS_DIAGNOSTICS_KEY, clientSideRequestStatistics); } if (stringBuilder != null) { try { stringBuilder.append(OBJECT_MAPPER.writeValueAsString(this.clientSideRequestStatistics)); } catch (JsonProcessingException e) { LOGGER.error("Error while parsing diagnostics ", e); } } } } void setFeedResponseDiagnostics(FeedResponseDiagnostics feedResponseDiagnostics) { this.feedResponseDiagnostics = feedResponseDiagnostics; } private AtomicBoolean isDiagnosticsCapturedInPagedFlux(){ return this.diagnosticsCapturedInPagedFlux; } void addClientSideDiagnosticsToFeed(Collection<ClientSideRequestStatistics> requestStatistics) { if (this.feedResponseDiagnostics == null || requestStatistics == null || requestStatistics.isEmpty()) { return; } this.feedResponseDiagnostics .addClientSideRequestStatistics(requestStatistics); } CosmosDiagnostics setSamplingRateSnapshot(double samplingRate) { this.samplingRateSnapshot = samplingRate; return this; } static void initialize() { ImplementationBridgeHelpers.CosmosDiagnosticsHelper.setCosmosDiagnosticsAccessor( new 
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor() { @Override public FeedResponseDiagnostics getFeedResponseDiagnostics(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getFeedResponseDiagnostics(); } @Override public AtomicBoolean isDiagnosticsCapturedInPagedFlux(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.isDiagnosticsCapturedInPagedFlux(); } @Override public Collection<ClientSideRequestStatistics> getClientSideRequestStatistics(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getClientSideRequestStatistics(); } @Override public Collection<ClientSideRequestStatistics> getClientSideRequestStatisticsForQueryPipelineAggregations(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return new ArrayList<>(); } return cosmosDiagnostics.getClientSideRequestStatisticsForQueryPipelineAggregations(); } @Override public ClientSideRequestStatistics getClientSideRequestStatisticsRaw(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getClientSideRequestStatisticsRaw(); } @Override public int getTotalResponsePayloadSizeInBytes(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return 0; } return cosmosDiagnostics.getTotalResponsePayloadSizeInBytes(); } @Override public int getRequestPayloadSizeInBytes(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return 0; } return cosmosDiagnostics.getRequestPayloadSizeInBytes(); } @Override public void addClientSideDiagnosticsToFeed(CosmosDiagnostics cosmosDiagnostics, Collection<ClientSideRequestStatistics> requestStatistics) { if (cosmosDiagnostics == null) { return; } cosmosDiagnostics .addClientSideDiagnosticsToFeed(requestStatistics); } @Override public void setSamplingRateSnapshot(CosmosDiagnostics cosmosDiagnostics, double samplingRate) { if (cosmosDiagnostics == null) { return; } cosmosDiagnostics.setSamplingRateSnapshot(samplingRate); } @Override public CosmosDiagnostics create(DiagnosticsClientContext clientContext, double samplingRate) { return new CosmosDiagnostics(clientContext).setSamplingRateSnapshot(samplingRate); } }); } static { initialize(); } }
class CosmosDiagnostics { private static final Logger LOGGER = LoggerFactory.getLogger(CosmosDiagnostics.class); static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final String COSMOS_DIAGNOSTICS_KEY = "cosmosDiagnostics"; private ClientSideRequestStatistics clientSideRequestStatistics; private FeedResponseDiagnostics feedResponseDiagnostics; private CosmosDiagnosticsContext diagnosticsContext; private double samplingRateSnapshot; private final AtomicBoolean diagnosticsCapturedInPagedFlux; static final String USER_AGENT_KEY = "userAgent"; static final String SAMPLING_RATE_SNAPSHOT_KEY = "samplingRateSnapshot"; CosmosDiagnostics(DiagnosticsClientContext diagnosticsClientContext) { this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(false); this.clientSideRequestStatistics = new ClientSideRequestStatistics(diagnosticsClientContext); this.samplingRateSnapshot = 1; } CosmosDiagnostics(FeedResponseDiagnostics feedResponseDiagnostics) { this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(false); this.feedResponseDiagnostics = feedResponseDiagnostics; this.samplingRateSnapshot = 1; } CosmosDiagnostics(CosmosDiagnostics toBeCloned) { if (toBeCloned.feedResponseDiagnostics != null) { this.feedResponseDiagnostics = new FeedResponseDiagnostics(toBeCloned.feedResponseDiagnostics); } if (toBeCloned.clientSideRequestStatistics != null) { this.clientSideRequestStatistics = new ClientSideRequestStatistics(toBeCloned.clientSideRequestStatistics); } this.diagnosticsCapturedInPagedFlux = new AtomicBoolean(toBeCloned.diagnosticsCapturedInPagedFlux.get()); this.samplingRateSnapshot = toBeCloned.samplingRateSnapshot; } ClientSideRequestStatistics clientSideRequestStatistics() { return clientSideRequestStatistics; } /** * Retrieves Response Diagnostic String * * @return Response Diagnostic String */ @Override public String toString() { StringBuilder stringBuilder = new StringBuilder(); fillCosmosDiagnostics(null, stringBuilder); return stringBuilder.toString(); } /** * Returns the associated CosmosDiagnosticsContext or null if not associated with any context yet. * @return the associated CosmosDiagnosticsContext or null if not associated with any context yet. */ public CosmosDiagnosticsContext getDiagnosticsContext() { return this.diagnosticsContext; } void setDiagnosticsContext(CosmosDiagnosticsContext ctx) { checkNotNull("ctx", "Argument 'ctx' must not be null."); this.diagnosticsContext = ctx; } /** * Retrieves duration related to the completion of the request. * This represents end to end duration of an operation including all the retries. * This is meant for point operation only, for query please use toString() to get full query diagnostics. 
* * @return request completion duration */ public Duration getDuration() { if (this.feedResponseDiagnostics != null) { Collection<ClientSideRequestStatistics> statistics = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (statistics == null) { return Duration.ZERO; } Instant min = Instant.MAX; Instant max = Instant.MIN; for (ClientSideRequestStatistics s: statistics) { if (s.getRequestStartTimeUTC() != null && s.getRequestStartTimeUTC().isBefore(min)) { min = s.getRequestStartTimeUTC(); } if (s.getRequestEndTimeUTC() != null && s.getRequestEndTimeUTC().isAfter(max)) { max = s.getRequestEndTimeUTC(); } } if (max.isBefore(min)) { return null; } if (min == max) { return Duration.ZERO; } return Duration.between(min, max); } return this.clientSideRequestStatistics.getDuration(); } /** * Regions contacted for this request * * @return set of regions contacted for this request */ @Beta(value = Beta.SinceVersion.V4_9_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING) @Deprecated public Set<URI> getRegionsContacted() { if (this.feedResponseDiagnostics != null) { return null; } return this.clientSideRequestStatistics.getLocationEndpointsContacted(); } /** * Regions contacted for this request * * @return set of regions contacted for this request */ public Set<String> getContactedRegionNames() { if (this.feedResponseDiagnostics != null) { Set<String> aggregatedRegionsContacted = Collections.synchronizedSet(new HashSet<>()); if (this.clientSideRequestStatistics != null) { Set<String> temp = this.clientSideRequestStatistics.getContactedRegionNames(); if (temp != null && temp.size() > 0) { aggregatedRegionsContacted.addAll(temp); } } Collection<ClientSideRequestStatistics> clientStatisticCollection = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (clientStatisticCollection != null) { for (ClientSideRequestStatistics clientStatistics : clientStatisticCollection) { Set<String> temp = clientStatistics.getContactedRegionNames(); if (temp != null && temp.size() > 0) { aggregatedRegionsContacted.addAll(temp); } } } return aggregatedRegionsContacted; } return this.clientSideRequestStatistics.getContactedRegionNames(); } /** * Gets the UserAgent header value used by the client issueing this operation * @return the UserAgent header value used for the client that issued this operation */ FeedResponseDiagnostics getFeedResponseDiagnostics() { return feedResponseDiagnostics; } /** * Retrieves payload size of the request in bytes * This is meant for point operation only, for query and feed operations the request payload is always 0. 
* * @return request payload size in bytes */ int getRequestPayloadSizeInBytes() { if (this.feedResponseDiagnostics != null) { return 0; } return this.clientSideRequestStatistics.getRequestPayloadSizeInBytes(); } /** * Retrieves payload size of the response in bytes * * @return response payload size in bytes */ int getTotalResponsePayloadSizeInBytes() { if (this.feedResponseDiagnostics != null) { int totalResponsePayloadSizeInBytes = 0; Collection<ClientSideRequestStatistics> clientStatisticCollection = this.feedResponseDiagnostics.getClientSideRequestStatistics(); if (clientStatisticCollection != null) { for (ClientSideRequestStatistics clientStatistics : clientStatisticCollection) { totalResponsePayloadSizeInBytes += clientStatistics.getMaxResponsePayloadSizeInBytes(); } } return totalResponsePayloadSizeInBytes; } return this.clientSideRequestStatistics.getMaxResponsePayloadSizeInBytes(); } ClientSideRequestStatistics getClientSideRequestStatisticsRaw() { return this.clientSideRequestStatistics; } Collection<ClientSideRequestStatistics> getClientSideRequestStatistics() { if (this.feedResponseDiagnostics != null) { return this.feedResponseDiagnostics.getClientSideRequestStatistics(); } return ImmutableList.of(this.clientSideRequestStatistics); } Collection<ClientSideRequestStatistics> getClientSideRequestStatisticsForQueryPipelineAggregations() { List<ClientSideRequestStatistics> combinedStatistics = new ArrayList<>(); combinedStatistics .addAll(this.feedResponseDiagnostics.getClientSideRequestStatistics()); if (this.clientSideRequestStatistics != null) { combinedStatistics.add(this.clientSideRequestStatistics); } return combinedStatistics; } double getSamplingRateSnapshot() { return this.samplingRateSnapshot; } void fillCosmosDiagnostics(ObjectNode parentNode, StringBuilder stringBuilder) { if (this.feedResponseDiagnostics != null) { feedResponseDiagnostics.setSamplingRateSnapshot(this.samplingRateSnapshot); if (parentNode != null) { parentNode.put(USER_AGENT_KEY, this.feedResponseDiagnostics.getUserAgent()); parentNode.putPOJO(COSMOS_DIAGNOSTICS_KEY, feedResponseDiagnostics); } if (stringBuilder != null) { stringBuilder.append(USER_AGENT_KEY + "=").append(this.feedResponseDiagnostics.getUserAgent()).append(System.lineSeparator()); stringBuilder.append(feedResponseDiagnostics); } } else { clientSideRequestStatistics.setSamplingRateSnapshot(this.samplingRateSnapshot); if (parentNode != null) { parentNode.putPOJO(COSMOS_DIAGNOSTICS_KEY, clientSideRequestStatistics); } if (stringBuilder != null) { try { stringBuilder.append(OBJECT_MAPPER.writeValueAsString(this.clientSideRequestStatistics)); } catch (JsonProcessingException e) { LOGGER.error("Error while parsing diagnostics ", e); } } } } void setFeedResponseDiagnostics(FeedResponseDiagnostics feedResponseDiagnostics) { this.feedResponseDiagnostics = feedResponseDiagnostics; } private AtomicBoolean isDiagnosticsCapturedInPagedFlux(){ return this.diagnosticsCapturedInPagedFlux; } void addClientSideDiagnosticsToFeed(Collection<ClientSideRequestStatistics> requestStatistics) { if (this.feedResponseDiagnostics == null || requestStatistics == null || requestStatistics.isEmpty()) { return; } this.feedResponseDiagnostics .addClientSideRequestStatistics(requestStatistics); } CosmosDiagnostics setSamplingRateSnapshot(double samplingRate) { this.samplingRateSnapshot = samplingRate; return this; } static void initialize() { ImplementationBridgeHelpers.CosmosDiagnosticsHelper.setCosmosDiagnosticsAccessor( new 
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor() { @Override public FeedResponseDiagnostics getFeedResponseDiagnostics(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getFeedResponseDiagnostics(); } @Override public AtomicBoolean isDiagnosticsCapturedInPagedFlux(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.isDiagnosticsCapturedInPagedFlux(); } @Override public Collection<ClientSideRequestStatistics> getClientSideRequestStatistics(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getClientSideRequestStatistics(); } @Override public Collection<ClientSideRequestStatistics> getClientSideRequestStatisticsForQueryPipelineAggregations(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return new ArrayList<>(); } return cosmosDiagnostics.getClientSideRequestStatisticsForQueryPipelineAggregations(); } @Override public ClientSideRequestStatistics getClientSideRequestStatisticsRaw(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return null; } return cosmosDiagnostics.getClientSideRequestStatisticsRaw(); } @Override public int getTotalResponsePayloadSizeInBytes(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return 0; } return cosmosDiagnostics.getTotalResponsePayloadSizeInBytes(); } @Override public int getRequestPayloadSizeInBytes(CosmosDiagnostics cosmosDiagnostics) { if (cosmosDiagnostics == null) { return 0; } return cosmosDiagnostics.getRequestPayloadSizeInBytes(); } @Override public void addClientSideDiagnosticsToFeed(CosmosDiagnostics cosmosDiagnostics, Collection<ClientSideRequestStatistics> requestStatistics) { if (cosmosDiagnostics == null) { return; } cosmosDiagnostics .addClientSideDiagnosticsToFeed(requestStatistics); } @Override public void setSamplingRateSnapshot(CosmosDiagnostics cosmosDiagnostics, double samplingRate) { if (cosmosDiagnostics == null) { return; } cosmosDiagnostics.setSamplingRateSnapshot(samplingRate); } @Override public CosmosDiagnostics create(DiagnosticsClientContext clientContext, double samplingRate) { return new CosmosDiagnostics(clientContext).setSamplingRateSnapshot(samplingRate); } }); } static { initialize(); } }
The reversed() call doesn't make sense with the comment. Is it wrong, or can the comment be improved to make it clearer why the reversal is needed?
public void testOnNewWorkerMetadata_redistributesBudget() throws InterruptedException { String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; String workerToken3 = "workerToken3"; WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse secondWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(2) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken2) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse thirdWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(3) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken3) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); List<WorkerMetadataResponse> workerMetadataResponses = Lists.newArrayList(firstWorkerMetadata, secondWorkerMetadata, thirdWorkerMetadata); TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(workerMetadataResponses.size())); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1).setBytes(1).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); getWorkerMetadataReady.await(); workerMetadataResponses.stream() .sorted(Comparator.comparingLong(WorkerMetadataResponse::getMetadataVersion).reversed()) .forEach(fakeGetWorkerMetadataStub::injectWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); verify(getWorkBudgetDistributor, atLeast(workerMetadataResponses.size())) .distributeBudget(any(), any()); }
.sorted(Comparator.comparingLong(WorkerMetadataResponse::getMetadataVersion).reversed())
public void testOnNewWorkerMetadata_redistributesBudget() throws InterruptedException { String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; String workerToken3 = "workerToken3"; WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse secondWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(2) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken2) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse thirdWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(3) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken3) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); List<WorkerMetadataResponse> workerMetadataResponses = Lists.newArrayList(firstWorkerMetadata, secondWorkerMetadata, thirdWorkerMetadata); TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(workerMetadataResponses.size())); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1).setBytes(1).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); getWorkerMetadataReady.await(); workerMetadataResponses.stream() .sorted(Comparator.comparingLong(WorkerMetadataResponse::getMetadataVersion)) .forEach(fakeGetWorkerMetadataStub::injectWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); verify(getWorkBudgetDistributor, atLeast(workerMetadataResponses.size())) .distributeBudget(any(), any()); }
class StreamingEngineClientTest { private static final WindmillServiceAddress DEFAULT_WINDMILL_SERVICE_ADDRESS = WindmillServiceAddress.create(HostAndPort.fromParts(WindmillChannelFactory.LOCALHOST, 443)); private static final ImmutableMap<String, WorkerMetadataResponse.Endpoint> DEFAULT = ImmutableMap.of( "global_data", WorkerMetadataResponse.Endpoint.newBuilder() .setDirectEndpoint(DEFAULT_WINDMILL_SERVICE_ADDRESS.gcpServiceAddress().toString()) .build()); private static final long CLIENT_ID = 1L; private static final String JOB_ID = "jobId"; private static final String PROJECT_ID = "projectId"; private static final String WORKER_ID = "workerId"; private static final JobHeader JOB_HEADER = JobHeader.newBuilder() .setJobId(JOB_ID) .setProjectId(PROJECT_ID) .setWorkerId(WORKER_ID) .build(); @Rule public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); private final Set<ManagedChannel> channels = new HashSet<>(); private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); private final GrpcWindmillStreamFactory streamFactory = spy(GrpcWindmillStreamFactory.of(JOB_HEADER).build()); private final WindmillStubFactory stubFactory = new FakeWindmillStubFactory( () -> { ManagedChannel channel = grpcCleanup.register( WindmillChannelFactory.inProcessChannel("StreamingEngineClientTest")); channels.add(channel); return channel; }); private final GrpcDispatcherClient dispatcherClient = GrpcDispatcherClient.forTesting( stubFactory, new ArrayList<>(), new ArrayList<>(), new HashSet<>()); private final AtomicReference<StreamingEngineConnectionState> connections = new AtomicReference<>(StreamingEngineConnectionState.EMPTY); private final AtomicBoolean isBudgetRefreshPaused = new AtomicBoolean(false); @Rule public transient Timeout globalTimeout = Timeout.seconds(600); private Server fakeStreamingEngineServer; private CountDownLatch getWorkerMetadataReady; private GetWorkerMetadataTestStub fakeGetWorkerMetadataStub; private StreamingEngineClient streamingEngineClient; private static WorkItemProcessor noOpProcessWorkItemFn() { return (computation, inputDataWatermark, synchronizedProcessingTime, workItem, ackQueuedWorkItem, getWorkStreamLatencies) -> {}; } private static GetWorkRequest getWorkRequest(long items, long bytes) { return GetWorkRequest.newBuilder() .setJobId(JOB_ID) .setProjectId(PROJECT_ID) .setWorkerId(WORKER_ID) .setClientId(CLIENT_ID) .setMaxItems(items) .setMaxBytes(bytes) .build(); } private static WorkerMetadataResponse.Endpoint metadataResponseEndpoint(String workerToken) { return WorkerMetadataResponse.Endpoint.newBuilder().setBackendWorkerToken(workerToken).build(); } @Before public void setUp() throws IOException { channels.forEach(ManagedChannel::shutdownNow); channels.clear(); fakeStreamingEngineServer = grpcCleanup.register( InProcessServerBuilder.forName("StreamingEngineClientTest") .fallbackHandlerRegistry(serviceRegistry) .executor(Executors.newFixedThreadPool(1)) .build()); fakeStreamingEngineServer.start(); dispatcherClient.consumeWindmillDispatcherEndpoints( ImmutableSet.of( HostAndPort.fromString( new InProcessSocketAddress("StreamingEngineClientTest").toString()))); getWorkerMetadataReady = new CountDownLatch(1); fakeGetWorkerMetadataStub = new GetWorkerMetadataTestStub(getWorkerMetadataReady); serviceRegistry.addService(fakeGetWorkerMetadataStub); } @After public void cleanUp() { Preconditions.checkNotNull(streamingEngineClient).finish(); fakeGetWorkerMetadataStub.close(); fakeStreamingEngineServer.shutdownNow(); 
channels.forEach(ManagedChannel::shutdownNow); } private StreamingEngineClient newStreamingEngineClient( GetWorkBudget getWorkBudget, GetWorkBudgetDistributor getWorkBudgetDistributor, WorkItemProcessor workItemProcessor) { return StreamingEngineClient.forTesting( JOB_HEADER, getWorkBudget, connections, streamFactory, workItemProcessor, stubFactory, getWorkBudgetDistributor, dispatcherClient, CLIENT_ID, isBudgetRefreshPaused); } @Test public void testStreamsStartCorrectly() throws InterruptedException { long items = 10L; long bytes = 10L; int numBudgetDistributionsExpected = 1; TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(numBudgetDistributionsExpected)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(items).setBytes(bytes).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints(metadataResponseEndpoint(workerToken)) .addWorkEndpoints(metadataResponseEndpoint(workerToken2)) .putAllGlobalDataEndpoints(DEFAULT) .build(); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata(firstWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); StreamingEngineConnectionState currentConnections = connections.get(); assertEquals(2, currentConnections.windmillConnections().size()); assertEquals(2, currentConnections.windmillStreams().size()); Set<String> workerTokens = currentConnections.windmillConnections().values().stream() .map(WindmillConnection::backendWorkerToken) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); assertTrue(workerTokens.contains(workerToken)); assertTrue(workerTokens.contains(workerToken2)); verify(getWorkBudgetDistributor, atLeast(1)) .distributeBudget( any(), eq(GetWorkBudget.builder().setItems(items).setBytes(bytes).build())); verify(streamFactory, times(2)) .createDirectGetWorkStream( any(), eq(getWorkRequest(0, 0)), any(), any(), any(), eq(noOpProcessWorkItemFn())); verify(streamFactory, times(2)).createGetDataStream(any(), any()); verify(streamFactory, times(2)).createCommitWorkStream(any(), any()); } @Test public void testScheduledBudgetRefresh() throws InterruptedException { TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(2)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1L).setBytes(1L).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata( WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints(metadataResponseEndpoint("workerToken")) .putAllGlobalDataEndpoints(DEFAULT) .build()); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); verify(getWorkBudgetDistributor, atLeast(2)).distributeBudget(any(), any()); } @Test public void testOnNewWorkerMetadata_correctlyRemovesStaleWindmillServers() throws InterruptedException { int metadataCount = 2; TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(metadataCount)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1).setBytes(1).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; String workerToken3 = "workerToken3"; 
WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken) .build()) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken2) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse secondWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(2) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken3) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata(firstWorkerMetadata); fakeGetWorkerMetadataStub.injectWorkerMetadata(secondWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); StreamingEngineConnectionState currentConnections = connections.get(); assertEquals(1, currentConnections.windmillConnections().size()); assertEquals(1, currentConnections.windmillStreams().size()); Set<String> workerTokens = connections.get().windmillConnections().values().stream() .map(WindmillConnection::backendWorkerToken) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); assertFalse(workerTokens.contains(workerToken)); assertFalse(workerTokens.contains(workerToken2)); } @Test private void waitForWorkerMetadataToBeConsumed( TestGetWorkBudgetDistributor getWorkBudgetDistributor) throws InterruptedException { getWorkBudgetDistributor.waitForBudgetDistribution(); while (isBudgetRefreshPaused.get()) { } } private static class GetWorkerMetadataTestStub extends CloudWindmillMetadataServiceV1Alpha1Grpc .CloudWindmillMetadataServiceV1Alpha1ImplBase { private static final WorkerMetadataResponse CLOSE_ALL_STREAMS = WorkerMetadataResponse.newBuilder().setMetadataVersion(100).build(); private final CountDownLatch ready; private @Nullable StreamObserver<WorkerMetadataResponse> responseObserver; private GetWorkerMetadataTestStub(CountDownLatch ready) { this.ready = ready; } @Override public StreamObserver<WorkerMetadataRequest> getWorkerMetadata( StreamObserver<WorkerMetadataResponse> responseObserver) { if (this.responseObserver == null) { ready.countDown(); this.responseObserver = responseObserver; } return new StreamObserver<WorkerMetadataRequest>() { @Override public void onNext(WorkerMetadataRequest workerMetadataRequest) {} @Override public void onError(Throwable throwable) { if (responseObserver != null) { responseObserver.onError(throwable); } } @Override public void onCompleted() {} }; } private void injectWorkerMetadata(WorkerMetadataResponse response) { if (responseObserver != null) { responseObserver.onNext(response); } } private void close() { if (responseObserver != null) { responseObserver.onNext(CLOSE_ALL_STREAMS); } } } private static class TestGetWorkBudgetDistributor implements GetWorkBudgetDistributor { private final CountDownLatch getWorkBudgetDistributorTriggered; private TestGetWorkBudgetDistributor(int numBudgetDistributionsExpected) { this.getWorkBudgetDistributorTriggered = new CountDownLatch(numBudgetDistributionsExpected); } @SuppressWarnings("ReturnValueIgnored") private void waitForBudgetDistribution() throws InterruptedException { getWorkBudgetDistributorTriggered.await(5, TimeUnit.SECONDS); } @Override public void distributeBudget( ImmutableCollection<WindmillStreamSender> streams, GetWorkBudget getWorkBudget) { streams.forEach(stream -> stream.adjustBudget(getWorkBudget.items(), 
getWorkBudget.bytes())); getWorkBudgetDistributorTriggered.countDown(); } } }
class StreamingEngineClientTest { private static final WindmillServiceAddress DEFAULT_WINDMILL_SERVICE_ADDRESS = WindmillServiceAddress.create(HostAndPort.fromParts(WindmillChannelFactory.LOCALHOST, 443)); private static final ImmutableMap<String, WorkerMetadataResponse.Endpoint> DEFAULT = ImmutableMap.of( "global_data", WorkerMetadataResponse.Endpoint.newBuilder() .setDirectEndpoint(DEFAULT_WINDMILL_SERVICE_ADDRESS.gcpServiceAddress().toString()) .build()); private static final long CLIENT_ID = 1L; private static final String JOB_ID = "jobId"; private static final String PROJECT_ID = "projectId"; private static final String WORKER_ID = "workerId"; private static final JobHeader JOB_HEADER = JobHeader.newBuilder() .setJobId(JOB_ID) .setProjectId(PROJECT_ID) .setWorkerId(WORKER_ID) .build(); @Rule public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); @Rule public transient Timeout globalTimeout = Timeout.seconds(600); private final Set<ManagedChannel> channels = new HashSet<>(); private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); private final GrpcWindmillStreamFactory streamFactory = spy(GrpcWindmillStreamFactory.of(JOB_HEADER).build()); private final WindmillStubFactory stubFactory = new FakeWindmillStubFactory( () -> { ManagedChannel channel = grpcCleanup.register( WindmillChannelFactory.inProcessChannel("StreamingEngineClientTest")); channels.add(channel); return channel; }); private final GrpcDispatcherClient dispatcherClient = GrpcDispatcherClient.forTesting( stubFactory, new ArrayList<>(), new ArrayList<>(), new HashSet<>()); private final AtomicReference<StreamingEngineConnectionState> connections = new AtomicReference<>(StreamingEngineConnectionState.EMPTY); private Server fakeStreamingEngineServer; private CountDownLatch getWorkerMetadataReady; private GetWorkerMetadataTestStub fakeGetWorkerMetadataStub; private StreamingEngineClient streamingEngineClient; private static WorkItemProcessor noOpProcessWorkItemFn() { return (computation, inputDataWatermark, synchronizedProcessingTime, workItem, ackQueuedWorkItem, getWorkStreamLatencies) -> {}; } private static GetWorkRequest getWorkRequest(long items, long bytes) { return GetWorkRequest.newBuilder() .setJobId(JOB_ID) .setProjectId(PROJECT_ID) .setWorkerId(WORKER_ID) .setClientId(CLIENT_ID) .setMaxItems(items) .setMaxBytes(bytes) .build(); } private static WorkerMetadataResponse.Endpoint metadataResponseEndpoint(String workerToken) { return WorkerMetadataResponse.Endpoint.newBuilder().setBackendWorkerToken(workerToken).build(); } @Before public void setUp() throws IOException { channels.forEach(ManagedChannel::shutdownNow); channels.clear(); fakeStreamingEngineServer = grpcCleanup.register( InProcessServerBuilder.forName("StreamingEngineClientTest") .fallbackHandlerRegistry(serviceRegistry) .executor(Executors.newFixedThreadPool(1)) .build()); fakeStreamingEngineServer.start(); dispatcherClient.consumeWindmillDispatcherEndpoints( ImmutableSet.of( HostAndPort.fromString( new InProcessSocketAddress("StreamingEngineClientTest").toString()))); getWorkerMetadataReady = new CountDownLatch(1); fakeGetWorkerMetadataStub = new GetWorkerMetadataTestStub(getWorkerMetadataReady); serviceRegistry.addService(fakeGetWorkerMetadataStub); } @After public void cleanUp() { Preconditions.checkNotNull(streamingEngineClient).finish(); fakeGetWorkerMetadataStub.close(); fakeStreamingEngineServer.shutdownNow(); channels.forEach(ManagedChannel::shutdownNow); } private StreamingEngineClient newStreamingEngineClient( 
GetWorkBudget getWorkBudget, GetWorkBudgetDistributor getWorkBudgetDistributor, WorkItemProcessor workItemProcessor) { return StreamingEngineClient.forTesting( JOB_HEADER, getWorkBudget, connections, streamFactory, workItemProcessor, stubFactory, getWorkBudgetDistributor, dispatcherClient, CLIENT_ID); } @Test public void testStreamsStartCorrectly() throws InterruptedException { long items = 10L; long bytes = 10L; int numBudgetDistributionsExpected = 1; TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(numBudgetDistributionsExpected)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(items).setBytes(bytes).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints(metadataResponseEndpoint(workerToken)) .addWorkEndpoints(metadataResponseEndpoint(workerToken2)) .putAllGlobalDataEndpoints(DEFAULT) .build(); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata(firstWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); StreamingEngineConnectionState currentConnections = connections.get(); assertEquals(2, currentConnections.windmillConnections().size()); assertEquals(2, currentConnections.windmillStreams().size()); Set<String> workerTokens = currentConnections.windmillConnections().values().stream() .map(WindmillConnection::backendWorkerToken) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); assertTrue(workerTokens.contains(workerToken)); assertTrue(workerTokens.contains(workerToken2)); verify(getWorkBudgetDistributor, atLeast(1)) .distributeBudget( any(), eq(GetWorkBudget.builder().setItems(items).setBytes(bytes).build())); verify(streamFactory, times(2)) .createDirectGetWorkStream( any(), eq(getWorkRequest(0, 0)), any(), any(), any(), eq(noOpProcessWorkItemFn())); verify(streamFactory, times(2)).createGetDataStream(any(), any()); verify(streamFactory, times(2)).createCommitWorkStream(any(), any()); } @Test public void testScheduledBudgetRefresh() throws InterruptedException { TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(2)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1L).setBytes(1L).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata( WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints(metadataResponseEndpoint("workerToken")) .putAllGlobalDataEndpoints(DEFAULT) .build()); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); verify(getWorkBudgetDistributor, atLeast(2)).distributeBudget(any(), any()); } @Test public void testOnNewWorkerMetadata_correctlyRemovesStaleWindmillServers() throws InterruptedException { int metadataCount = 2; TestGetWorkBudgetDistributor getWorkBudgetDistributor = spy(new TestGetWorkBudgetDistributor(metadataCount)); streamingEngineClient = newStreamingEngineClient( GetWorkBudget.builder().setItems(1).setBytes(1).build(), getWorkBudgetDistributor, noOpProcessWorkItemFn()); String workerToken = "workerToken1"; String workerToken2 = "workerToken2"; String workerToken3 = "workerToken3"; WorkerMetadataResponse firstWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(1) .addWorkEndpoints( 
WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken) .build()) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken2) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); WorkerMetadataResponse secondWorkerMetadata = WorkerMetadataResponse.newBuilder() .setMetadataVersion(2) .addWorkEndpoints( WorkerMetadataResponse.Endpoint.newBuilder() .setBackendWorkerToken(workerToken3) .build()) .putAllGlobalDataEndpoints(DEFAULT) .build(); getWorkerMetadataReady.await(); fakeGetWorkerMetadataStub.injectWorkerMetadata(firstWorkerMetadata); fakeGetWorkerMetadataStub.injectWorkerMetadata(secondWorkerMetadata); waitForWorkerMetadataToBeConsumed(getWorkBudgetDistributor); StreamingEngineConnectionState currentConnections = connections.get(); assertEquals(1, currentConnections.windmillConnections().size()); assertEquals(1, currentConnections.windmillStreams().size()); Set<String> workerTokens = connections.get().windmillConnections().values().stream() .map(WindmillConnection::backendWorkerToken) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); assertFalse(workerTokens.contains(workerToken)); assertFalse(workerTokens.contains(workerToken2)); } @Test private void waitForWorkerMetadataToBeConsumed( TestGetWorkBudgetDistributor getWorkBudgetDistributor) throws InterruptedException { getWorkBudgetDistributor.waitForBudgetDistribution(); } private static class GetWorkerMetadataTestStub extends CloudWindmillMetadataServiceV1Alpha1Grpc .CloudWindmillMetadataServiceV1Alpha1ImplBase { private static final WorkerMetadataResponse CLOSE_ALL_STREAMS = WorkerMetadataResponse.newBuilder().setMetadataVersion(Long.MAX_VALUE).build(); private final CountDownLatch ready; private @Nullable StreamObserver<WorkerMetadataResponse> responseObserver; private GetWorkerMetadataTestStub(CountDownLatch ready) { this.ready = ready; } @Override public StreamObserver<WorkerMetadataRequest> getWorkerMetadata( StreamObserver<WorkerMetadataResponse> responseObserver) { if (this.responseObserver == null) { ready.countDown(); this.responseObserver = responseObserver; } return new StreamObserver<WorkerMetadataRequest>() { @Override public void onNext(WorkerMetadataRequest workerMetadataRequest) {} @Override public void onError(Throwable throwable) { if (responseObserver != null) { responseObserver.onError(throwable); } } @Override public void onCompleted() {} }; } private void injectWorkerMetadata(WorkerMetadataResponse response) { if (responseObserver != null) { responseObserver.onNext(response); } } private void close() { if (responseObserver != null) { responseObserver.onNext(CLOSE_ALL_STREAMS); } } } private static class TestGetWorkBudgetDistributor implements GetWorkBudgetDistributor { private final CountDownLatch getWorkBudgetDistributorTriggered; private TestGetWorkBudgetDistributor(int numBudgetDistributionsExpected) { this.getWorkBudgetDistributorTriggered = new CountDownLatch(numBudgetDistributionsExpected); } @SuppressWarnings("ReturnValueIgnored") private void waitForBudgetDistribution() throws InterruptedException { getWorkBudgetDistributorTriggered.await(5, TimeUnit.SECONDS); } @Override public void distributeBudget( ImmutableCollection<WindmillStreamSender> streams, GetWorkBudget getWorkBudget) { streams.forEach(stream -> stream.adjustBudget(getWorkBudget.items(), getWorkBudget.bytes())); getWorkBudgetDistributorTriggered.countDown(); } } }
The following cases are not supported: 1. the target table is a temporary table 2. the columns are specified explicitly 3. the watermark is specified explicitly 4. the constraints are specified explicitly. Regarding the partition key, we think we should support it; see that [spark](https://spark.apache.org/docs/latest/sql-ref-syntax-ddl-create-table-datasource.html) also supports it.
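For illustration, a hedged example (hypothetical table and connector names, not from the original record) of a statement the CTAS restriction discussed above is meant to reject, since it combines an explicit column list with an AS SELECT query:

```java
// Hypothetical DDL that the validate() check below would reject: an explicit
// column list combined with an AS SELECT query is not yet supported.
String rejectedCtas =
    "CREATE TABLE t1 (col1 INT) WITH ('connector' = 'csv') "
        + "AS SELECT col1 FROM source_table";
```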
public void validate() throws SqlValidateException { List<SqlTableConstraint> constraints = getFullConstraints().stream() .filter(SqlTableConstraint::isPrimaryKey) .collect(Collectors.toList()); if (constraints.size() > 1) { throw new SqlValidateException( constraints.get(1).getParserPosition(), "Duplicate primary key definition"); } else if (constraints.size() == 1) { Set<String> primaryKeyColumns = Arrays.stream(constraints.get(0).getColumnNames()).collect(Collectors.toSet()); for (SqlNode column : columnList) { SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlRegularColumn && primaryKeyColumns.contains(tableColumn.getName().getSimple())) { SqlRegularColumn regularColumn = (SqlRegularColumn) column; SqlDataTypeSpec notNullType = regularColumn.getType().withNullable(false); regularColumn.setType(notNullType); } } } if (tableLike != null) { tableLike.validate(); } if (query != null && (columnList.size() > 0 || constraints.size() > 0 || partitionKeyList.size() > 0)) { throw new SqlValidateException( pos, "CREATE TABLE AS SELECT syntax does not yet support to specific Column/Partition/Constraints."); } }
|| constraints.size() > 0
public void validate() throws SqlValidateException { List<SqlTableConstraint> constraints = getFullConstraints().stream() .filter(SqlTableConstraint::isPrimaryKey) .collect(Collectors.toList()); if (constraints.size() > 1) { throw new SqlValidateException( constraints.get(1).getParserPosition(), "Duplicate primary key definition"); } else if (constraints.size() == 1) { Set<String> primaryKeyColumns = Arrays.stream(constraints.get(0).getColumnNames()).collect(Collectors.toSet()); for (SqlNode column : columnList) { SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlRegularColumn && primaryKeyColumns.contains(tableColumn.getName().getSimple())) { SqlRegularColumn regularColumn = (SqlRegularColumn) column; SqlDataTypeSpec notNullType = regularColumn.getType().withNullable(false); regularColumn.setType(notNullType); } } } }
class SqlCreateTable extends SqlCreate implements ExtendedSqlNode { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE TABLE", SqlKind.CREATE_TABLE); private final SqlIdentifier tableName; private final SqlNodeList columnList; private final SqlNodeList propertyList; private final List<SqlTableConstraint> tableConstraints; private final SqlNodeList partitionKeyList; private final SqlWatermark watermark; private final SqlCharStringLiteral comment; private final SqlTableLike tableLike; private final boolean isTemporary; private final SqlNode query; public SqlCreateTable( SqlParserPos pos, SqlIdentifier tableName, SqlNodeList columnList, List<SqlTableConstraint> tableConstraints, SqlNodeList propertyList, SqlNodeList partitionKeyList, @Nullable SqlWatermark watermark, @Nullable SqlCharStringLiteral comment, @Nullable SqlTableLike tableLike, boolean isTemporary, boolean ifNotExists, @Nullable SqlNode query) { super(OPERATOR, pos, false, ifNotExists); this.tableName = requireNonNull(tableName, "tableName should not be null"); this.columnList = requireNonNull(columnList, "columnList should not be null"); this.tableConstraints = requireNonNull(tableConstraints, "table constraints should not be null"); this.propertyList = requireNonNull(propertyList, "propertyList should not be null"); this.partitionKeyList = requireNonNull(partitionKeyList, "partitionKeyList should not be null"); this.watermark = watermark; this.comment = comment; this.tableLike = tableLike; this.isTemporary = isTemporary; this.query = query; } @Override public @Nonnull SqlOperator getOperator() { return OPERATOR; } @Override public @Nonnull List<SqlNode> getOperandList() { return ImmutableNullableList.of( tableName, columnList, new SqlNodeList(tableConstraints, SqlParserPos.ZERO), propertyList, partitionKeyList, watermark, comment, tableLike, query); } public SqlIdentifier getTableName() { return tableName; } public SqlNodeList getColumnList() { return columnList; } public SqlNodeList getPropertyList() { return propertyList; } public SqlNodeList getPartitionKeyList() { return partitionKeyList; } public List<SqlTableConstraint> getTableConstraints() { return tableConstraints; } public Optional<SqlWatermark> getWatermark() { return Optional.ofNullable(watermark); } public Optional<SqlCharStringLiteral> getComment() { return Optional.ofNullable(comment); } public Optional<SqlTableLike> getTableLike() { return Optional.ofNullable(tableLike); } public boolean isIfNotExists() { return ifNotExists; } public boolean isTemporary() { return isTemporary; } public Optional<SqlNode> getQuery() { return Optional.ofNullable(query); } @Override public boolean hasRegularColumnsOnly() { for (SqlNode column : columnList) { final SqlTableColumn tableColumn = (SqlTableColumn) column; if (!(tableColumn instanceof SqlRegularColumn)) { return false; } } return true; } /** Returns the column constraints plus the table constraints. */ public List<SqlTableConstraint> getFullConstraints() { List<SqlTableConstraint> ret = new ArrayList<>(); this.columnList.forEach( column -> { SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlRegularColumn) { SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn; regularColumn.getConstraint().map(ret::add); } }); ret.addAll(this.tableConstraints); return ret; } /** * Returns the projection format of the DDL columns(including computed columns). i.e. 
the * following DDL: * * <pre> * create table tbl1( * col1 int, * col2 varchar, * col3 as to_timestamp(col2) * ) with ( * 'connector' = 'csv' * ) * </pre> * * <p>is equivalent with query "col1, col2, to_timestamp(col2) as col3", caution that the * "computed column" operands have been reversed. */ public String getColumnSqlString() { SqlPrettyWriter writer = new SqlPrettyWriter( SqlPrettyWriter.config() .withDialect(AnsiSqlDialect.DEFAULT) .withAlwaysUseParentheses(true) .withSelectListItemsOnSeparateLines(false) .withIndentation(0)); writer.startList("", ""); for (SqlNode column : columnList) { writer.sep(","); SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlComputedColumn) { SqlComputedColumn computedColumn = (SqlComputedColumn) tableColumn; computedColumn.getExpr().unparse(writer, 0, 0); writer.keyword("AS"); } tableColumn.getName().unparse(writer, 0, 0); } return writer.toString(); } @Override public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("CREATE"); if (isTemporary()) { writer.keyword("TEMPORARY"); } writer.keyword("TABLE"); if (isIfNotExists()) { writer.keyword("IF NOT EXISTS"); } tableName.unparse(writer, leftPrec, rightPrec); if (columnList.size() > 0 || tableConstraints.size() > 0 || watermark != null) { SqlUnparseUtils.unparseTableSchema( writer, leftPrec, rightPrec, columnList, tableConstraints, watermark); } if (comment != null) { writer.newlineAndIndent(); writer.keyword("COMMENT"); comment.unparse(writer, leftPrec, rightPrec); } if (this.partitionKeyList.size() > 0) { writer.newlineAndIndent(); writer.keyword("PARTITIONED BY"); SqlWriter.Frame partitionedByFrame = writer.startList("(", ")"); this.partitionKeyList.unparse(writer, leftPrec, rightPrec); writer.endList(partitionedByFrame); writer.newlineAndIndent(); } if (this.propertyList.size() > 0) { writer.keyword("WITH"); SqlWriter.Frame withFrame = writer.startList("(", ")"); for (SqlNode property : propertyList) { SqlUnparseUtils.printIndent(writer); property.unparse(writer, leftPrec, rightPrec); } writer.newlineAndIndent(); writer.endList(withFrame); } if (this.tableLike != null) { writer.newlineAndIndent(); this.tableLike.unparse(writer, leftPrec, rightPrec); } if (this.query != null) { writer.newlineAndIndent(); writer.keyword("AS"); writer.newlineAndIndent(); this.query.unparse(writer, leftPrec, rightPrec); } } /** Table creation context. */ public static class TableCreationContext { public List<SqlNode> columnList = new ArrayList<>(); public List<SqlTableConstraint> constraints = new ArrayList<>(); @Nullable public SqlWatermark watermark; } public String[] fullTableName() { return tableName.names.toArray(new String[0]); } }
class SqlCreateTable extends SqlCreate implements ExtendedSqlNode { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE TABLE", SqlKind.CREATE_TABLE); private final SqlIdentifier tableName; private final SqlNodeList columnList; private final SqlNodeList propertyList; private final List<SqlTableConstraint> tableConstraints; private final SqlNodeList partitionKeyList; private final SqlWatermark watermark; private final SqlCharStringLiteral comment; private final boolean isTemporary; public SqlCreateTable( SqlParserPos pos, SqlIdentifier tableName, SqlNodeList columnList, List<SqlTableConstraint> tableConstraints, SqlNodeList propertyList, SqlNodeList partitionKeyList, @Nullable SqlWatermark watermark, @Nullable SqlCharStringLiteral comment, boolean isTemporary, boolean ifNotExists) { this( OPERATOR, pos, tableName, columnList, tableConstraints, propertyList, partitionKeyList, watermark, comment, isTemporary, ifNotExists); } protected SqlCreateTable( SqlSpecialOperator operator, SqlParserPos pos, SqlIdentifier tableName, SqlNodeList columnList, List<SqlTableConstraint> tableConstraints, SqlNodeList propertyList, SqlNodeList partitionKeyList, @Nullable SqlWatermark watermark, @Nullable SqlCharStringLiteral comment, boolean isTemporary, boolean ifNotExists) { super(operator, pos, false, ifNotExists); this.tableName = requireNonNull(tableName, "tableName should not be null"); this.columnList = requireNonNull(columnList, "columnList should not be null"); this.tableConstraints = requireNonNull(tableConstraints, "table constraints should not be null"); this.propertyList = requireNonNull(propertyList, "propertyList should not be null"); this.partitionKeyList = requireNonNull(partitionKeyList, "partitionKeyList should not be null"); this.watermark = watermark; this.comment = comment; this.isTemporary = isTemporary; } @Override public @Nonnull SqlOperator getOperator() { return OPERATOR; } @Override public @Nonnull List<SqlNode> getOperandList() { return ImmutableNullableList.of( tableName, columnList, new SqlNodeList(tableConstraints, SqlParserPos.ZERO), propertyList, partitionKeyList, watermark, comment); } public SqlIdentifier getTableName() { return tableName; } public SqlNodeList getColumnList() { return columnList; } public SqlNodeList getPropertyList() { return propertyList; } public SqlNodeList getPartitionKeyList() { return partitionKeyList; } public List<SqlTableConstraint> getTableConstraints() { return tableConstraints; } public Optional<SqlWatermark> getWatermark() { return Optional.ofNullable(watermark); } public Optional<SqlCharStringLiteral> getComment() { return Optional.ofNullable(comment); } public boolean isIfNotExists() { return ifNotExists; } public boolean isTemporary() { return isTemporary; } @Override public boolean hasRegularColumnsOnly() { for (SqlNode column : columnList) { final SqlTableColumn tableColumn = (SqlTableColumn) column; if (!(tableColumn instanceof SqlRegularColumn)) { return false; } } return true; } /** Returns the column constraints plus the table constraints. 
*/ public List<SqlTableConstraint> getFullConstraints() { List<SqlTableConstraint> ret = new ArrayList<>(); this.columnList.forEach( column -> { SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlRegularColumn) { SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn; regularColumn.getConstraint().map(ret::add); } }); ret.addAll(this.tableConstraints); return ret; } /** * Returns the projection format of the DDL columns(including computed columns). i.e. the * following DDL: * * <pre> * create table tbl1( * col1 int, * col2 varchar, * col3 as to_timestamp(col2) * ) with ( * 'connector' = 'csv' * ) * </pre> * * <p>is equivalent with query "col1, col2, to_timestamp(col2) as col3", caution that the * "computed column" operands have been reversed. */ public String getColumnSqlString() { SqlPrettyWriter writer = new SqlPrettyWriter( SqlPrettyWriter.config() .withDialect(AnsiSqlDialect.DEFAULT) .withAlwaysUseParentheses(true) .withSelectListItemsOnSeparateLines(false) .withIndentation(0)); writer.startList("", ""); for (SqlNode column : columnList) { writer.sep(","); SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlComputedColumn) { SqlComputedColumn computedColumn = (SqlComputedColumn) tableColumn; computedColumn.getExpr().unparse(writer, 0, 0); writer.keyword("AS"); } tableColumn.getName().unparse(writer, 0, 0); } return writer.toString(); } @Override public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("CREATE"); if (isTemporary()) { writer.keyword("TEMPORARY"); } writer.keyword("TABLE"); if (isIfNotExists()) { writer.keyword("IF NOT EXISTS"); } tableName.unparse(writer, leftPrec, rightPrec); if (columnList.size() > 0 || tableConstraints.size() > 0 || watermark != null) { SqlUnparseUtils.unparseTableSchema( writer, leftPrec, rightPrec, columnList, tableConstraints, watermark); } if (comment != null) { writer.newlineAndIndent(); writer.keyword("COMMENT"); comment.unparse(writer, leftPrec, rightPrec); } if (this.partitionKeyList.size() > 0) { writer.newlineAndIndent(); writer.keyword("PARTITIONED BY"); SqlWriter.Frame partitionedByFrame = writer.startList("(", ")"); this.partitionKeyList.unparse(writer, leftPrec, rightPrec); writer.endList(partitionedByFrame); writer.newlineAndIndent(); } if (this.propertyList.size() > 0) { writer.keyword("WITH"); SqlWriter.Frame withFrame = writer.startList("(", ")"); for (SqlNode property : propertyList) { SqlUnparseUtils.printIndent(writer); property.unparse(writer, leftPrec, rightPrec); } writer.newlineAndIndent(); writer.endList(withFrame); } } /** Table creation context. */ public static class TableCreationContext { public List<SqlNode> columnList = new ArrayList<>(); public List<SqlTableConstraint> constraints = new ArrayList<>(); @Nullable public SqlWatermark watermark; } public String[] fullTableName() { return tableName.names.toArray(new String[0]); } }
Hey @michalvavrik, I've missed all your recent comments; I was aware this PR was still open :-), but could not find the proper time to review. So let me ask: do you agree that this particular issue (how to manage HTTP Security Policy alongside annotation-based authentication requirements like `@Authenticated`, `@CodeFlow`, etc.) is an additional concern, and that we can handle it in a follow-up PR? IMHO that would simplify the current PR significantly and let us focus on this important problem later.
static void selectAuthMechanism(RoutingContext routingContext, String authMechanism) { if (requestAlreadyAuthenticated(routingContext)) { throw new AuthenticationFailedException("Request has been authenticated before the '" + authMechanism + "' authentication mechanism was selected with an annotation. Most often, this will happen when " + "you configure HTTPSecurityPolicy that requires authentication (like 'roles-allowed' policy). " + "Please revise your 'quarkus.http.auth.permission.*' configuration properties"); } routingContext.put(AUTH_MECHANISM, authMechanism); }
throw new AuthenticationFailedException("Request has been authenticated before the '" + authMechanism
static void selectAuthMechanism(RoutingContext routingContext, String authMechanism) { if (requestAlreadyAuthenticated(routingContext, authMechanism)) { throw new AuthenticationFailedException(""" The '%1$s' authentication mechanism is required to authenticate the request but it was already authenticated with the '%2$s' authentication mechanism. It can happen if the '%1$s' is selected with an annotation but '%2$s' is activated by the HTTP security policy which is enforced before the JAX-RS chain is run. In such cases, please set the 'quarkus.http.auth.permission."permissions".applies-to=JAXRS' to all HTTP security policies which secure the same REST endpoints as the ones secured by the '%1$s' authentication mechanism selected with the annotation. """.formatted(authMechanism, routingContext.get(AUTH_MECHANISM))); } routingContext.put(AUTH_MECHANISM, authMechanism); }
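A minimal sketch, using the helper names visible later in this class, of the condition the rewritten guard relies on: authentication only fails when an attempt was already made with a *different* mechanism than the one selected by the annotation.

```java
// Sketch of the guard condition (helpers as defined later in HttpAuthenticator):
// fail only when the request was authenticated with a different mechanism.
private static boolean requestAlreadyAuthenticated(RoutingContext event, String newAuthMechanism) {
    return event.get(ATTEMPT_AUTH_INVOKED) == TRUE
            && !newAuthMechanism.equalsIgnoreCase(event.get(AUTH_MECHANISM));
}
```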
class NoAuthenticationMechanism implements HttpAuthenticationMechanism { @Override public Uni<SecurityIdentity> authenticate(RoutingContext context, IdentityProviderManager identityProviderManager) { return Uni.createFrom().optional(Optional.empty()); } @Override public Uni<ChallengeData> getChallenge(RoutingContext context) { ChallengeData challengeData = new ChallengeData(HttpResponseStatus.FORBIDDEN.code(), null, null); return Uni.createFrom().item(challengeData); } @Override public Set<Class<? extends AuthenticationRequest>> getCredentialTypes() { return Collections.singleton(AnonymousAuthenticationRequest.class); } @Override public HttpCredentialTransport getCredentialTransport() { return null; } }
class HttpAuthenticator { private static final Logger log = Logger.getLogger(HttpAuthenticator.class); /** * Added to a {@link RoutingContext} as selected authentication mechanism. */ private static final String AUTH_MECHANISM = HttpAuthenticator.class.getName() + " /** * Added to a {@link RoutingContext} when {@link this */ private static final String ATTEMPT_AUTH_INVOKED = HttpAuthenticator.class.getName() + " private static boolean selectAuthMechanismWithAnnotation = false; private final IdentityProviderManager identityProviderManager; private final HttpAuthenticationMechanism[] mechanisms; private final SecurityEventHelper<AuthenticationSuccessEvent, AuthenticationFailureEvent> securityEventHelper; public HttpAuthenticator(IdentityProviderManager identityProviderManager, Event<AuthenticationFailureEvent> authFailureEvent, Event<AuthenticationSuccessEvent> authSuccessEvent, BeanManager beanManager, HttpBuildTimeConfig httpBuildTimeConfig, Instance<HttpAuthenticationMechanism> httpAuthenticationMechanism, Instance<IdentityProvider<?>> providers, @ConfigProperty(name = "quarkus.security.events.enabled") boolean securityEventsEnabled) { this.securityEventHelper = new SecurityEventHelper<>(authSuccessEvent, authFailureEvent, AUTHENTICATION_SUCCESS, AUTHENTICATION_FAILURE, beanManager, securityEventsEnabled); this.identityProviderManager = identityProviderManager; List<HttpAuthenticationMechanism> mechanisms = new ArrayList<>(); for (HttpAuthenticationMechanism mechanism : httpAuthenticationMechanism) { if (mechanism.getCredentialTypes().isEmpty()) { log.debugf("HttpAuthenticationMechanism '%s' provided no required credential types, therefore it needs " + "to be able to perform authentication without any IdentityProvider", mechanism.getClass().getName()); mechanisms.add(mechanism); continue; } boolean found = false; for (Class<? extends AuthenticationRequest> mechType : mechanism.getCredentialTypes()) { for (IdentityProvider<?> i : providers) { if (i.getRequestType().equals(mechType)) { found = true; break; } } if (found) { break; } } if (found) { mechanisms.add(mechanism); } else if (BasicAuthenticationMechanism.class.equals(mechanism.getClass()) && httpBuildTimeConfig.auth.basic.isEmpty()) { log.debug(""" BasicAuthenticationMechanism has been enabled because no other authentication mechanism has been detected, but there is no IdentityProvider based on username and password. Please use one of supported extensions if you plan to use the mechanism. For more information go to the https: """); } else { throw new RuntimeException(""" HttpAuthenticationMechanism '%s' requires one or more IdentityProviders supporting at least one of the following credentials types: %s. Please refer to the https: """.formatted(mechanism.getClass().getName(), mechanism.getCredentialTypes())); } } if (mechanisms.isEmpty()) { this.mechanisms = new HttpAuthenticationMechanism[] { new NoAuthenticationMechanism() }; } else { mechanisms.sort(new Comparator<HttpAuthenticationMechanism>() { @Override public int compare(HttpAuthenticationMechanism mech1, HttpAuthenticationMechanism mech2) { return Integer.compare(mech2.getPriority(), mech1.getPriority()); } }); this.mechanisms = mechanisms.toArray(new HttpAuthenticationMechanism[mechanisms.size()]); } } public IdentityProviderManager getIdentityProviderManager() { return identityProviderManager; } /** * Attempts authentication with the contents of the request. If this is possible the Uni * will resolve to a valid SecurityIdentity when it is subscribed to. 
Note that Uni is lazy, * so this may not happen until the Uni is subscribed to. * <p> * If invalid credentials are present then the completion stage will resolve to a * {@link io.quarkus.security.AuthenticationFailedException} * <p> * If no credentials are present it will resolve to null. */ public Uni<SecurityIdentity> attemptAuthentication(RoutingContext routingContext) { if (selectAuthMechanismWithAnnotation) { rememberAuthAttempted(routingContext); } final String pathSpecificMechanism; if (selectAuthMechanismWithAnnotation && isAuthMechanismSelected(routingContext)) { pathSpecificMechanism = routingContext.get(AUTH_MECHANISM); } else { AbstractPathMatchingHttpSecurityPolicy pathMatchingPolicy = routingContext .get(AbstractPathMatchingHttpSecurityPolicy.class.getName()); pathSpecificMechanism = pathMatchingPolicy != null ? pathMatchingPolicy.getAuthMechanismName(routingContext) : null; } Uni<SecurityIdentity> result; if (pathSpecificMechanism == null) { result = createSecurityIdentity(routingContext, 0); } else { result = findBestCandidateMechanism(routingContext, pathSpecificMechanism, 0).onItem().ifNotNull() .transformToUni(new Function<HttpAuthenticationMechanism, Uni<? extends SecurityIdentity>>() { @Override public Uni<SecurityIdentity> apply(HttpAuthenticationMechanism mech) { return mech.authenticate(routingContext, identityProviderManager); } }); } if (securityEventHelper.fireEventOnFailure()) { result = result.onFailure().invoke(new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { securityEventHelper.fireFailureEvent(new AuthenticationFailureEvent(throwable, Map.of(RoutingContext.class.getName(), routingContext))); } }); } if (securityEventHelper.fireEventOnSuccess()) { result = result.onItem().ifNotNull().invoke(new Consumer<SecurityIdentity>() { @Override public void accept(SecurityIdentity securityIdentity) { securityEventHelper.fireSuccessEvent(new AuthenticationSuccessEvent(securityIdentity, Map.of(RoutingContext.class.getName(), routingContext))); } }); } return result; } private Uni<SecurityIdentity> createSecurityIdentity(RoutingContext routingContext, int i) { if (i == mechanisms.length) { return Uni.createFrom().nullItem(); } return mechanisms[i].authenticate(routingContext, identityProviderManager) .onItem().transformToUni(new Function<SecurityIdentity, Uni<? extends SecurityIdentity>>() { @Override public Uni<SecurityIdentity> apply(SecurityIdentity identity) { if (identity != null) { if (selectAuthMechanismWithAnnotation && !isAuthMechanismSelected(routingContext)) { return rememberAuthMechScheme(mechanisms[i], routingContext).replaceWith(identity); } return Uni.createFrom().item(identity); } return createSecurityIdentity(routingContext, i + 1); } }); } /** * @return */ public Uni<Boolean> sendChallenge(RoutingContext routingContext) { if (!routingContext.request().isEnded()) { routingContext.request().resume(); } Uni<Boolean> result = null; if (mechanisms.length > 1) { HttpAuthenticationMechanism matchingMech = routingContext.get(HttpAuthenticationMechanism.class.getName()); if (matchingMech != null) { result = matchingMech.sendChallenge(routingContext); } } if (result == null) { result = mechanisms[0].sendChallenge(routingContext); for (int i = 1; i < mechanisms.length; ++i) { HttpAuthenticationMechanism mech = mechanisms[i]; result = result.onItem().transformToUni(new Function<Boolean, Uni<? extends Boolean>>() { @Override public Uni<? 
extends Boolean> apply(Boolean authDone) { if (authDone) { return Uni.createFrom().item(authDone); } return mech.sendChallenge(routingContext); } }); } } return result.onItem().transformToUni(new Function<Boolean, Uni<? extends Boolean>>() { @Override public Uni<? extends Boolean> apply(Boolean authDone) { if (!authDone) { log.debug("Authentication has not been done, returning HTTP status 401"); routingContext.response().setStatusCode(401); routingContext.response().end(); } return Uni.createFrom().item(authDone); } }); } public Uni<ChallengeData> getChallenge(RoutingContext routingContext) { if (mechanisms.length > 1) { HttpAuthenticationMechanism matchingMech = routingContext.get(HttpAuthenticationMechanism.class.getName()); if (matchingMech != null) { return matchingMech.getChallenge(routingContext); } } Uni<ChallengeData> result = mechanisms[0].getChallenge(routingContext); for (int i = 1; i < mechanisms.length; ++i) { HttpAuthenticationMechanism mech = mechanisms[i]; result = result.onItem().transformToUni(new Function<ChallengeData, Uni<? extends ChallengeData>>() { @Override public Uni<? extends ChallengeData> apply(ChallengeData data) { if (data != null) { return Uni.createFrom().item(data); } return mech.getChallenge(routingContext); } }); } return result; } private Uni<HttpAuthenticationMechanism> findBestCandidateMechanism(RoutingContext routingContext, String pathSpecificMechanism, int i) { if (i == mechanisms.length) { return Uni.createFrom().nullItem(); } return getPathSpecificMechanism(i, routingContext, pathSpecificMechanism).onItem().transformToUni( new Function<HttpAuthenticationMechanism, Uni<? extends HttpAuthenticationMechanism>>() { @Override public Uni<? extends HttpAuthenticationMechanism> apply(HttpAuthenticationMechanism mech) { if (mech != null) { if (selectAuthMechanismWithAnnotation && !isAuthMechanismSelected(routingContext)) { return rememberAuthMechScheme(mech, routingContext).replaceWith(mech); } return Uni.createFrom().item(mech); } return findBestCandidateMechanism(routingContext, pathSpecificMechanism, i + 1); } }); } private Uni<HttpAuthenticationMechanism> getPathSpecificMechanism(int index, RoutingContext routingContext, String pathSpecificMechanism) { return getCredentialTransport(mechanisms[index], routingContext).onItem() .transform(new Function<HttpCredentialTransport, HttpAuthenticationMechanism>() { @Override public HttpAuthenticationMechanism apply(HttpCredentialTransport t) { if (t != null && t.getAuthenticationScheme().equalsIgnoreCase(pathSpecificMechanism)) { routingContext.put(HttpAuthenticationMechanism.class.getName(), mechanisms[index]); routingContext.put(AUTH_MECHANISM, t.getAuthenticationScheme()); return mechanisms[index]; } return null; } }); } static void selectAuthMechanismWithAnnotation() { selectAuthMechanismWithAnnotation = true; } private static Uni<HttpCredentialTransport> getCredentialTransport(HttpAuthenticationMechanism mechanism, RoutingContext routingContext) { try { return mechanism.getCredentialTransport(routingContext); } catch (UnsupportedOperationException ex) { return Uni.createFrom().item(mechanism.getCredentialTransport()); } } private static void rememberAuthAttempted(RoutingContext routingContext) { routingContext.put(ATTEMPT_AUTH_INVOKED, TRUE); } private static boolean isAuthMechanismSelected(RoutingContext routingContext) { return routingContext.get(AUTH_MECHANISM) != null; } private static boolean requestAlreadyAuthenticated(RoutingContext event, String newAuthMechanism) { return 
event.get(ATTEMPT_AUTH_INVOKED) == TRUE && authenticatedWithDifferentAuthMechanism(newAuthMechanism, event); } private static boolean authenticatedWithDifferentAuthMechanism(String newAuthMechanism, RoutingContext event) { return !newAuthMechanism.equalsIgnoreCase(event.get(AUTH_MECHANISM)); } /** * Remember authentication mechanism used for authentication so that we know what mechanism has been used * in case that someone tries to change the mechanism after the authentication. This way, we can be permissive * when the selected mechanism is same as the one already used. */ private static Uni<HttpCredentialTransport> rememberAuthMechScheme(HttpAuthenticationMechanism mech, RoutingContext event) { return getCredentialTransport(mech, event) .onItem().ifNotNull().invoke(new Consumer<HttpCredentialTransport>() { @Override public void accept(HttpCredentialTransport t) { if (t.getAuthenticationScheme() != null) { event.put(AUTH_MECHANISM, t.getAuthenticationScheme()); } } }); } static class NoAuthenticationMechanism implements HttpAuthenticationMechanism { @Override public Uni<SecurityIdentity> authenticate(RoutingContext context, IdentityProviderManager identityProviderManager) { return Uni.createFrom().optional(Optional.empty()); } @Override public Uni<ChallengeData> getChallenge(RoutingContext context) { ChallengeData challengeData = new ChallengeData(HttpResponseStatus.FORBIDDEN.code(), null, null); return Uni.createFrom().item(challengeData); } @Override public Set<Class<? extends AuthenticationRequest>> getCredentialTypes() { return Collections.singleton(AnonymousAuthenticationRequest.class); } @Override public HttpCredentialTransport getCredentialTransport() { return null; } } }
Well, then don't pass incomplete data. Metrics are serialized one at a time, so you can simply remember, during serialization, the last metric that still fit into the buffer, and once you go over the limit, roll back to that point. You can also use other interesting heuristics, like dropping histograms first (since they are significantly larger).
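A minimal sketch of the rollback strategy suggested above, assuming a plain `ByteBuffer` and pre-serialized metric payloads (the names `serializeUpToLimit` and `serializedMetrics` are hypothetical, not from the original code): write one metric at a time, remember the last buffer position that still fit, and stop once the frame-size limit would be crossed.

```java
import java.nio.ByteBuffer;
import java.util.List;

class TruncatingSerializerSketch {
    // Hypothetical sketch: append metrics one at a time and keep only as many
    // as fit under the frame-size limit, rolling back the overflowing write.
    static int serializeUpToLimit(List<byte[]> serializedMetrics, ByteBuffer buffer, long maximumFramesize) {
        int lastFittingPosition = 0;
        int metricsWritten = 0;
        for (byte[] metric : serializedMetrics) {
            if (buffer.position() + metric.length > maximumFramesize) {
                buffer.position(lastFittingPosition); // drop the metric that would overflow
                break;
            }
            buffer.put(metric);
            lastFittingPosition = buffer.position();
            metricsWritten++;
        }
        return metricsWritten;
    }
}
```

Dropping histograms first, as suggested, would then just mean ordering `serializedMetrics` so that histograms come last.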
public void onReceive(Object message) { try { if (message instanceof AddMetric) { AddMetric added = (AddMetric) message; String metricName = added.metricName; Metric metric = added.metric; AbstractMetricGroup group = added.group; QueryScopeInfo info = group.getQueryServiceMetricInfo(FILTER); if (metric instanceof Counter) { counters.put((Counter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Gauge) { gauges.put((Gauge<?>) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Histogram) { histograms.put((Histogram) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Meter) { meters.put((Meter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } } else if (message instanceof RemoveMetric) { Metric metric = (((RemoveMetric) message).metric); if (metric instanceof Counter) { this.counters.remove(metric); } else if (metric instanceof Gauge) { this.gauges.remove(metric); } else if (metric instanceof Histogram) { this.histograms.remove(metric); } else if (metric instanceof Meter) { this.meters.remove(metric); } } else if (message instanceof CreateDump) { MetricDumpSerialization.MetricSerializationResult dump = serializer.serialize(counters, gauges, histograms, meters); int realMsgSize = dump.serializedMetrics.length; if (realMsgSize > maximumFramesize) { String overSizeErrorMsg = "The metric dump message size : " + realMsgSize + " exceeds the maximum akka framesize : " + maximumFramesize + "."; LOG.error(overSizeErrorMsg); getSender().tell(new Status.Failure(new IOException(overSizeErrorMsg)), getSelf()); } else { getSender().tell(dump, getSelf()); } } else { LOG.warn("MetricQueryServiceActor received an invalid message. " + message.toString()); getSender().tell(new Status.Failure(new IOException("MetricQueryServiceActor received an invalid message. " + message.toString())), getSelf()); } } catch (Exception e) { LOG.warn("An exception occurred while processing a message.", e); } }
getSender().tell(new Status.Failure(new IOException(overSizeErrorMsg)), getSelf());
public void onReceive(Object message) { try { if (message instanceof AddMetric) { AddMetric added = (AddMetric) message; String metricName = added.metricName; Metric metric = added.metric; AbstractMetricGroup group = added.group; QueryScopeInfo info = group.getQueryServiceMetricInfo(FILTER); if (metric instanceof Counter) { counters.put((Counter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Gauge) { gauges.put((Gauge<?>) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Histogram) { histograms.put((Histogram) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } else if (metric instanceof Meter) { meters.put((Meter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName))); } } else if (message instanceof RemoveMetric) { Metric metric = (((RemoveMetric) message).metric); if (metric instanceof Counter) { this.counters.remove(metric); } else if (metric instanceof Gauge) { this.gauges.remove(metric); } else if (metric instanceof Histogram) { this.histograms.remove(metric); } else if (metric instanceof Meter) { this.meters.remove(metric); } } else if (message instanceof CreateDump) { MetricDumpSerialization.MetricSerializationResult dump = serializer.serialize(counters, gauges, histograms, meters); dump = enforceSizeLimit(dump); getSender().tell(dump, getSelf()); } else { LOG.warn("MetricQueryServiceActor received an invalid message. " + message.toString()); getSender().tell(new Status.Failure(new IOException("MetricQueryServiceActor received an invalid message. " + message.toString())), getSelf()); } } catch (Exception e) { LOG.warn("An exception occurred while processing a message.", e); } }
class MetricQueryService extends UntypedActor { private static final Logger LOG = LoggerFactory.getLogger(MetricQueryService.class); public static final String METRIC_QUERY_SERVICE_NAME = "MetricQueryService"; public static final String MAXIMUM_FRAME_SIZE_PATH = "akka.remote.netty.tcp.maximum-frame-size"; private static final CharacterFilter FILTER = new CharacterFilter() { @Override public String filterCharacters(String input) { return replaceInvalidChars(input); } }; private final MetricDumpSerializer serializer = new MetricDumpSerializer(); private final Map<Gauge<?>, Tuple2<QueryScopeInfo, String>> gauges = new HashMap<>(); private final Map<Counter, Tuple2<QueryScopeInfo, String>> counters = new HashMap<>(); private final Map<Histogram, Tuple2<QueryScopeInfo, String>> histograms = new HashMap<>(); private final Map<Meter, Tuple2<QueryScopeInfo, String>> meters = new HashMap<>(); private long maximumFramesize; @Override public void preStart() throws Exception { if (getContext().system().settings().config().hasPath(MAXIMUM_FRAME_SIZE_PATH)) { maximumFramesize = getContext().system().settings().config().getBytes(MAXIMUM_FRAME_SIZE_PATH); } else { maximumFramesize = Long.MAX_VALUE; } } @Override public void postStop() { serializer.close(); } @Override /** * Lightweight method to replace unsupported characters. * If the string does not contain any unsupported characters, this method creates no * new string (and in fact no new objects at all). * * <p>Replacements: * * <ul> * <li>{@code space : . ,} are replaced by {@code _} (underscore)</li> * </ul> */ static String replaceInvalidChars(String str) { char[] chars = null; final int strLen = str.length(); int pos = 0; for (int i = 0; i < strLen; i++) { final char c = str.charAt(i); switch (c) { case ' ': case '.': case ':': case ',': if (chars == null) { chars = str.toCharArray(); } chars[pos++] = '_'; break; default: if (chars != null) { chars[pos] = c; } pos++; } } return chars == null ? str : new String(chars, 0, pos); } /** * Starts the MetricQueryService actor in the given actor system. * * @param actorSystem The actor system running the MetricQueryService * @param resourceID resource ID to disambiguate the actor name * @return actor reference to the MetricQueryService */ public static ActorRef startMetricQueryService(ActorSystem actorSystem, ResourceID resourceID) { String actorName = resourceID == null ? METRIC_QUERY_SERVICE_NAME : METRIC_QUERY_SERVICE_NAME + "_" + resourceID.getResourceIdString(); return actorSystem.actorOf(Props.create(MetricQueryService.class), actorName); } /** * Utility method to notify a MetricQueryService of an added metric. * * @param service MetricQueryService to notify * @param metric added metric * @param metricName metric name * @param group group the metric was added on */ public static void notifyOfAddedMetric(ActorRef service, Metric metric, String metricName, AbstractMetricGroup group) { service.tell(new AddMetric(metricName, metric, group), null); } /** * Utility method to notify a MetricQueryService of a removed metric. 
* * @param service MetricQueryService to notify * @param metric removed metric */ public static void notifyOfRemovedMetric(ActorRef service, Metric metric) { service.tell(new RemoveMetric(metric), null); } private static class AddMetric { private final String metricName; private final Metric metric; private final AbstractMetricGroup group; private AddMetric(String metricName, Metric metric, AbstractMetricGroup group) { this.metricName = metricName; this.metric = metric; this.group = group; } } private static class RemoveMetric { private final Metric metric; private RemoveMetric(Metric metric) { this.metric = metric; } } public static Object getCreateDump() { return CreateDump.INSTANCE; } private static class CreateDump implements Serializable { private static final CreateDump INSTANCE = new CreateDump(); } }
class MetricQueryService extends UntypedActor { private static final Logger LOG = LoggerFactory.getLogger(MetricQueryService.class); public static final String METRIC_QUERY_SERVICE_NAME = "MetricQueryService"; private static final String SIZE_EXCEEDED_LOG_TEMPLATE = "{} will not be reported as the metric dump would exceed the maximum size of {} bytes."; private static final CharacterFilter FILTER = new CharacterFilter() { @Override public String filterCharacters(String input) { return replaceInvalidChars(input); } }; private final MetricDumpSerializer serializer = new MetricDumpSerializer(); private final Map<Gauge<?>, Tuple2<QueryScopeInfo, String>> gauges = new HashMap<>(); private final Map<Counter, Tuple2<QueryScopeInfo, String>> counters = new HashMap<>(); private final Map<Histogram, Tuple2<QueryScopeInfo, String>> histograms = new HashMap<>(); private final Map<Meter, Tuple2<QueryScopeInfo, String>> meters = new HashMap<>(); private final long messageSizeLimit; public MetricQueryService(long messageSizeLimit) { this.messageSizeLimit = messageSizeLimit; } @Override public void postStop() { serializer.close(); } @Override private MetricDumpSerialization.MetricSerializationResult enforceSizeLimit( MetricDumpSerialization.MetricSerializationResult serializationResult) { int currentLength = 0; boolean hasExceededBefore = false; byte[] serializedCounters = serializationResult.serializedCounters; int numCounters = serializationResult.numCounters; if (exceedsMessageSizeLimit(currentLength + serializationResult.serializedCounters.length)) { logDumpSizeWouldExceedLimit("Counters", hasExceededBefore); hasExceededBefore = true; serializedCounters = new byte[0]; numCounters = 0; } else { currentLength += serializedCounters.length; } byte[] serializedMeters = serializationResult.serializedMeters; int numMeters = serializationResult.numMeters; if (exceedsMessageSizeLimit(currentLength + serializationResult.serializedMeters.length)) { logDumpSizeWouldExceedLimit("Meters", hasExceededBefore); hasExceededBefore = true; serializedMeters = new byte[0]; numMeters = 0; } else { currentLength += serializedMeters.length; } byte[] serializedGauges = serializationResult.serializedGauges; int numGauges = serializationResult.numGauges; if (exceedsMessageSizeLimit(currentLength + serializationResult.serializedGauges.length)) { logDumpSizeWouldExceedLimit("Gauges", hasExceededBefore); hasExceededBefore = true; serializedGauges = new byte[0]; numGauges = 0; } else { currentLength += serializedGauges.length; } byte[] serializedHistograms = serializationResult.serializedHistograms; int numHistograms = serializationResult.numHistograms; if (exceedsMessageSizeLimit(currentLength + serializationResult.serializedHistograms.length)) { logDumpSizeWouldExceedLimit("Histograms", hasExceededBefore); hasExceededBefore = true; serializedHistograms = new byte[0]; numHistograms = 0; } return new MetricDumpSerialization.MetricSerializationResult( serializedCounters, serializedGauges, serializedMeters, serializedHistograms, numCounters, numGauges, numMeters, numHistograms); } private boolean exceedsMessageSizeLimit(final int currentSize) { return currentSize > messageSizeLimit; } private void logDumpSizeWouldExceedLimit(final String metricType, boolean hasExceededBefore) { if (LOG.isDebugEnabled()) { LOG.debug(SIZE_EXCEEDED_LOG_TEMPLATE, metricType, messageSizeLimit); } else { if (!hasExceededBefore) { LOG.info(SIZE_EXCEEDED_LOG_TEMPLATE, "Some metrics", messageSizeLimit); } } } /** * Lightweight method to replace unsupported 
characters. * If the string does not contain any unsupported characters, this method creates no * new string (and in fact no new objects at all). * * <p>Replacements: * * <ul> * <li>{@code space : . ,} are replaced by {@code _} (underscore)</li> * </ul> */ static String replaceInvalidChars(String str) { char[] chars = null; final int strLen = str.length(); int pos = 0; for (int i = 0; i < strLen; i++) { final char c = str.charAt(i); switch (c) { case ' ': case '.': case ':': case ',': if (chars == null) { chars = str.toCharArray(); } chars[pos++] = '_'; break; default: if (chars != null) { chars[pos] = c; } pos++; } } return chars == null ? str : new String(chars, 0, pos); } /** * Starts the MetricQueryService actor in the given actor system. * * @param actorSystem The actor system running the MetricQueryService * @param resourceID resource ID to disambiguate the actor name * @return actor reference to the MetricQueryService */ public static ActorRef startMetricQueryService( ActorSystem actorSystem, ResourceID resourceID, long maximumFramesize) { String actorName = resourceID == null ? METRIC_QUERY_SERVICE_NAME : METRIC_QUERY_SERVICE_NAME + "_" + resourceID.getResourceIdString(); return actorSystem.actorOf(Props.create(MetricQueryService.class, maximumFramesize), actorName); } /** * Utility method to notify a MetricQueryService of an added metric. * * @param service MetricQueryService to notify * @param metric added metric * @param metricName metric name * @param group group the metric was added on */ public static void notifyOfAddedMetric(ActorRef service, Metric metric, String metricName, AbstractMetricGroup group) { service.tell(new AddMetric(metricName, metric, group), null); } /** * Utility method to notify a MetricQueryService of a removed metric. * * @param service MetricQueryService to notify * @param metric removed metric */ public static void notifyOfRemovedMetric(ActorRef service, Metric metric) { service.tell(new RemoveMetric(metric), null); } private static class AddMetric { private final String metricName; private final Metric metric; private final AbstractMetricGroup group; private AddMetric(String metricName, Metric metric, AbstractMetricGroup group) { this.metricName = metricName; this.metric = metric; this.group = group; } } private static class RemoveMetric { private final Metric metric; private RemoveMetric(Metric metric) { this.metric = metric; } } public static Object getCreateDump() { return CreateDump.INSTANCE; } private static class CreateDump implements Serializable { private static final CreateDump INSTANCE = new CreateDump(); } }
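The record above shows Flink's MetricQueryService being changed to trim a metric dump to a configured message size limit: each serialized category (counters, meters, gauges, histograms) is appended only while the running total stays under the limit, and an overflowing category is replaced by an empty payload so the dump still deserializes. What follows is a minimal sketch of that pattern, assuming illustrative names (SizeLimitSketch, enforce) rather than the actual Flink API:

import java.util.ArrayList;
import java.util.List;

public class SizeLimitSketch {
    private final long messageSizeLimit;

    public SizeLimitSketch(long messageSizeLimit) {
        this.messageSizeLimit = messageSizeLimit;
    }

    // Keep each category only if adding it keeps the cumulative size under the
    // limit; a dropped category becomes an empty slot so positions are preserved.
    public List<byte[]> enforce(List<byte[]> categories) {
        List<byte[]> kept = new ArrayList<>();
        long current = 0;
        for (byte[] category : categories) {
            if (current + category.length > messageSizeLimit) {
                kept.add(new byte[0]); // dropped; does not count towards the total
            } else {
                current += category.length;
                kept.add(category);
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        SizeLimitSketch sketch = new SizeLimitSketch(10);
        for (byte[] c : sketch.enforce(List.of(new byte[4], new byte[8], new byte[3]))) {
            System.out.println(c.length); // prints 4, 0, 3: the 8-byte category alone overflows
        }
    }
}

Note that, as in the real change, a dropped category does not add to the running total, so a later, smaller category can still fit.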
I reuse the `parseListeners()` here, which has the following definition: `<code>expression-list := expression (, expression)*</code>`. Shall I rename it to `parseExpressionsList()`, and the corresponding `LISTNERS_LIST` context name to `EXPRESSIONS_LIST` in the error handler? Or shall we keep those names intact?
private STNode parseOptionalExpressionsList() { STToken nextToken = peek(); if (isEndOfListenersList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } return parseListeners(); }
return parseListeners();
private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode expr = parseExpression(); expressions.add(expr); nextToken = peek(); STNode leadingComma; while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); expressions.add(leadingComma); expr = parseExpression(); expressions.add(expr); nextToken = peek(); } return STNodeFactory.createNodeList(expressions); }
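The revised method above inlines the comma-separated loop for `expression-list := expression (, expression)*` instead of delegating to `parseListeners()`. A hedged, self-contained sketch of that shape over a flat token list follows; the String tokens and the terminator choice are stand-ins for the real `STToken`/`STNode` machinery, and the error-handler recovery used by the actual parser is omitted:

import java.util.ArrayList;
import java.util.List;

public class ExpressionListSketch {

    // expression-list := expression (, expression)*  (an empty list is also valid)
    static List<String> parseOptionalExpressionsList(List<String> tokens) {
        List<String> nodes = new ArrayList<>();
        int i = 0;
        if (i >= tokens.size() || isEndOfExpressionsList(tokens.get(i))) {
            return nodes; // optional list: nothing to parse
        }
        nodes.add(tokens.get(i++)); // first expression
        while (i < tokens.size() && !isEndOfExpressionsList(tokens.get(i))) {
            if (!",".equals(tokens.get(i))) {
                throw new IllegalStateException("expected ',' but found " + tokens.get(i));
            }
            nodes.add(tokens.get(i++)); // leading comma
            nodes.add(tokens.get(i++)); // next expression (assumes well-formed input)
        }
        return nodes;
    }

    static boolean isEndOfExpressionsList(String token) {
        return "{".equals(token); // e.g. a service body start terminates the listener list
    }

    public static void main(String[] args) {
        // ep1 and ep2 are hypothetical listener expressions
        System.out.println(parseOptionalExpressionsList(List.of("ep1", ",", "ep2", "{")));
        // -> [ep1, ,, ep2]
    }
}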
class BallerinaParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION; private final BallerinaParserErrorHandler errorHandler; private final AbstractTokenReader tokenReader; private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM; protected BallerinaParser(AbstractTokenReader tokenReader) { this.tokenReader = tokenReader; this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this); } /** * Start parsing the given input. * * @return Parsed node */ public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ public STNode resumeParsing(ParserRuleContext context, Object... 
args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody(); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: return parseOpenParenthesis(); case PARAM_LIST: return parseParamList(); case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor(); case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case FUNC_BODY_BLOCK: return parseFunctionBodyBlock(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case TERMINAL_EXPRESSION: return parseTerminalExpression((boolean) args[0], (boolean) args[1]); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[1], (STNode) args[0], (boolean) args[2], (boolean) args[3]); case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptor(); case OBJECT_MEMBER: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case FIELD_OR_FUNC_NAME: case SERVICE_NAME: return parseIdentifier(context); case IMPORT_KEYWORD: return 
parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case AS_KEYWORD: return parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: return parseMappingField((STNode) args[0]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]); case ARRAY_LENGTH: return parseArrayLength(); case FUNC_DEFINITION: case REQUIRED_PARAM: case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case STMT_START_WITH_EXPR_RHS: return parseStamentStartWithExpr((STNode) args[0]); case COMMA: return parseComma(); case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]); case STMT_START_WITH_IDENTIFIER: return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]); case PARAMETERIZED_TYPE_DESCRIPTOR: return parseParameterizedTypeDescriptor(); case LT: return parseLTToken(); case GT: return parseGTToken(); case NIL_LITERAL: return parseNilLiteral(); case RECORD_FIELD_OR_RECORD_END: return parseFieldOrRestDescriptor((boolean) args[0]); case ANNOTATION_KEYWORD: return parseAnnotationKeyword(); case ANNOT_DECL_OPTIONAL_TYPE: return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case ANNOT_DECL_RHS: return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case ANNOT_OPTIONAL_ATTACH_POINTS: return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]); case SOURCE_KEYWORD: return parseSourceKeyword(); case ATTACH_POINT_IDENT: return parseAttachPointIdent((STNode) args[0]); case IDENT_AFTER_OBJECT_IDENT: return parseIdentAfterObjectIdent(); case FUNCTION_IDENT: return parseFunctionIdent(); case FIELD_IDENT: return parseFieldIdent(); case ATTACH_POINT_END: return parseAttachPointEnd(); case XMLNS_KEYWORD: return parseXMLNSKeyword(); case XML_NAMESPACE_PREFIX_DECL: return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]); case NAMESPACE_PREFIX: return parseNamespacePrefix(); case WORKER_KEYWORD: return parseWorkerKeyword(); case WORKER_NAME: return parseWorkerName(); case FORK_KEYWORD: return parseForkKeyword(); case DECIMAL_FLOATING_POINT_LITERAL: return parseDecimalFloatingPointLiteral(); case 
HEX_FLOATING_POINT_LITERAL: return parseHexFloatingPointLiteral(); case TRAP_KEYWORD: return parseTrapKeyword(); default: throw new IllegalStateException("Cannot re-parse rule: " + context); } } /* * Private methods */ private STToken peek() { return this.tokenReader.peek(); } private STToken peek(int k) { return this.tokenReader.peek(k); } private STToken consume() { return this.tokenReader.read(); } private Solution recover(STToken token, ParserRuleContext currentCtx, Object... parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. * * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = createEmptyMetadata(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. 
* * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. */ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. 
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. * * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. 
* * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); while (!isEndOfImportModuleName(nextTokenKind)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. * <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse version keywrod. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. 
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version. i.e: minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. * @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword. 
* * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. * * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, getQualifier(qualifier)); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamepsaceDeclaration(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. 
* * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse function definition. A function definition has the following structure. * </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... 
[param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param leadingComma Comma that occurs before the param * @return Parsed node */ private STNode parseParameter(STNode leadingComma) { STToken token = peek(); if (this.currentParamKind == ParserRuleContext.REST_PARAM) { this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter"); startContext(ParserRuleContext.REQUIRED_PARAM); } else { startContext(this.currentParamKind); } return parseParameter(token.kind, leadingComma, 1); } private STNode parseParameter(STNode leadingComma, int nextTokenOffset) { return parseParameter(peek().kind, leadingComma, nextTokenOffset); } private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode qualifier = STNodeFactory.createEmptyNode(); return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } default: if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, leadingComma, 0); } return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1); } private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) { return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset); } private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots, int nextTokenOffset) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { qualifier = STNodeFactory.createEmptyNode(); break; } case AT_TOKEN: default: if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) { qualifier = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots, 
nextTokenOffset); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0); } return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) { STNode type = parseTypeDescriptor(); STNode param = parseAfterParamType(leadingComma, annots, qualifier, type); endContext(); return param; } /** * Check whether the cursor is at the start of a parameter that doesn't have annotations. * * @param tokenOffset Offset of the token to check * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise. */ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. 
* </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. * * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
* * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode typeDesc = parseTypeDescriptor(token.kind); return parseComplexTypeDescriptor(typeDesc); } /** * This will handle the parsing of optional,array,union type desc to infinite length. * * @param typeDesc * * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc)); case OPEN_BRACKET_TOKEN: return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc)); default: return typeDesc; } } /** * <p> * Parse a type descriptor, given the next token kind. * </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTypeDescriptor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilTypeDescriptor(); case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return parseParameterizedTypeDescriptor(); default: if (isSimpleType(tokenKind)) { return parseSimpleTypeDescriptor(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptor(solution.tokenKind); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); if (isSimpleType(node.kind)) { STToken token = consume(); SyntaxKind typeKind = getTypeSyntaxKind(token.kind); return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token); } else { Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. 
* </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. * </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: return false; } 
} private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: return false; } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. * </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. * * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. 
<code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. * * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; case RIGHT_ARROW_TOKEN: return OperatorPrecedence.ACTION; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case UNARY: case ACTION: case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
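*/
/*
 * Worked example for getOpPrecedence above: when parsing `a + b * c`, the `+`
 * (ADDITIVE) is seen first; its right-hand side is then parsed at the ADDITIVE
 * level, where the tighter-binding `*` (MULTIPLICATIVE) is allowed to fold
 * `b * c` first, yielding `a + (b * c)` rather than `(a + b) * c`. Conversely,
 * for `a * b + c` the `+` is not consumed while parsing the RHS of `*`, so
 * `a * b` is reduced before `+` is handled.
 */
/**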
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { switch (startingDelimeter) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case OPEN_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. 
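*/
/*
 * Example inputs for parseRecordTypeDescriptor above:
 *
 *     type Open record { int x; };        // inclusive body: `{ ... }`
 *     type Closed record {| int x; |};    // exclusive body: `{| ... |}`,
 *                                         // may end with a rest descriptor
 */
/**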
* * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. * </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); boolean endOfFields = false; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { endOfFields = true; break; } recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { return parseFieldOrRestDescriptor(peek().kind, isInclusive); } private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(nextTokenKind); type = parseTypeDescriptor(); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; default: if (isTypeStartingToken(nextTokenKind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = createEmptyMetadata(); type = parseTypeDescriptor(nextTokenKind); fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive); } } private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) { if (isInclusive) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. 
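*/
/*
 * Example of the field shapes handled by parseFieldOrRestDescriptor above,
 * inside an exclusive record body (Entity is a hypothetical type):
 *
 *     type Person record {|
 *         *Entity;         // record-type-reference
 *         string name;     // individual field
 *         int age = 0;     // individual field with a default value
 *         string...;       // record-rest-descriptor; must come last
 *     |};
 *
 * parseQualifiedIdentifier below distinguishes `io:println` (qualified name
 * reference) from a plain `println` (simple name reference) by peeking for a
 * colon followed by an identifier.
 */
/**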
* * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. * * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? 
| default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. * * @return Parsed node */ private STNode parseStatements() { STToken token = peek(); ArrayList<STNode> stmts = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } stmts.add(stmt); token = peek(); } return STNodeFactory.createNodeList(stmts); } private boolean isEndOfStatements(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.STATEMENT, 1); default: return false; } } /** * Parse a single statement. 
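*/
/*
 * Example endings dispatched by parseFieldDescriptorRhs above:
 *
 *     int a;        // required field       (SEMICOLON_TOKEN)
 *     int b?;       // optional field       (QUESTION_MARK_TOKEN)
 *     int c = 10;   // field with default   (EQUAL_TOKEN)
 */
/**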
* * @return Parsed node */ protected STNode parseStatement() { STToken token = peek(); return parseStatement(token.kind); } private STNode parseStatement(SyntaxKind tokenKind) { STNode annots = null; switch (tokenKind) { case CLOSE_BRACE_TOKEN: return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(); case AT_TOKEN: annots = parseAnnotations(tokenKind); tokenKind = peek().kind; break; case FINAL_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case PANIC_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case LOCK_KEYWORD: case OPEN_BRACE_TOKEN: case FORK_KEYWORD: case WORKER_KEYWORD: break; default: if (isTypeStartingToken(tokenKind)) { break; } if (isValidLHSExpression(tokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind); } return parseStatement(tokenKind, annots); } private STNode getAnnotations(STNode nullableAnnot) { if (nullableAnnot != null) { return nullableAnnot; } return STNodeFactory.createNodeList(new ArrayList<>()); } private STNode parseStatement(STNode annots) { return parseStatement(peek().kind, annots); } /** * Parse a single statement, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatement(SyntaxKind tokenKind, STNode annots) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: this.errorHandler.reportInvalidNode(null, "invalid annotations"); return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseStamentStartsWithExpr(tokenKind); case IDENTIFIER_TOKEN: STToken nextToken = peek(2); if (nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } return parseStatementStartsWithIdentifier(getAnnotations(annots)); case LOCK_KEYWORD: return parseLockStatement(); case OPEN_BRACE_TOKEN: return parseBlockNode(); case WORKER_KEYWORD: return parseNamedWorkerDeclaration(getAnnotations(annots)); case FORK_KEYWORD: return parseForkStatement(); default: if (isTypeStartingToken(tokenKind)) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind, annots); } } private STNode getNextNextToken(SyntaxKind tokenKind) { return peek(1).kind == tokenKind ? peek(2) : peek(1); } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level.
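*/
/*
 * Example dispatch in parseStatement above: for an input statement
 * `@ann int x = f();` (ann and f are hypothetical names), the AT_TOKEN case
 * first parses the annotation list, then re-dispatches on the next token
 * `int`, which is a type-starting token and therefore parses as a local
 * variable declaration with the annotations attached.
 */
/**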
* </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar); endContext(); return varDecl; } /** * Parse final keyword. * * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STToken token = peek(); return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar); } /** * Parse the right hand side of a variable declaration statement, given the * next token kind. * * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. 
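*/
/*
 * Example for parseVarDeclRhs above (f is a hypothetical function):
 *
 *     int x = f();   // EQUAL_TOKEN branch
 *     int y;         // SEMICOLON_TOKEN branch; at module level this reports
 *                    // "assignment required" but still produces a node
 */
/**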
* </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(SyntaxKind tokenKind) { return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } /** * Parse expression. * * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case MEMBER_ACCESS: return isValidLVExpr(((STMemberAccessExpressionNode) expression).containerExpression); default: return false; } } /** * Parse an expression that has an equal or higher precedence than a given level. * * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. 
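*/
/*
 * Example for isValidLVExpr above: only name references, and field/member
 * access chains that bottom out in one, are accepted as assignment targets:
 *
 *     x = 1;         // valid: simple name reference
 *     p.name = "a";  // valid: field access on a name reference
 *     m["k"] = 1;    // valid: member access on a name reference
 *     f().x = 1;     // invalid: the chain bottoms out in a function call
 */
/**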
* * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) { return parseTerminalExpression(peek().kind, isRhsExpr, allowActions); } private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(isRhsExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); default: Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_PAREN_TOKEN && peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return parseListConstructorExpr(); } return solution.recoveredNode; } } private STNode parseActionOrExpressionInLhs(STNode lhsExpr) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true); } /** * <p> * Parse the right-hand-side of an expression. * </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. 
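*/
/*
 * Example for parseTerminalExpression above: on OPEN_PAREN_TOKEN the parser
 * peeks one more token to separate the nil literal from a braced expression:
 *
 *     ()         // nil literal
 *     (a + b)    // braced expression
 */
/**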
* * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseAction(tokenKind, lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. * * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access expression and method call expression. * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. 
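*/
/*
 * Example for parseExpressionRhs above: postfix constructs share the
 * MEMBER_ACCESS precedence, so a chain such as `a.b[i](x)` is folded left to
 * right: field access `a.b`, then member access `[i]`, then a call `(x)`.
 * A `->` instead starts a remote method call action, which is reported as
 * invalid when the current context does not allow actions.
 */
/**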
*/ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. * </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(); STNode expr; if (allowActions) { expr = parseActionOrExpression(isRhsExpr); } else { expr = parseExpression(isRhsExpr); } STNode closeParen = parseCloseParenthesis(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } } /** * Check whether a given node is an action node. * * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case AS_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. It is assumed that we come here after validation. * * @return Parsed node */ private STNode parseBasicLiteral() { STToken literalToken = consume(); return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken); } /** * Parse function call expression. * <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen); } /** * Parse function call argument list. 
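*/
/*
 * Example for parseBracedExpression/isAction above (conn is a hypothetical
 * client object):
 *
 *     (a + b)            // BRACED_EXPRESSION
 *     (conn->get("/"))   // BRACED_ACTION: the inner remote call is an action
 */
/**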
* * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the first argument */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments. * * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot have more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Starting token of the arg * @param lastArgKind Kind of the previously processed arg * @param argKind Kind of the current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumes that peek()/peek(1) * is always an identifier.
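*/
/*
 * Example for parseFollowUpArg above, assuming the SyntaxKind enum declares
 * POSITIONAL_ARG, NAMED_ARG and REST_ARG in that ordinal order (foo and rest
 * are hypothetical names):
 *
 *     foo(1, 2, name = "x", ...rest)   // accepted: kinds are non-decreasing
 *     foo(name = "x", 1)               // rejected: "cannot have a
 *                                      // POSITIONAL_ARG after the NAMED_ARG"
 */
/**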
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: default: expr = parseExpression(); return STNodeFactory.createPositionalArgumentNode(leadingComma, expr); } } /** * Parse object type descriptor. * * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. 
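*/
/*
 * Example for parseObjectTypeQualifiers above: at most one of each qualifier
 * is accepted, in either order:
 *
 *     object { ... }
 *     client object { ... }
 *     abstract client object { ... }
 */
/**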
* * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. * * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. * * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfObjectTypeNode(nextToken.kind)) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode member = parseObjectMember(nextToken.kind); endContext(); if (member == null) { break; } objectMembers.add(member); nextToken = peek(); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isTypeStartingToken(nextTokenKind)) { metadata = createEmptyMetadata(); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode()); break; default: if (isTypeStartingToken(nextTokenKind)) { member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * 
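*/
/*
 * Example member shapes dispatched by parseObjectMember above (Entity is a
 * hypothetical type):
 *
 *     *Entity;                          // type reference
 *     private int count = 0;            // field with visibility and default
 *     remote function ping() { }        // remote method
 *     function name() returns string { return "n"; }   // method
 */
/**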
Parse an object member, given the visibility modifier. An object member can have * at most one visibility qualifier, so the methodQualifiers list can contain * one qualifier at most. * * @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the next token * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers. A qualifier is a syntax node with either * 'PUBLIC' or 'PRIVATE'. * @return Parsed object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
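*/
/*
 * Example for parseObjectMethodOrField above, after a visibility qualifier
 * (Person is a hypothetical type):
 *
 *     public function get() { }          // FUNCTION_KEYWORD -> method
 *     public remote function poll() { }  // REMOTE_KEYWORD -> remote method
 *     public string name;                // type-starting token -> field
 *     public Person owner;               // identifier not followed by `(` -> field
 */
/**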
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFunctionDefinition(metadata, methodQualifiers); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. * * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. 
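*/
/*
 * Example for parseIfElseBlock above: the else body is either a block or a
 * nested if-else statement, which is how `else if` chains are formed:
 *
 *     if x > 0 { ... } else if x < 0 { ... } else { ... }
 */
/**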
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. * * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. 
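*/
/*
 * Example for parseCheckExpression above (parseNum and conn are hypothetical
 * names):
 *
 *     int n = check parseNum(s);         // CHECK_EXPRESSION: operand is an expression
 *     string r = check conn->get("/");   // CHECK_ACTION: operand is a remote call action
 *
 * `checkpanic` parses the same way but panics instead of returning the error.
 */
/**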
* <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. * * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. * * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. * </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STNode semicolon; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. 
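*/
/*
 * Example for parseReturnStatementRhs above: the expression is optional:
 *
 *     return;          // SEMICOLON_TOKEN: empty expression node
 *     return x + 1;    // action-or-expr followed by `;`
 */
/**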
* * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { leadingComma = parseComma(); field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return false; case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse mapping constructor field. * <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(STNode leadingComma) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, leadingComma); } private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(leadingComma); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(leadingComma); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingField(solution.tokenKind, leadingComma); } } /** * Parse mapping constructor specific-field with an optional value. 
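* <p> * For example, in the hypothetical mapping {@code {x, y: 2}} the field {@code x} has no explicit value (the colon and value-expr are left empty), while {@code y: 2} carries one.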
* * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. * <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. 
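* For example, given a hypothetical statement {@code total += delta * 2;}, the caller has already consumed {@code total}, and this method parses {@code += delta * 2;}.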
* </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. * <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); return service; } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. 
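* <p> * For example, in a hypothetical declaration {@code service hello on ep1, ep2 { ... }}, this keyword separates the optional service name from the listener expression-list.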
* * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. * <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfListenersList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfListenersList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfListenersList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case DOCUMENTATION_LINE: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. * <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). 
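* <p> * For example, a hypothetical resource definition {@code resource function greet() { ... }} matches the grammar below: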
* <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFunctionDefinition(metadata, resourceKeyword); case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode()); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached when the next token is * a service-keyword, because the service-keyword can be used in statements as well * as in a top-level node (service-decl). If we have reached a service-decl, then * it could be due to a missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. * <code>false</code> otherwise */ private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return false; case ON_KEYWORD: return true; default: ParserRuleContext sol = this.errorHandler.findBestPath(currentContext); return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE; } case ON_KEYWORD: return true; default: this.errorHandler.removeInvalidToken(); return false; } } /** * Parse listener declaration, given the qualifier.
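* <p> * For example, a hypothetical declaration {@code public listener SomeType ep = getListener();} is parsed as listener-keyword, type-descriptor, variable-name, assign-op, initializer expression, and semicolon, in that order.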
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. * <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follow the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: switchContext(ParserRuleContext.ANNOTATION_DECL); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name where the type-name * is not present.
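* <p> * For example, in the hypothetical {@code const PI = 3.14;} the first identifier is the var-name (no type present), whereas in {@code const Amount limit = 5;} it is the user-defined type-name {@code Amount}.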
* * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = typeOrVarName; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. * * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * <p> * <code>nil-type-descriptor := ( ) </code> * </p> * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! 
expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param typeDescriptorNode * * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createArrayTypeDescriptorNode(typeDescriptorNode, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
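* <p> * For example, a hypothetical attachment {@code @config {retries: 3}} matches the grammar below; note that this parser always expects a mapping-constructor annot-value.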
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Metadata consists of an optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parsed node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. * <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { startContext(ParserRuleContext.TYPE_TEST_EXPRESSION); STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(); endContext(); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement. * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return Local type definition statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statements that start with an identifier.
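* <p> * Disambiguation examples (hypothetical): {@code foo bar = 1;} is a variable declaration (identifier followed by identifier), {@code foo = 1;} is an assignment, and {@code foo += 1;} is a compound assignment.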
* * @param annots Annotations that precede the statement * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER); STNode identifier = parseStatementStartIdentifier(); STToken nextToken = peek(); STNode stmt = parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier); endContext(); return stmt; } private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) { return parseStatementStartsWithIdentifier(peek().kind, annots, identifier); } private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: switchContext(ParserRuleContext.VAR_DECL_STMT); STNode varName = parseVariableName(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, identifier, varName, false); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStamentStartWithExpr(nextTokenKind, identifier); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseActionOrExpressionInLhs(identifier); return parseStamentStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier); } } /** * Parse a statement that consists only of an action or expression. * * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpression(nextTokenKind); STNode stmt = parseStamentStartWithExpr(expression); endContext(); return stmt; } /** * Parse statements that start with an expression. * * @return Parsed node */ private STNode parseStamentStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStamentStartWithExpr(nextToken.kind, expression); } /** * Parse the component that follows the expression, at the beginning of a statement.
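* <p> * For example, once the call in the hypothetical {@code foo();} has been parsed, the trailing semicolon turns it into an expression statement, while an {@code =} token would route to assignment-statement parsing instead.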
* * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStamentStartWithExpr(solution.tokenKind, expression); } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) { switch (tokenKind) { case RIGHT_ARROW_TOKEN: return parseRemoteMethodCallAction(lhsExpr); default: return null; } } /** * Parse remote method call action, given the starting expression. 
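* <p> * For example, the hypothetical action {@code clientEp->get("/greeting")} matches the grammar below: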
* <p> * <code>remote-method-call-action := expression -> method-name ( arg-list )</code> * * @param expression LHS expression * @return Remote method call action node */ private STNode parseRemoteMethodCallAction(STNode expression) { STNode rightArrow = parseRightArrow(); STNode methodName = parseFunctionName(); STNode openParenToken = parseOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, methodName, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Check whether this is a valid lhs expression. * * @param tokenKind Kind of the next token * @return <code>true</code> if this is a valid LHS expression. <code>false</code> otherwise */ private boolean isValidLHSExpression(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; case PLUS_TOKEN: case MINUS_TOKEN: return !isCompoundBinaryOperator(tokenKind); case OPEN_PAREN_TOKEN: default: return false; } } /** * Parse parameterized type descriptor. * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { startContext(ParserRuleContext.PARAMETERIZED_TYPE_DESCRIPTOR); STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(); STNode gtToken = parseGTToken(); endContext(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode, gtToken); } /** * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token. * * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return consume(); default: Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return sol.recoveredNode; } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.GT); return sol.recoveredNode; } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.LT); return sol.recoveredNode; } } /** * Parse nil literal. Here, nil literal refers only to ( ).
* * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follow the annotation keyword of an annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode annotTag = parseAnnotationTag(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, annotTag, equalsToken, initializer, semicolonToken); } /** * Parse annotation tag.
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. * * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken token = peek(); return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STNode typeDesc; STNode annotTag; switch (nextTokenKind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STToken nextToken = peek(); return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; switch (nextTokenKind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNode(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); if (solution.action == 
Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing attach point"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { this.errorHandler.reportMissingTokenError("missing attach point"); attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. * * @return Parsed node */ private STNode parseAttachPointEnd() { STToken nextToken = peek(); return parseAttachPointEnd(nextToken.kind); } private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null; } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. 
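* <p> * For example (from the grammar above), {@code source var} is a source-only attach point, while {@code object function} is a dual attach point made of two idents.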
* * @return Parsed node */ private STNode parseAnnotationAttachPoint() { return parseAnnotationAttachPoint(peek().kind); } private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT); return solution.recoveredNode; } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD); return sol.recoveredNode; } } /** * Parse attach point ident, given the source keyword. * <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { return parseAttachPointIdent(peek().kind, sourceKeyword); } private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } firstIdent = solution.recoveredNode; return parseDualAttachPointIdent(sourceKeyword, firstIdent); } } /** * Parse dual-attach-point ident. * * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after the object-ident.
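* <p> * Only {@code type}, {@code function}, and {@code field} may follow it, as in the dual attach point {@code object field}.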
* * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case TYPE_KEYWORD: case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return sol.recoveredNode; } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT); return sol.recoveredNode; } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FIELD_IDENT); return sol.recoveredNode; } } /** * Parse XML namespace declaration. * <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return Parsed node */ private STNode parseXMLNamepsaceDeclaration() { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseXMLNamespaceUri(); STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD); return sol.recoveredNode; } } /** * Parse namespace uri. * * @return Parsed node */ private STNode parseXMLNamespaceUri() { STNode expr = parseConstExpr(); switch (expr.kind) { case STRING_LITERAL: case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string"); } return expr; } /** * Parse constant expression. * * @return Parsed node */ private STNode parseConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STToken nextToken = peek(); STNode expr; switch (nextToken.kind) { case STRING_LITERAL: case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: expr = consume(); break; case IDENTIFIER_TOKEN: expr = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); break; case OPEN_BRACE_TOKEN: default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); expr = solution.recoveredNode; break; } endContext(); return expr; } /** * Parse the portion after the namespace-uri of an XML declaration.
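* <p> * For example, in the hypothetical declaration {@code xmlns "http://example.com/ns" as ns;} this method parses the optional {@code as ns} part and the closing semicolon.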
* * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) { return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri); } private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (nextTokenKind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri); } STNode semicolon = parseSemicolon(); return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. * * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return sol.recoveredNode; } } /** * Parse named worker declaration. * <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD); return sol.recoveredNode; } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME); return sol.recoveredNode; } } /** * Parse documentation string. * <p> * <code>DocumentationString := DocumentationLine +</code> * <p> * Refer {@link BallerinaLexer * * @return Parsed node */ private STNode parseDocumentationString() { List<STNode> docLines = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) { docLines.add(consume()); nextToken = peek(); } STNode documentationLines = STNodeFactory.createNodeList(docLines); return STNodeFactory.createDocumentationStringNode(documentationLines); } /** * Parse lock statement. * <code>lock-stmt := lock block-stmt ;</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement); } /** * Parse lock-keyword. 
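* <p> * The keyword introduces a lock statement such as the hypothetical {@code lock { counter += 1; }}.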
* * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD); return sol.recoveredNode; } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return true; default: return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case XML_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case SERVICE_KEYWORD: case VAR_KEYWORD: return true; case TYPE_DESC: return true; default: return false; } } private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case XML_KEYWORD: return SyntaxKind.XML_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; default: return SyntaxKind.TYPE_DESC; } } /** * Parse fork-keyword. * * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD); return sol.recoveredNode; } } /** * Parse multiple named worker declarations. * * @return named-worker-declarations node array */ private STNode parseMultileNamedWorkerDeclarations() { STToken token = peek(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here"); break; } token = peek(); } if (workers.isEmpty()) { this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker"); } STNode namedWorkers = STNodeFactory.createNodeList(workers); return namedWorkers; } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); STNode namedWorkerDeclarations = parseMultileNamedWorkerDeclarations(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); } /** * Parse decimal floating point literal. 
* * @return Parsed node */ private STNode parseDecimalFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse hex floating point literal. * * @return Parsed node */ private STNode parseHexFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse trap expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param isRhsExpr * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTrapExpressionNode(trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD); return sol.recoveredNode; } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. * * @return Parsed node */ }
class BallerinaParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION; private final BallerinaParserErrorHandler errorHandler; private final AbstractTokenReader tokenReader; private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM; protected BallerinaParser(AbstractTokenReader tokenReader) { this.tokenReader = tokenReader; this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this); } /** * Start parsing the given input. * * @return Parsed node */ public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ public STNode resumeParsing(ParserRuleContext context, Object... 
args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody(); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: return parseOpenParenthesis(); case PARAM_LIST: return parseParamList(); case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor(); case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case FUNC_BODY_BLOCK: return parseFunctionBodyBlock(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case TERMINAL_EXPRESSION: return parseTerminalExpression((boolean) args[0], (boolean) args[1]); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[1], (STNode) args[0], (boolean) args[2], (boolean) args[3]); case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptor(); case OBJECT_MEMBER: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case FIELD_OR_FUNC_NAME: case SERVICE_NAME: return parseIdentifier(context); case IMPORT_KEYWORD: return 
parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case AS_KEYWORD: return parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: return parseMappingField((STNode) args[0]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]); case ARRAY_LENGTH: return parseArrayLength(); case FUNC_DEFINITION: case REQUIRED_PARAM: case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case STMT_START_WITH_EXPR_RHS: return parseStamentStartWithExpr((STNode) args[0]); case COMMA: return parseComma(); case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]); case STMT_START_WITH_IDENTIFIER: return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]); case PARAMETERIZED_TYPE_DESCRIPTOR: return parseParameterizedTypeDescriptor(); case LT: return parseLTToken(); case GT: return parseGTToken(); case NIL_LITERAL: return parseNilLiteral(); case RECORD_FIELD_OR_RECORD_END: return parseFieldOrRestDescriptor((boolean) args[0]); case ANNOTATION_KEYWORD: return parseAnnotationKeyword(); case ANNOT_DECL_OPTIONAL_TYPE: return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case ANNOT_DECL_RHS: return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case ANNOT_OPTIONAL_ATTACH_POINTS: return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]); case SOURCE_KEYWORD: return parseSourceKeyword(); case ATTACH_POINT_IDENT: return parseAttachPointIdent((STNode) args[0]); case IDENT_AFTER_OBJECT_IDENT: return parseIdentAfterObjectIdent(); case FUNCTION_IDENT: return parseFunctionIdent(); case FIELD_IDENT: return parseFieldIdent(); case ATTACH_POINT_END: return parseAttachPointEnd(); case XMLNS_KEYWORD: return parseXMLNSKeyword(); case XML_NAMESPACE_PREFIX_DECL: return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]); case NAMESPACE_PREFIX: return parseNamespacePrefix(); case WORKER_KEYWORD: return parseWorkerKeyword(); case WORKER_NAME: return parseWorkerName(); case FORK_KEYWORD: return parseForkKeyword(); case DECIMAL_FLOATING_POINT_LITERAL: return parseDecimalFloatingPointLiteral(); case 
HEX_FLOATING_POINT_LITERAL: return parseHexFloatingPointLiteral(); case TRAP_KEYWORD: return parseTrapKeyword(); default: throw new IllegalStateException("Cannot re-parse rule: " + context); } } /* * Private methods */ private STToken peek() { return this.tokenReader.peek(); } private STToken peek(int k) { return this.tokenReader.peek(k); } private STToken consume() { return this.tokenReader.read(); } private Solution recover(STToken token, ParserRuleContext currentCtx, Object... parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. * * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = createEmptyMetadata(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. 
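* <p>For example, <code>public function main() {}</code> is a top-level node preceded by the <code>public</code> qualifier (illustrative snippet, not from the original source).</p>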
* * @param metadata Metadata that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. */ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl.
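* <p>For example, given <code>import ballerina/io version 0.5.0 as io;</code> (illustrative snippet, not from the original source), this parses everything after the leading <code>ballerina</code> identifier.</p>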
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * Parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. * * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of an import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of an import declaration, given the module name start identifier.
* * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); while (!isEndOfImportModuleName(nextTokenKind)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of an import declaration. * <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse version keyword. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [.
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version, i.e., minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. * @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword.
* * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. * * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, getQualifier(qualifier)); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamepsaceDeclaration(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. 
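* <p>Only the <code>public</code> keyword is accepted as a qualifier here; any other token triggers error recovery.</p>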
* * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse function definition. A function definition has the following structure. * </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... 
[param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param leadingComma Comma that occurs before the param * @return Parsed node */ private STNode parseParameter(STNode leadingComma) { STToken token = peek(); if (this.currentParamKind == ParserRuleContext.REST_PARAM) { this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter"); startContext(ParserRuleContext.REQUIRED_PARAM); } else { startContext(this.currentParamKind); } return parseParameter(token.kind, leadingComma, 1); } private STNode parseParameter(STNode leadingComma, int nextTokenOffset) { return parseParameter(peek().kind, leadingComma, nextTokenOffset); } private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode qualifier = STNodeFactory.createEmptyNode(); return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } default: if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, leadingComma, 0); } return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1); } private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) { return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset); } private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots, int nextTokenOffset) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { qualifier = STNodeFactory.createEmptyNode(); break; } case AT_TOKEN: default: if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) { qualifier = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots, 
nextTokenOffset); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0); } return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) { STNode type = parseTypeDescriptor(); STNode param = parseAfterParamType(leadingComma, annots, qualifier, type); endContext(); return param; } /** * Check whether the cursor is at the start of a parameter that doesn't have annotations. * * @param tokenOffset Offset of the token to check * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise. */ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. 
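* An absent RHS keeps the parameter required, while <code>= expression</code> makes it defaultable, e.g. <code>int retries = 3</code> (illustrative example, not from the original source).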
* </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. * * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
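* <p>For example, <code>returns error?</code> (illustrative snippet, not from the original source). When the <code>returns</code> keyword is absent, an empty node is returned.</p>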
* * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode typeDesc = parseTypeDescriptor(token.kind); return parseComplexTypeDescriptor(typeDesc); } /** * This will handle the parsing of optional,array,union type desc to infinite length. * * @param typeDesc * * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc)); case OPEN_BRACKET_TOKEN: return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc)); default: return typeDesc; } } /** * <p> * Parse a type descriptor, given the next token kind. * </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTypeDescriptor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilTypeDescriptor(); case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return parseParameterizedTypeDescriptor(); default: if (isSimpleType(tokenKind)) { return parseSimpleTypeDescriptor(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptor(solution.tokenKind); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); if (isSimpleType(node.kind)) { STToken token = consume(); SyntaxKind typeKind = getTypeSyntaxKind(token.kind); return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token); } else { Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. 
* </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. * </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: return false; } 
} private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: return false; } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. * </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. * * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. 
<code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. * * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; case RIGHT_ARROW_TOKEN: return OperatorPrecedence.ACTION; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case UNARY: case ACTION: case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
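* <p>The keyword begins a module type definition, e.g. <code>type Person record { string name; };</code> (illustrative snippet, not from the original source).</p>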
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { switch (startingDelimeter) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case OPEN_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. 
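* <p>The keyword may be followed by an inclusive body such as <code>record { int x; }</code> or an exclusive body such as <code>record {| int x; |}</code> (illustrative snippets, not from the original source).</p>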
* * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. * </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); boolean endOfFields = false; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { endOfFields = true; break; } recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { return parseFieldOrRestDescriptor(peek().kind, isInclusive); } private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(nextTokenKind); type = parseTypeDescriptor(); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; default: if (isTypeStartingToken(nextTokenKind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = createEmptyMetadata(); type = parseTypeDescriptor(nextTokenKind); fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive); } } private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) { if (isInclusive) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. 
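* For example, {@code name} is a plain identifier, while {@code io:println} is a qualified identifier of the form module-prefix {@code :} identifier. 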
* * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. * * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? 
| default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. * * @return Parsed node */ private STNode parseStatements() { STToken token = peek(); ArrayList<STNode> stmts = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } stmts.add(stmt); token = peek(); } return STNodeFactory.createNodeList(stmts); } private boolean isEndOfStatements(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.STATEMENT, 1); default: return false; } } /** * Parse a single statement. 
* * @return Parsed node */ protected STNode parseStatement() { STToken token = peek(); return parseStatement(token.kind); } private STNode parseStatement(SyntaxKind tokenKind) { STNode annots = null; switch (tokenKind) { case CLOSE_BRACE_TOKEN: return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(); case AT_TOKEN: annots = parseAnnotations(tokenKind); tokenKind = peek().kind; break; case FINAL_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case PANIC_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case LOCK_KEYWORD: case OPEN_BRACE_TOKEN: case FORK_KEYWORD: case WORKER_KEYWORD: break; default: if (isTypeStartingToken(tokenKind)) { break; } if (isValidLHSExpression(tokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind); } return parseStatement(tokenKind, annots); } private STNode getAnnotations(STNode nullableAnnot) { if (nullableAnnot != null) { return nullableAnnot; } return STNodeFactory.createNodeList(new ArrayList<>()); } private STNode parseStatement(STNode annots) { return parseStatement(peek().kind, annots); } /** * Parse a single statement, given the next token kind. * * @param tokenKind Next token kind * @param annots Annotations * @return Parsed node */ private STNode parseStatement(SyntaxKind tokenKind, STNode annots) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: this.errorHandler.reportInvalidNode(null, "invalid annotations"); return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); /* re-peek, since the invalid token was removed */ return parseStatement(annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseStamentStartsWithExpr(tokenKind); case IDENTIFIER_TOKEN: STToken nextToken = peek(2); if (nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } return parseStatementStartsWithIdentifier(getAnnotations(annots)); case LOCK_KEYWORD: return parseLockStatement(); case OPEN_BRACE_TOKEN: return parseBlockNode(); case WORKER_KEYWORD: return parseNamedWorkerDeclaration(getAnnotations(annots)); case FORK_KEYWORD: return parseForkStatement(); default: if (isTypeStartingToken(tokenKind)) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind, annots); } } private STNode getNextNextToken(SyntaxKind tokenKind) { return peek(1).kind == tokenKind ? peek(2) : peek(1); } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. 
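* For example, {@code final int count = 0;} is an init-var-decl-stmt, while {@code string name;} is a no-init-var-decl-stmt (illustrative snippets of the grammar below). 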
* </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar); endContext(); return varDecl; } /** * Parse final keyword. * * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STToken token = peek(); return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar); } /** * Parse the right hand side of a variable declaration statement, given the * next token kind. * * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. 
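* For example, given the statement {@code x = getValue();}, this parses the {@code = getValue();} portion, with {@code x} already consumed as the lvexpr. 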
* </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(SyntaxKind tokenKind) { return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } /** * Parse expression. * * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case MEMBER_ACCESS: return isValidLVExpr(((STMemberAccessExpressionNode) expression).containerExpression); default: return false; } } /** * Parse an expression that has an equal or higher precedence than a given level. * * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. 
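* For example, when parsing {@code a + b * c}, this method produces the leaf expressions {@code a}, {@code b} and {@code c}; the binary tree around them is then folded in by the expression-rhs parsing according to operator precedence, yielding {@code a + (b * c)}. 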
* * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) { return parseTerminalExpression(peek().kind, isRhsExpr, allowActions); } private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(isRhsExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); default: Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_PAREN_TOKEN && peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return parseListConstructorExpr(); } return solution.recoveredNode; } } private STNode parseActionOrExpressionInLhs(STNode lhsExpr) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true); } /** * <p> * Parse the right-hand-side of an expression. * </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. 
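* For example, while parsing the right operand of {@code +} (ADDITIVE precedence), a following {@code *} binds tighter and is folded into that operand, whereas a following {@code <} binds looser, so this call returns the lhs and lets an outer invocation consume the {@code <}. 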
* * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseAction(tokenKind, lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. * * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access expression and method call expression. * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. 
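* For example, with the preceding expression {@code person}, {@code person.name} is parsed as a field-access-expression, while {@code person.getName()} is parsed as a method-call-expression because of the trailing open parenthesis. 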
*/ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. * </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(); STNode expr; if (allowActions) { expr = parseActionOrExpression(isRhsExpr); } else { expr = parseExpression(isRhsExpr); } STNode closeParen = parseCloseParenthesis(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } } /** * Check whether a given node is an action node. * * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case AS_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. It is assumed that we come here after validation. * * @return Parsed node */ private STNode parseBasicLiteral() { STToken literalToken = consume(); return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken); } /** * Parse function call expression. * <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen); } /** * Parse function call argument list. 
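* For example, for a call such as {@code format(template, count, sep = ",")} this parses the argument list between the parentheses: positional args first, then named args, with an optional rest arg ({@code ...exprs}) last. 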
* * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the first argument. */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments. * * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot have more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Starting token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumes that peek()/peek(1) * is always an identifier. 
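* For example, {@code x = 10} is a named arg, while a bare {@code x} followed by a comma or close-paren is a positional arg. 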
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: default: expr = parseExpression(); return STNodeFactory.createPositionalArgumentNode(leadingComma, expr); } } /** * Parse object type descriptor. * * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. 
* * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. * * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. * * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfObjectTypeNode(nextToken.kind)) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode member = parseObjectMember(nextToken.kind); endContext(); if (member == null) { break; } objectMembers.add(member); nextToken = peek(); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isTypeStartingToken(nextTokenKind)) { metadata = createEmptyMetadata(); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode()); break; default: if (isTypeStartingToken(nextTokenKind)) { member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * 
Parse an object member, given the visibility modifier. An object member can have * only one visibility qualifier, so the methodQualifiers list can have * at most one qualifier. * * @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the next token * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers. A qualifier can be * a syntax node with either 'PUBLIC' or 'PRIVATE'. * @return Parsed object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
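* For example, in {@code private int count = 0;} this parses the {@code = 0 ;} portion, while in {@code private int count;} it parses just the semicolon. 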
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFunctionDefinition(metadata, methodQualifiers); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. * * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. 
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. * * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. 
* <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. * * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. * * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. * </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STNode semicolon; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. 
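* For example, for {@code {id: 1, "full name": name, ...defaults}} this parses the three comma-separated fields: an identifier-keyed field, a string-literal-keyed field, and a spread field (illustrative snippets of the field grammar below). 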
* * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { leadingComma = parseComma(); field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return false; case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse mapping constructor field. * <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(STNode leadingComma) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, leadingComma); } private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(leadingComma); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(leadingComma); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingField(solution.tokenKind, leadingComma); } } /** * Parse mapping constructor specific-field with an optional value. 
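* For example, in {@code {name: fullName, age}}, the field {@code name: fullName} carries an explicit value expression, while {@code age} omits it and the value is taken from a variable of the same name. 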
* * @param leadingComma Leading comma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. * <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>compound-assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. 
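* For example, given {@code count += 1;}, this parses the {@code += 1;} portion: the {@code +} compound binary operator, the {@code =}, the expression, and the semicolon. 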
* </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. * <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); return service; } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. 
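* For example, this consumes the {@code on} token in a declaration such as {@code service hello on new http:Listener(9090) { ... }} (an illustrative snippet of the service-decl grammar above). 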
* * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. * <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfExpressionsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case DOCUMENTATION_LINE: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. * <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). 
* <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFunctionDefinition(metadata, resourceKeyword); case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode()); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached, if the next token is * a service-keyword. Because service-keyword can be used in statements as well * as in top-level node (service-decl). We have reached a service-decl, then * it could be due to missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. * <code>false</code> otherwise */ private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return false; case ON_KEYWORD: return true; default: ParserRuleContext sol = this.errorHandler.findBestPath(currentContext); return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE; } case ON_KEYWORD: return true; default: this.errorHandler.removeInvalidToken(); return false; } } /** * Parse listener declaration, given the qualifier. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. * <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: switchContext(ParserRuleContext.ANNOTATION_DECL); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. 
* * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = typeOrVarName; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. * * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * <p> * <code>nil-type-descriptor := ( ) </code> * </p> * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! 
expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param typeDescriptorNode * * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createArrayTypeDescriptorNode(typeDescriptorNode, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Metadata consists of an optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parsed node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. * <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { startContext(ParserRuleContext.TYPE_TEST_EXPRESSION); STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(); endContext(); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement. * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return Local type definition statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statements that start with an identifier. 
* * @param annots Annotations attached to the statement * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER); STNode identifier = parseStatementStartIdentifier(); STToken nextToken = peek(); STNode stmt = parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier); endContext(); return stmt; } private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) { return parseStatementStartsWithIdentifier(peek().kind, annots, identifier); } private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: switchContext(ParserRuleContext.VAR_DECL_STMT); STNode varName = parseVariableName(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, identifier, varName, false); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStamentStartWithExpr(nextTokenKind, identifier); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseActionOrExpressionInLhs(identifier); return parseStamentStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier); } } /** * Parse a statement that consists only of an action or expression. * * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpression(nextTokenKind); STNode stmt = parseStamentStartWithExpr(expression); endContext(); return stmt; } /** * Parse statements that start with an expression. * * @return Parsed node */ private STNode parseStamentStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStamentStartWithExpr(nextToken.kind, expression); } /** * Parse the component that follows the expression, at the beginning of a statement. 
* * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStamentStartWithExpr(solution.tokenKind, expression); } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) { switch (tokenKind) { case RIGHT_ARROW_TOKEN: return parseRemoteMethodCallAction(lhsExpr); default: return null; } } /** * Parse remote method call action, given the starting expression. 
* <p> * <code>remote-method-call-action := expression -> method-name ( arg-list )</code> * * @param expression LHS expression * @return Parsed node */ private STNode parseRemoteMethodCallAction(STNode expression) { STNode rightArrow = parseRightArrow(); STNode methodName = parseFunctionName(); STNode openParenToken = parseOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, methodName, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Check whether this is a valid lhs expression. * * @param tokenKind Kind of the next token * @return <code>true</code> if this is a start of an expression. <code>false</code> otherwise */ private boolean isValidLHSExpression(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; case PLUS_TOKEN: case MINUS_TOKEN: return !isCompoundBinaryOperator(tokenKind); case OPEN_PAREN_TOKEN: default: return false; } } /** * Parse parameterized type descriptor. * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { startContext(ParserRuleContext.PARAMETERIZED_TYPE_DESCRIPTOR); STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(); STNode gtToken = parseGTToken(); endContext(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode, gtToken); } /** * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token. * * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return consume(); default: Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return sol.recoveredNode; } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.GT); return sol.recoveredNode; } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.LT); return sol.recoveredNode; } } /** * Parse nil literal. Here the nil literal refers only to ( ). 
* * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode annotTag = parseAnnotationTag(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, annotTag, equalsToken, initializer, semicolonToken); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. * * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken token = peek(); return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STNode typeDesc; STNode annotTag; switch (nextTokenKind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STToken nextToken = peek(); return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; switch (nextTokenKind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNode(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); if (solution.action == 
Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing attach point"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { this.errorHandler.reportMissingTokenError("missing attach point"); attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. * * @return Parsed node */ private STNode parseAttachPointEnd() { STToken nextToken = peek(); return parseAttachPointEnd(nextToken.kind); } private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null; } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. 
* * @return Parsed node */ private STNode parseAnnotationAttachPoint() { return parseAnnotationAttachPoint(peek().kind); } private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT); return solution.recoveredNode; } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD); return sol.recoveredNode; } } /** * Parse attach point ident gievn. * <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { return parseAttachPointIdent(peek().kind, sourceKeyword); } private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } firstIdent = solution.recoveredNode; return parseDualAttachPointIdent(sourceKeyword, firstIdent); } } /** * Parse dual-attach-point ident. * * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after object-ident. 
* * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case TYPE_KEYWORD: case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return sol.recoveredNode; } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT); return sol.recoveredNode; } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FIELD_IDENT); return sol.recoveredNode; } } /** * Parse XML namespace declaration. * <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return Parsed node */ private STNode parseXMLNamepsaceDeclaration() { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseXMLNamespaceUri(); STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD); return sol.recoveredNode; } } /** * Parse namespace URI. * * @return Parsed node */ private STNode parseXMLNamespaceUri() { STNode expr = parseConstExpr(); switch (expr.kind) { case STRING_LITERAL: case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string"); } return expr; } /** * Parse constant expression. * * @return Parsed node */ private STNode parseConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STToken nextToken = peek(); STNode expr; switch (nextToken.kind) { case STRING_LITERAL: case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: expr = consume(); break; case IDENTIFIER_TOKEN: expr = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); break; case OPEN_BRACE_TOKEN: default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); expr = solution.recoveredNode; break; } endContext(); return expr; } /** * Parse the portion after the namespace-uri of an XML declaration. 
* * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) { return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri); } private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (nextTokenKind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri); } STNode semicolon = parseSemicolon(); return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. * * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return sol.recoveredNode; } } /** * Parse named worker declaration. * <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD); return sol.recoveredNode; } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME); return sol.recoveredNode; } } /** * Parse documentation string. * <p> * <code>DocumentationString := DocumentationLine +</code> * <p> * Refer {@link BallerinaLexer * * @return Parsed node */ private STNode parseDocumentationString() { List<STNode> docLines = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) { docLines.add(consume()); nextToken = peek(); } STNode documentationLines = STNodeFactory.createNodeList(docLines); return STNodeFactory.createDocumentationStringNode(documentationLines); } /** * Parse lock statement. * <code>lock-stmt := lock block-stmt ;</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement); } /** * Parse lock-keyword. 
* * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD); return sol.recoveredNode; } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return true; default: return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case XML_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case SERVICE_KEYWORD: case VAR_KEYWORD: return true; case TYPE_DESC: return true; default: return false; } } private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case XML_KEYWORD: return SyntaxKind.XML_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; default: return SyntaxKind.TYPE_DESC; } } /** * Parse fork-keyword. * * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD); return sol.recoveredNode; } } /** * Parse multiple named worker declarations. * * @return named-worker-declarations node array */ private STNode parseMultileNamedWorkerDeclarations() { STToken token = peek(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here"); break; } token = peek(); } if (workers.isEmpty()) { this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker"); } STNode namedWorkers = STNodeFactory.createNodeList(workers); return namedWorkers; } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); STNode namedWorkerDeclarations = parseMultileNamedWorkerDeclarations(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); } /** * Parse decimal floating point literal. 
* * @return Parsed node */ private STNode parseDecimalFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse hex floating point literal. * * @return Parsed node */ private STNode parseHexFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse trap expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param isRhsExpr * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTrapExpressionNode(trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD); return sol.recoveredNode; } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. * * @return Parsed node */ }
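A pattern worth noting in the parser code above: nearly every parseXKeyword/parseXToken method peeks at the lookahead token, consumes it when the kind matches, and otherwise asks an error handler for a recovery action (fabricate a synthetic "missing" token, or remove the offending token and retry). The sketch below is a hypothetical, self-contained distillation of that idiom, assuming simplified Token/Kind types; it is not the BallerinaParser API itself.

import java.util.ArrayDeque;
import java.util.Deque;

public class RecoveringParserSketch {

    enum Kind { IDENTIFIER, COLON, EOF }

    record Token(Kind kind, String text) { }

    private final Deque<Token> input = new ArrayDeque<>();

    RecoveringParserSketch(Token... tokens) {
        for (Token t : tokens) {
            input.addLast(t);
        }
        input.addLast(new Token(Kind.EOF, "<eof>")); // sentinel so peek() never returns null
    }

    private Token peek() {
        return input.peekFirst();
    }

    private Token consume() {
        return input.pollFirst();
    }

    // Consume a token of the expected kind, recovering in the spirit of the
    // parser above: either drop an apparently spurious token, or continue
    // with a fabricated "missing" token.
    Token expect(Kind expected) {
        if (peek().kind() == expected) {
            return consume();
        }
        if (peek().kind() != Kind.EOF) {
            Token bad = consume();
            if (peek().kind() == expected) {
                // "remove" recovery: the unexpected token was noise.
                System.err.println("removed invalid token: " + bad.text());
                return consume();
            }
            input.addFirst(bad); // restore it; it may belong to a later rule
        }
        // "insert" recovery: proceed with a synthetic missing token.
        System.err.println("inserted missing token of kind " + expected);
        return new Token(expected, "<missing " + expected + ">");
    }

    public static void main(String[] args) {
        RecoveringParserSketch p = new RecoveringParserSketch(new Token(Kind.IDENTIFIER, "x"));
        Token name = p.expect(Kind.IDENTIFIER); // matches, consumed normally
        Token colon = p.expect(Kind.COLON);     // recovers with a missing token
        System.out.println(name.text() + " " + colon.text());
    }
}

The real parser additionally threads a ParserRuleContext through recover(...) so the error handler can weigh alternative continuations; the sketch hard-codes a two-token lookahead heuristic instead.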
Consider adding an assert for the number of dispatchers to ensure that we actually check something in the loop.
public void requireThatDispatchTuningIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", "</search>"), joinLines("<tuning>", "</tuning>")); for (Dispatch tld : cluster.getSearch().getIndexed().getTLDs()) { PartitionsConfig.Builder builder = new PartitionsConfig.Builder(); tld.getConfig(builder); PartitionsConfig config = new PartitionsConfig(builder); assertEquals(2, config.dataset(0).searchablecopies()); assertTrue(config.dataset(0).useroundrobinforfixedrow()); } }
for (Dispatch tld : cluster.getSearch().getIndexed().getTLDs()) {
public void requireThatDispatchTuningIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", "</search>"), joinLines("<tuning>", "</tuning>")); assertEquals(1, cluster.getSearch().getIndexed().getTLDs().size()); for (Dispatch tld : cluster.getSearch().getIndexed().getTLDs()) { PartitionsConfig.Builder builder = new PartitionsConfig.Builder(); tld.getConfig(builder); PartitionsConfig config = new PartitionsConfig(builder); assertEquals(2, config.dataset(0).searchablecopies()); assertTrue(config.dataset(0).useroundrobinforfixedrow()); } }
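The rationale behind the review comment above: a for-each loop over an empty collection never executes its body, so assertions inside the loop pass vacuously. Below is a minimal, hypothetical JUnit 4 sketch of that failure mode and the guard; the class name, the searchableCopiesPerDispatcher() stub, and the expected values are illustrative only, not taken from the code under review.

import static org.junit.Assert.assertEquals;

import java.util.Collections;
import java.util.List;
import org.junit.Test;

public class VacuousLoopTest {

    // Hypothetical stand-in for cluster.getSearch().getIndexed().getTLDs()
    // in a configuration where no dispatchers were created.
    private List<Integer> searchableCopiesPerDispatcher() {
        return Collections.emptyList();
    }

    @Test
    public void passesVacuouslyWithoutSizeAssert() {
        // Loop body never runs: the test goes green although nothing was verified.
        for (int copies : searchableCopiesPerDispatcher()) {
            assertEquals(2, copies);
        }
    }

    @Test
    public void failsFastWithSizeAssert() {
        List<Integer> dispatchers = searchableCopiesPerDispatcher();
        // Guard first: an empty list now fails the test instead of passing it.
        assertEquals(1, dispatchers.size());
        for (int copies : dispatchers) {
            assertEquals(2, copies);
        }
    }
}

The same guard is what the updated test above adds via assertEquals(1, cluster.getSearch().getIndexed().getTLDs().size()) before entering the loop.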
class ClusterTest { @Test public void requireThatContentSearchIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <query-timeout>1.1</query-timeout>", " <visibility-delay>2.3</visibility-delay>", "</search>")); IndexedSearchCluster searchCluster = cluster.getSearch().getIndexed(); assertNotNull(searchCluster); assertEquals(1.1, searchCluster.getQueryTimeout(), 1E-6); assertEquals(2.3, searchCluster.getVisibilityDelay(), 1E-6); ProtonConfig proton = getProtonConfig(cluster); assertEquals(searchCluster.getVisibilityDelay(), proton.documentdb(0).visibilitydelay(), 1E-6); } @Test public void requireThatSearchCoverageIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <coverage>", " <minimum>0.11</minimum>", " <min-wait-after-coverage-factor>0.23</min-wait-after-coverage-factor>", " <max-wait-after-coverage-factor>0.58</max-wait-after-coverage-factor>", " </coverage>", "</search>")); for (Dispatch tld : cluster.getSearch().getIndexed().getTLDs()) { PartitionsConfig.Builder builder = new PartitionsConfig.Builder(); tld.getConfig(builder); PartitionsConfig config = new PartitionsConfig(builder); assertEquals(11.0, config.dataset(0).minimal_searchcoverage(), 1E-6); assertEquals(0.23, config.dataset(0).higher_coverage_minsearchwait(), 1E-6); assertEquals(0.58, config.dataset(0).higher_coverage_maxsearchwait(), 1E-6); assertEquals(2, config.dataset(0).searchablecopies()); assertTrue(config.dataset(0).useroundrobinforfixedrow()); } } @Test @Test public void requireThatVisibilityDelayIsZeroForGlobalDocumentType() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <visibility-delay>2.3</visibility-delay>", "</search>"), true); ProtonConfig proton = getProtonConfig(cluster); assertEquals(0.0, proton.documentdb(0).visibilitydelay(), 1E-6); } private static ContentCluster newContentCluster(String contentSearchXml) throws ParseException { return newContentCluster(contentSearchXml, "", false); } private static ContentCluster newContentCluster(String contentSearchXml, String searchNodeTuningXml) throws ParseException { return newContentCluster(contentSearchXml, searchNodeTuningXml, false); } private static ContentCluster newContentCluster(String contentSearchXml, boolean globalDocType) throws ParseException { return newContentCluster(contentSearchXml, "", globalDocType); } private static ContentCluster newContentCluster(String contentSearchXml, String searchNodeTuningXml, boolean globalDocType) throws ParseException { ApplicationPackage app = new MockApplicationPackage.Builder() .withHosts(joinLines("<hosts>", " <host name='localhost'><alias>my_host</alias></host>", "</hosts>")) .withServices(joinLines("<services version='1.0'>", " <admin version='2.0'>", " <adminserver hostalias='my_host' />", " </admin>", " <content version='1.0'>", " <redundancy>3</redundancy>", " <documents>", " " + getDocumentXml(globalDocType), " </documents>", " <engine>", " <proton>", " <searchable-copies>2</searchable-copies>", searchNodeTuningXml, " </proton>", " </engine>", " <group>", " <node hostalias='my_host' distribution-key='0' />", " </group>", contentSearchXml, " </content>", "</services>")) .withSearchDefinitions(ApplicationPackageUtils.generateSearchDefinition("my_document")) .build(); List<Content> contents = new TestDriver().buildModel(app).getConfigModels(Content.class); assertEquals(1, contents.size()); return contents.get(0).getCluster(); } private static String 
getDocumentXml(boolean globalDocType) { return "<document mode='index' type='my_document' " + (globalDocType ? "global='true' " : "") + "/>"; } private static SearchDefinition newSearchDefinition(String name) throws ParseException { SearchBuilder builder = new SearchBuilder(); builder.importString("search " + name + " { document " + name + " { } }"); builder.build(); return new SearchDefinition(name, builder.getSearch(name)); } private static ProtonConfig getProtonConfig(ContentCluster cluster) { ProtonConfig.Builder builder = new ProtonConfig.Builder(); cluster.getSearch().getConfig(builder); return new ProtonConfig(builder); } }
class ClusterTest { @Test public void requireThatContentSearchIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <query-timeout>1.1</query-timeout>", " <visibility-delay>2.3</visibility-delay>", "</search>")); IndexedSearchCluster searchCluster = cluster.getSearch().getIndexed(); assertNotNull(searchCluster); assertEquals(1.1, searchCluster.getQueryTimeout(), 1E-6); assertEquals(2.3, searchCluster.getVisibilityDelay(), 1E-6); ProtonConfig proton = getProtonConfig(cluster); assertEquals(searchCluster.getVisibilityDelay(), proton.documentdb(0).visibilitydelay(), 1E-6); } @Test public void requireThatSearchCoverageIsApplied() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <coverage>", " <minimum>0.11</minimum>", " <min-wait-after-coverage-factor>0.23</min-wait-after-coverage-factor>", " <max-wait-after-coverage-factor>0.58</max-wait-after-coverage-factor>", " </coverage>", "</search>")); assertEquals(1, cluster.getSearch().getIndexed().getTLDs().size()); for (Dispatch tld : cluster.getSearch().getIndexed().getTLDs()) { PartitionsConfig.Builder builder = new PartitionsConfig.Builder(); tld.getConfig(builder); PartitionsConfig config = new PartitionsConfig(builder); assertEquals(11.0, config.dataset(0).minimal_searchcoverage(), 1E-6); assertEquals(0.23, config.dataset(0).higher_coverage_minsearchwait(), 1E-6); assertEquals(0.58, config.dataset(0).higher_coverage_maxsearchwait(), 1E-6); assertEquals(2, config.dataset(0).searchablecopies()); assertTrue(config.dataset(0).useroundrobinforfixedrow()); } } @Test public void requireThatVisibilityDelayIsZeroForGlobalDocumentType() throws ParseException { ContentCluster cluster = newContentCluster(joinLines("<search>", " <visibility-delay>2.3</visibility-delay>", "</search>"), true); ProtonConfig proton = getProtonConfig(cluster); assertEquals(0.0, proton.documentdb(0).visibilitydelay(), 1E-6); } private static ContentCluster newContentCluster(String contentSearchXml) throws ParseException { return newContentCluster(contentSearchXml, "", false); } private static ContentCluster newContentCluster(String contentSearchXml, String searchNodeTuningXml) throws ParseException { return newContentCluster(contentSearchXml, searchNodeTuningXml, false); } private static ContentCluster newContentCluster(String contentSearchXml, boolean globalDocType) throws ParseException { return newContentCluster(contentSearchXml, "", globalDocType); } private static ContentCluster newContentCluster(String contentSearchXml, String searchNodeTuningXml, boolean globalDocType) throws ParseException { ApplicationPackage app = new MockApplicationPackage.Builder() .withHosts(joinLines( "<hosts>", " <host name='localhost'><alias>my_host</alias></host>", "</hosts>")) .withServices(joinLines( "<services version='1.0'>", " <admin version='2.0'>", " <adminserver hostalias='my_host' />", " </admin>", "<jdisc id='foo' version='1.0'>", " <search />", " <nodes><node hostalias='my_host' /></nodes>", "</jdisc>", " <content version='1.0'>", " <redundancy>3</redundancy>", " <documents>", " " + getDocumentXml(globalDocType), " </documents>", " <engine>", " <proton>", " <searchable-copies>2</searchable-copies>", searchNodeTuningXml, " </proton>", " </engine>", " <group>", " <node hostalias='my_host' distribution-key='0' />", " </group>", contentSearchXml, " </content>", "</services>")) .withSearchDefinitions(ApplicationPackageUtils.generateSearchDefinition("my_document")) .build(); List<Content>
contents = new TestDriver().buildModel(app).getConfigModels(Content.class); assertEquals(1, contents.size()); return contents.get(0).getCluster(); } private static String getDocumentXml(boolean globalDocType) { return "<document mode='index' type='my_document' " + (globalDocType ? "global='true' " : "") + "/>"; } private static SearchDefinition newSearchDefinition(String name) throws ParseException { SearchBuilder builder = new SearchBuilder(); builder.importString("search " + name + " { document " + name + " { } }"); builder.build(); return new SearchDefinition(name, builder.getSearch(name)); } private static ProtonConfig getProtonConfig(ContentCluster cluster) { ProtonConfig.Builder builder = new ProtonConfig.Builder(); cluster.getSearch().getConfig(builder); return new ProtonConfig(builder); } }
Non-nullity is already asserted a few lines up; the requireNonNull here is redundant.
public Endpoint in(SystemName system) { if (system.isPublic() && routingMethod != RoutingMethod.exclusive) { throw new IllegalArgumentException("Public system only supports routing method " + RoutingMethod.exclusive); } if (routingMethod.isDirect() && !port.isDefault()) { throw new IllegalArgumentException("Routing method " + routingMethod + " can only use default port"); } URI url = createUrl(endpointOrClusterAsString(endpointId, cluster), Objects.requireNonNull(application, "application must be non-null"), Objects.requireNonNull(instance, "instance must be non-null"), Objects.requireNonNull(targets, "targets must be non-null"), Objects.requireNonNull(scope, "scope must be non-null"), Objects.requireNonNull(system, "system must be non-null"), Objects.requireNonNull(port, "port must be non-null"), legacy); return new Endpoint(application, instance, endpointId, cluster, url, targets, scope, port, legacy, routingMethod, certificateName); }
instance,
public Endpoint in(SystemName system) { if (system.isPublic() && routingMethod != RoutingMethod.exclusive) { throw new IllegalArgumentException("Public system only supports routing method " + RoutingMethod.exclusive); } if (routingMethod.isDirect() && !port.isDefault()) { throw new IllegalArgumentException("Routing method " + routingMethod + " can only use default port"); } URI url = createUrl(endpointOrClusterAsString(endpointId, cluster), Objects.requireNonNull(application, "application must be non-null"), instance, Objects.requireNonNull(targets, "targets must be non-null"), Objects.requireNonNull(scope, "scope must be non-null"), Objects.requireNonNull(system, "system must be non-null"), Objects.requireNonNull(port, "port must be non-null"), legacy); return new Endpoint(application, instance, endpointId, cluster, url, targets, scope, port, legacy, routingMethod, certificateName); }
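For illustration, a minimal sketch of the pattern the comment points at; the class and names here are hypothetical, not the actual Endpoint code. Once the constructor has asserted non-nullity, later uses of the field can pass it directly instead of repeating Objects.requireNonNull at every call site.

import java.util.Objects;

class Holder {
    private final String instance;

    Holder(String instance) {
        // Non-nullity is asserted once, in the constructor.
        this.instance = Objects.requireNonNull(instance, "instance must be non-null");
    }

    String describe() {
        // No repeated Objects.requireNonNull here: the field is already
        // guaranteed non-null by the constructor.
        return "instance=" + instance;
    }
}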
class EndpointBuilder { private final TenantAndApplicationId application; private final Optional<InstanceName> instance; private Scope scope; private List<Target> targets; private ClusterSpec.Id cluster; private EndpointId endpointId; private Port port; private RoutingMethod routingMethod = RoutingMethod.sharedLayer4; private boolean legacy = false; private boolean certificateName = false; private EndpointBuilder(TenantAndApplicationId application, Optional<InstanceName> instance) { this.application = Objects.requireNonNull(application); this.instance = Objects.requireNonNull(instance); } /** Sets the deployment target for this */ public EndpointBuilder target(ClusterSpec.Id cluster, DeploymentId deployment) { this.cluster = cluster; this.scope = requireUnset(Scope.zone); this.targets = List.of(new Target(deployment)); return this; } /** Sets the global target with given ID, deployments and cluster (as defined in deployments.xml) */ public EndpointBuilder target(EndpointId endpointId, ClusterSpec.Id cluster, List<DeploymentId> deployments) { this.endpointId = endpointId; this.cluster = cluster; this.targets = deployments.stream().map(Target::new).collect(Collectors.toUnmodifiableList()); this.scope = requireUnset(Scope.global); return this; } /** Sets the global target with given ID and pointing to the default cluster */ public EndpointBuilder target(EndpointId endpointId) { return target(endpointId, ClusterSpec.Id.from("default"), List.of()); } /** Sets the application target with given ID and pointing to the default cluster */ public EndpointBuilder targetApplication(EndpointId endpointId, DeploymentId deployment) { return targetApplication(endpointId, ClusterSpec.Id.from("default"), Map.of(deployment, 1)); } /** Sets the global wildcard target for this */ public EndpointBuilder wildcard() { return target(EndpointId.of("*"), ClusterSpec.Id.from("*"), List.of()); } /** Sets the application wildcard target for this */ public EndpointBuilder wildcardApplication(DeploymentId deployment) { return targetApplication(EndpointId.of("*"), ClusterSpec.Id.from("*"), Map.of(deployment, 1)); } /** Sets the zone wildcard target for this */ public EndpointBuilder wildcard(DeploymentId deployment) { return target(ClusterSpec.Id.from("*"), deployment); } /** Sets the application target with given ID, cluster, deployments and their weights */ public EndpointBuilder targetApplication(EndpointId endpointId, ClusterSpec.Id cluster, Map<DeploymentId, Integer> deployments) { this.endpointId = endpointId; this.cluster = cluster; this.targets = deployments.entrySet().stream() .map(kv -> new Target(kv.getKey(), kv.getValue())) .collect(Collectors.toUnmodifiableList()); this.scope = Scope.application; return this; } /** Sets the region target for this, deduced from given zone */ public EndpointBuilder targetRegion(ClusterSpec.Id cluster, ZoneId zone) { this.cluster = cluster; this.scope = requireUnset(Scope.weighted); this.targets = List.of(new Target(new DeploymentId(application.instance(instance.get()), effectiveZone(zone)))); return this; } /** Sets the port of this */ public EndpointBuilder on(Port port) { this.port = port; return this; } /** Marks this as a legacy endpoint */ public EndpointBuilder legacy() { this.legacy = true; return this; } /** Sets the routing method for this */ public EndpointBuilder routingMethod(RoutingMethod method) { this.routingMethod = method; return this; } /** Sets whether we're building a name for inclusion in a certificate */ public EndpointBuilder certificateName() { 
this.certificateName = true; return this; } /** Returns the given scope, requiring that no scope has been set yet */ private Scope requireUnset(Scope scope) { if (this.scope != null) { throw new IllegalArgumentException("Cannot change endpoint scope. Already set to " + scope); } return scope; } }
class EndpointBuilder { private final TenantAndApplicationId application; private final Optional<InstanceName> instance; private Scope scope; private List<Target> targets; private ClusterSpec.Id cluster; private EndpointId endpointId; private Port port; private RoutingMethod routingMethod = RoutingMethod.sharedLayer4; private boolean legacy = false; private boolean certificateName = false; private EndpointBuilder(TenantAndApplicationId application, Optional<InstanceName> instance) { this.application = Objects.requireNonNull(application); this.instance = Objects.requireNonNull(instance); } /** Sets the deployment target for this */ public EndpointBuilder target(ClusterSpec.Id cluster, DeploymentId deployment) { this.cluster = cluster; this.scope = requireUnset(Scope.zone); this.targets = List.of(new Target(deployment)); return this; } /** Sets the global target with given ID, deployments and cluster (as defined in deployments.xml) */ public EndpointBuilder target(EndpointId endpointId, ClusterSpec.Id cluster, List<DeploymentId> deployments) { this.endpointId = endpointId; this.cluster = cluster; this.targets = deployments.stream().map(Target::new).collect(Collectors.toUnmodifiableList()); this.scope = requireUnset(Scope.global); return this; } /** Sets the global target with given ID and pointing to the default cluster */ public EndpointBuilder target(EndpointId endpointId) { return target(endpointId, ClusterSpec.Id.from("default"), List.of()); } /** Sets the application target with given ID and pointing to the default cluster */ public EndpointBuilder targetApplication(EndpointId endpointId, DeploymentId deployment) { return targetApplication(endpointId, ClusterSpec.Id.from("default"), Map.of(deployment, 1)); } /** Sets the global wildcard target for this */ public EndpointBuilder wildcard() { return target(EndpointId.of("*"), ClusterSpec.Id.from("*"), List.of()); } /** Sets the application wildcard target for this */ public EndpointBuilder wildcardApplication(DeploymentId deployment) { return targetApplication(EndpointId.of("*"), ClusterSpec.Id.from("*"), Map.of(deployment, 1)); } /** Sets the zone wildcard target for this */ public EndpointBuilder wildcard(DeploymentId deployment) { return target(ClusterSpec.Id.from("*"), deployment); } /** Sets the application target with given ID, cluster, deployments and their weights */ public EndpointBuilder targetApplication(EndpointId endpointId, ClusterSpec.Id cluster, Map<DeploymentId, Integer> deployments) { this.endpointId = endpointId; this.cluster = cluster; this.targets = deployments.entrySet().stream() .map(kv -> new Target(kv.getKey(), kv.getValue())) .collect(Collectors.toUnmodifiableList()); this.scope = Scope.application; return this; } /** Sets the region target for this, deduced from given zone */ public EndpointBuilder targetRegion(ClusterSpec.Id cluster, ZoneId zone) { this.cluster = cluster; this.scope = requireUnset(Scope.weighted); this.targets = List.of(new Target(new DeploymentId(application.instance(instance.get()), effectiveZone(zone)))); return this; } /** Sets the port of this */ public EndpointBuilder on(Port port) { this.port = port; return this; } /** Marks this as a legacy endpoint */ public EndpointBuilder legacy() { this.legacy = true; return this; } /** Sets the routing method for this */ public EndpointBuilder routingMethod(RoutingMethod method) { this.routingMethod = method; return this; } /** Sets whether we're building a name for inclusion in a certificate */ public EndpointBuilder certificateName() { 
this.certificateName = true; return this; } /** Returns the given scope, requiring that no scope has been set yet */ private Scope requireUnset(Scope scope) { if (this.scope != null) { throw new IllegalArgumentException("Cannot change endpoint scope. Already set to " + scope); } return scope; } }
I don't have a preference, except that it should be consistent; we can extract it as a separate matcher if good error messages are something we want.
public void testAvroSchemaFromBeamSchemaCanBeParsed() { String stringSchema = AvroUtils.toAvroSchema(getBeamSchema()).toString(); org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(stringSchema); assertEquals(stringSchema, schema.toString()); }
assertEquals(stringSchema, schema.toString());
public void testAvroSchemaFromBeamSchemaCanBeParsed() { org.apache.avro.Schema convertedSchema = AvroUtils.toAvroSchema(getBeamSchema()); org.apache.avro.Schema validatedSchema = new org.apache.avro.Schema.Parser().parse(convertedSchema.toString()); assertEquals(convertedSchema, validatedSchema); }
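To illustrate the reviewer's suggestion, here is a sketch of what a dedicated Hamcrest matcher could look like; the class name and messages are assumptions, not part of the actual change. It keeps the parse-and-compare logic in one place and produces a readable mismatch description instead of a raw string comparison.

import org.apache.avro.Schema;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;

// Hypothetical matcher: passes when an Avro schema survives a round trip
// through its string representation.
class RoundTripsThroughParser extends TypeSafeMatcher<Schema> {
    @Override
    protected boolean matchesSafely(Schema schema) {
        // Reparse the schema from its own toString() form and compare.
        Schema reparsed = new Schema.Parser().parse(schema.toString());
        return schema.equals(reparsed);
    }

    @Override
    public void describeTo(Description description) {
        description.appendText("a schema equal to itself after parsing its toString() form");
    }

    @Override
    protected void describeMismatchSafely(Schema schema, Description description) {
        description.appendText("schema ")
            .appendValue(schema.getFullName())
            .appendText(" did not survive the parse round trip");
    }
}

With such a matcher, the test body above would reduce to something like assertThat(AvroUtils.toAvroSchema(getBeamSchema()), new RoundTripsThroughParser()).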
class AvroUtilsTest { private static final org.apache.avro.Schema NULL_SCHEMA = org.apache.avro.Schema.create(Type.NULL); @Property(trials = 1000) @SuppressWarnings("unchecked") public void supportsAnyAvroSchema( @From(RecordSchemaGenerator.class) org.apache.avro.Schema avroSchema) { assumeThat(avroSchema, not(containsField(AvroUtilsTest::hasNonNullUnion))); Schema schema = AvroUtils.toBeamSchema(avroSchema); Iterable iterable = new RandomData(avroSchema, 10); List<GenericRecord> records = Lists.newArrayList((Iterable<GenericRecord>) iterable); for (GenericRecord record : records) { AvroUtils.toBeamRowStrict(record, schema); } } @Property(trials = 1000) @SuppressWarnings("unchecked") public void avroToBeamRoundTrip( @From(RecordSchemaGenerator.class) org.apache.avro.Schema avroSchema) { assumeThat(avroSchema, not(containsField(AvroUtilsTest::hasNonNullUnion))); assumeThat(avroSchema, not(containsField(x -> x.getType() == Type.ENUM))); assumeThat(avroSchema, not(containsField(x -> x.getType() == Type.FIXED))); Schema schema = AvroUtils.toBeamSchema(avroSchema); Iterable iterable = new RandomData(avroSchema, 10); List<GenericRecord> records = Lists.newArrayList((Iterable<GenericRecord>) iterable); for (GenericRecord record : records) { Row row = AvroUtils.toBeamRowStrict(record, schema); GenericRecord out = AvroUtils.toGenericRecord(row, avroSchema); assertEquals(record, out); } } @Test public void testUnwrapNullableSchema() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.NULL), org.apache.avro.Schema.create(Type.STRING)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals(org.apache.avro.Schema.create(Type.STRING), typeWithNullability.type); } @Test public void testUnwrapNullableSchemaReordered() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.NULL)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals(org.apache.avro.Schema.create(Type.STRING), typeWithNullability.type); } @Test public void testUnwrapNullableSchemaToUnion() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.LONG), org.apache.avro.Schema.create(Type.NULL)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals( org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.LONG)), typeWithNullability.type); } @Test public void testNullableArrayFieldToBeamArrayField() { org.apache.avro.Schema.Field avroField = new org.apache.avro.Schema.Field( "arrayField", ReflectData.makeNullable( org.apache.avro.Schema.createArray((org.apache.avro.Schema.create(Type.INT)))), "", null); Field expectedBeamField = Field.nullable("arrayField", FieldType.array(FieldType.INT32)); Field beamField = AvroUtils.toBeamField(avroField); assertEquals(expectedBeamField, beamField); } @Test public void testNullableBeamArrayFieldToAvroField() { Field beamField = Field.nullable("arrayField", FieldType.array(FieldType.INT32)); org.apache.avro.Schema.Field expectedAvroField = new org.apache.avro.Schema.Field( "arrayField", ReflectData.makeNullable( 
org.apache.avro.Schema.createArray((org.apache.avro.Schema.create(Type.INT)))), "", null); org.apache.avro.Schema.Field avroField = AvroUtils.toAvroField(beamField, "ignored"); assertEquals(expectedAvroField, avroField); } private static List<org.apache.avro.Schema.Field> getAvroSubSchemaFields() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "bool", org.apache.avro.Schema.create(Type.BOOLEAN), "", null)); fields.add( new org.apache.avro.Schema.Field("int", org.apache.avro.Schema.create(Type.INT), "", null)); return fields; } private static org.apache.avro.Schema getAvroSubSchema(String name) { return org.apache.avro.Schema.createRecord( name, null, "topLevelRecord", false, getAvroSubSchemaFields()); } private static org.apache.avro.Schema getAvroSchema() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "bool", org.apache.avro.Schema.create(Type.BOOLEAN), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "int", org.apache.avro.Schema.create(Type.INT), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "long", org.apache.avro.Schema.create(Type.LONG), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "float", org.apache.avro.Schema.create(Type.FLOAT), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "double", org.apache.avro.Schema.create(Type.DOUBLE), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "string", org.apache.avro.Schema.create(Type.STRING), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "bytes", org.apache.avro.Schema.create(Type.BYTES), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "decimal", LogicalTypes.decimal(Integer.MAX_VALUE) .addToSchema(org.apache.avro.Schema.create(Type.BYTES)), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "timestampMillis", LogicalTypes.timestampMillis().addToSchema(org.apache.avro.Schema.create(Type.LONG)), "", (Object) null)); fields.add(new org.apache.avro.Schema.Field("row", getAvroSubSchema("row"), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray(getAvroSubSchema("array")), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap(getAvroSubSchema("map")), "", (Object) null)); return org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); } private static Schema getBeamSubSchema() { return new Schema.Builder() .addField(Field.of("bool", FieldType.BOOLEAN)) .addField(Field.of("int", FieldType.INT32)) .build(); } private Schema getBeamSchema() { Schema subSchema = getBeamSubSchema(); return new Schema.Builder() .addField(Field.of("bool", FieldType.BOOLEAN)) .addField(Field.of("int", FieldType.INT32)) .addField(Field.of("long", FieldType.INT64)) .addField(Field.of("float", FieldType.FLOAT)) .addField(Field.of("double", FieldType.DOUBLE)) .addField(Field.of("string", FieldType.STRING)) .addField(Field.of("bytes", FieldType.BYTES)) .addField(Field.of("decimal", FieldType.DECIMAL)) .addField(Field.of("timestampMillis", FieldType.DATETIME)) .addField(Field.of("row", FieldType.row(subSchema))) .addField(Field.of("array", FieldType.array(FieldType.row(subSchema)))) .addField(Field.of("map", FieldType.map(FieldType.STRING, FieldType.row(subSchema)))) .build(); } private static final byte[] BYTE_ARRAY = new byte[] {1, 2, 3, 4}; private static 
final DateTime DATE_TIME = new DateTime().withDate(1979, 3, 14).withTime(1, 2, 3, 4).withZone(DateTimeZone.UTC); private static final BigDecimal BIG_DECIMAL = new BigDecimal(3600); private Row getBeamRow() { Row subRow = Row.withSchema(getBeamSubSchema()).addValues(true, 42).build(); return Row.withSchema(getBeamSchema()) .addValue(true) .addValue(43) .addValue(44L) .addValue((float) 44.1) .addValue((double) 44.2) .addValue("string") .addValue(BYTE_ARRAY) .addValue(BIG_DECIMAL) .addValue(DATE_TIME) .addValue(subRow) .addValue(ImmutableList.of(subRow, subRow)) .addValue(ImmutableMap.of("k1", subRow, "k2", subRow)) .build(); } private static GenericRecord getSubGenericRecord(String name) { return new GenericRecordBuilder(getAvroSubSchema(name)) .set("bool", true) .set("int", 42) .build(); } private static GenericRecord getGenericRecord() { LogicalType decimalType = LogicalTypes.decimal(Integer.MAX_VALUE) .addToSchema(org.apache.avro.Schema.create(Type.BYTES)) .getLogicalType(); ByteBuffer encodedDecimal = new Conversions.DecimalConversion().toBytes(BIG_DECIMAL, null, decimalType); return new GenericRecordBuilder(getAvroSchema()) .set("bool", true) .set("int", 43) .set("long", 44L) .set("float", (float) 44.1) .set("double", (double) 44.2) .set("string", new Utf8("string")) .set("bytes", ByteBuffer.wrap(BYTE_ARRAY)) .set("decimal", encodedDecimal) .set("timestampMillis", DATE_TIME.getMillis()) .set("row", getSubGenericRecord("row")) .set("array", ImmutableList.of(getSubGenericRecord("array"), getSubGenericRecord("array"))) .set( "map", ImmutableMap.of( new Utf8("k1"), getSubGenericRecord("map"), new Utf8("k2"), getSubGenericRecord("map"))) .build(); } @Test public void testFromAvroSchema() { assertEquals(getBeamSchema(), AvroUtils.toBeamSchema(getAvroSchema())); } @Test public void testFromBeamSchema() { Schema beamSchema = getBeamSchema(); org.apache.avro.Schema avroSchema = AvroUtils.toAvroSchema(beamSchema); assertEquals(getAvroSchema(), avroSchema); } @Test public void testAvroSchemaFromBeamSchemaWithFieldCollisionCanBeParsed() { Schema contact = new Schema.Builder() .addField(Field.of("name", FieldType.STRING)) .addField( Field.of( "address", FieldType.row( new Schema.Builder() .addField(Field.of("street", FieldType.STRING)) .addField(Field.of("city", FieldType.STRING)) .build()))) .build(); Schema contactMultiline = new Schema.Builder() .addField(Field.of("name", FieldType.STRING)) .addField( Field.of( "address", FieldType.row( new Schema.Builder() .addField(Field.of("street", FieldType.array(FieldType.STRING))) .addField(Field.of("city", FieldType.STRING)) .build()))) .build(); Schema beamSchema = new Schema.Builder() .addField(Field.of("home", FieldType.row(contact))) .addField(Field.of("work", FieldType.row(contactMultiline))) .addField(Field.of("address", FieldType.row(contact))) .addField(Field.of("topLevelRecord", FieldType.row(contactMultiline))) .build(); String stringSchema = AvroUtils.toAvroSchema(beamSchema).toString(); org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(stringSchema); assertEquals(stringSchema, schema.toString()); } @Test public void testNullableFieldInAvroSchema() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "int", ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT)), "", null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.BYTES))), "", null));
fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); Schema expectedSchema = Schema.builder() .addNullableField("int", FieldType.INT32) .addArrayField("array", FieldType.BYTES.withNullable(true)) .addMapField("map", FieldType.STRING, FieldType.INT32.withNullable(true)) .build(); assertEquals(expectedSchema, AvroUtils.toBeamSchema(avroSchema)); Map<String, Object> nullMap = Maps.newHashMap(); nullMap.put("k1", null); GenericRecord genericRecord = new GenericRecordBuilder(avroSchema) .set("int", null) .set("array", Lists.newArrayList((Object) null)) .set("map", nullMap) .build(); Row expectedRow = Row.withSchema(expectedSchema) .addValue(null) .addValue(Lists.newArrayList((Object) null)) .addValue(nullMap) .build(); assertEquals(expectedRow, AvroUtils.toBeamRowStrict(genericRecord, expectedSchema)); } @Test public void testNullableFieldsInBeamSchema() { Schema beamSchema = Schema.builder() .addNullableField("int", FieldType.INT32) .addArrayField("array", FieldType.INT32.withNullable(true)) .addMapField("map", FieldType.STRING, FieldType.INT32.withNullable(true)) .build(); List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "int", ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT)), "", null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); assertEquals(avroSchema, AvroUtils.toAvroSchema(beamSchema)); Map<Utf8, Object> nullMapUtf8 = Maps.newHashMap(); nullMapUtf8.put(new Utf8("k1"), null); Map<String, Object> nullMapString = Maps.newHashMap(); nullMapString.put("k1", null); GenericRecord expectedGenericRecord = new GenericRecordBuilder(avroSchema) .set("int", null) .set("array", Lists.newArrayList((Object) null)) .set("map", nullMapUtf8) .build(); Row row = Row.withSchema(beamSchema) .addValue(null) .addValue(Lists.newArrayList((Object) null)) .addValue(nullMapString) .build(); assertEquals(expectedGenericRecord, AvroUtils.toGenericRecord(row, avroSchema)); } @Test public void testBeamRowToGenericRecord() { GenericRecord genericRecord = AvroUtils.toGenericRecord(getBeamRow(), null); assertEquals(getAvroSchema(), genericRecord.getSchema()); assertEquals(getGenericRecord(), genericRecord); } @Test public void testGenericRecordToBeamRow() { Row row = AvroUtils.toBeamRowStrict(getGenericRecord(), null); assertEquals(getBeamRow(), row); } @Test public void testAvroSchemaCoders() { Pipeline pipeline = Pipeline.create(); org.apache.avro.Schema schema = org.apache.avro.Schema.createRecord( "TestSubRecord", "TestSubRecord doc", "org.apache.beam.sdk.schemas.utils", false, getAvroSubSchemaFields()); GenericRecord record = new GenericRecordBuilder(getAvroSubSchema("simple")) .set("bool", true) .set("int", 42) .build(); PCollection<GenericRecord> records = pipeline.apply(Create.of(record).withCoder(AvroCoder.of(schema))); assertFalse(records.hasSchema()); records.setCoder(AvroUtils.schemaCoder(schema)); 
assertTrue(records.hasSchema()); AvroGeneratedUser user = new AvroGeneratedUser("foo", 42, "green"); PCollection<AvroGeneratedUser> users = pipeline.apply(Create.of(user).withCoder(AvroCoder.of(AvroGeneratedUser.class))); assertFalse(users.hasSchema()); users.setCoder(AvroUtils.schemaCoder((AvroCoder<AvroGeneratedUser>) users.getCoder())); assertTrue(users.hasSchema()); } public static ContainsField containsField(Function<org.apache.avro.Schema, Boolean> predicate) { return new ContainsField(predicate); } public static boolean hasNonNullUnion(org.apache.avro.Schema schema) { if (schema.getType() == Type.UNION) { final List<org.apache.avro.Schema> types = schema.getTypes(); if (types.size() == 2) { return !types.contains(NULL_SCHEMA); } else { return true; } } return false; } static class ContainsField extends BaseMatcher<org.apache.avro.Schema> { private final Function<org.apache.avro.Schema, Boolean> predicate; ContainsField(final Function<org.apache.avro.Schema, Boolean> predicate) { this.predicate = predicate; } @Override public boolean matches(final Object item0) { if (!(item0 instanceof org.apache.avro.Schema)) { return false; } org.apache.avro.Schema item = (org.apache.avro.Schema) item0; if (predicate.apply(item)) { return true; } switch (item.getType()) { case RECORD: return item.getFields().stream().anyMatch(x -> matches(x.schema())); case UNION: return item.getTypes().stream().anyMatch(this::matches); case ARRAY: return matches(item.getElementType()); case MAP: return matches(item.getValueType()); default: return false; } } @Override public void describeTo(final Description description) {} } }
class AvroUtilsTest { private static final org.apache.avro.Schema NULL_SCHEMA = org.apache.avro.Schema.create(Type.NULL); @Property(trials = 1000) @SuppressWarnings("unchecked") public void supportsAnyAvroSchema( @From(RecordSchemaGenerator.class) org.apache.avro.Schema avroSchema) { assumeThat(avroSchema, not(containsField(AvroUtilsTest::hasNonNullUnion))); Schema schema = AvroUtils.toBeamSchema(avroSchema); Iterable iterable = new RandomData(avroSchema, 10); List<GenericRecord> records = Lists.newArrayList((Iterable<GenericRecord>) iterable); for (GenericRecord record : records) { AvroUtils.toBeamRowStrict(record, schema); } } @Property(trials = 1000) @SuppressWarnings("unchecked") public void avroToBeamRoundTrip( @From(RecordSchemaGenerator.class) org.apache.avro.Schema avroSchema) { assumeThat(avroSchema, not(containsField(AvroUtilsTest::hasNonNullUnion))); assumeThat(avroSchema, not(containsField(x -> x.getType() == Type.ENUM))); assumeThat(avroSchema, not(containsField(x -> x.getType() == Type.FIXED))); Schema schema = AvroUtils.toBeamSchema(avroSchema); Iterable iterable = new RandomData(avroSchema, 10); List<GenericRecord> records = Lists.newArrayList((Iterable<GenericRecord>) iterable); for (GenericRecord record : records) { Row row = AvroUtils.toBeamRowStrict(record, schema); GenericRecord out = AvroUtils.toGenericRecord(row, avroSchema); assertEquals(record, out); } } @Test public void testUnwrapNullableSchema() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.NULL), org.apache.avro.Schema.create(Type.STRING)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals(org.apache.avro.Schema.create(Type.STRING), typeWithNullability.type); } @Test public void testUnwrapNullableSchemaReordered() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.NULL)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals(org.apache.avro.Schema.create(Type.STRING), typeWithNullability.type); } @Test public void testUnwrapNullableSchemaToUnion() { org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.LONG), org.apache.avro.Schema.create(Type.NULL)); TypeWithNullability typeWithNullability = new TypeWithNullability(avroSchema); assertTrue(typeWithNullability.nullable); assertEquals( org.apache.avro.Schema.createUnion( org.apache.avro.Schema.create(Type.STRING), org.apache.avro.Schema.create(Type.LONG)), typeWithNullability.type); } @Test public void testNullableArrayFieldToBeamArrayField() { org.apache.avro.Schema.Field avroField = new org.apache.avro.Schema.Field( "arrayField", ReflectData.makeNullable( org.apache.avro.Schema.createArray((org.apache.avro.Schema.create(Type.INT)))), "", null); Field expectedBeamField = Field.nullable("arrayField", FieldType.array(FieldType.INT32)); Field beamField = AvroUtils.toBeamField(avroField); assertEquals(expectedBeamField, beamField); } @Test public void testNullableBeamArrayFieldToAvroField() { Field beamField = Field.nullable("arrayField", FieldType.array(FieldType.INT32)); org.apache.avro.Schema.Field expectedAvroField = new org.apache.avro.Schema.Field( "arrayField", ReflectData.makeNullable( 
org.apache.avro.Schema.createArray((org.apache.avro.Schema.create(Type.INT)))), "", null); org.apache.avro.Schema.Field avroField = AvroUtils.toAvroField(beamField, "ignored"); assertEquals(expectedAvroField, avroField); } private static List<org.apache.avro.Schema.Field> getAvroSubSchemaFields() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "bool", org.apache.avro.Schema.create(Type.BOOLEAN), "", null)); fields.add( new org.apache.avro.Schema.Field("int", org.apache.avro.Schema.create(Type.INT), "", null)); return fields; } private static org.apache.avro.Schema getAvroSubSchema(String name) { return org.apache.avro.Schema.createRecord( name, null, "topLevelRecord", false, getAvroSubSchemaFields()); } private static org.apache.avro.Schema getAvroSchema() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "bool", org.apache.avro.Schema.create(Type.BOOLEAN), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "int", org.apache.avro.Schema.create(Type.INT), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "long", org.apache.avro.Schema.create(Type.LONG), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "float", org.apache.avro.Schema.create(Type.FLOAT), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "double", org.apache.avro.Schema.create(Type.DOUBLE), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "string", org.apache.avro.Schema.create(Type.STRING), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "bytes", org.apache.avro.Schema.create(Type.BYTES), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "decimal", LogicalTypes.decimal(Integer.MAX_VALUE) .addToSchema(org.apache.avro.Schema.create(Type.BYTES)), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "timestampMillis", LogicalTypes.timestampMillis().addToSchema(org.apache.avro.Schema.create(Type.LONG)), "", (Object) null)); fields.add(new org.apache.avro.Schema.Field("row", getAvroSubSchema("row"), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray(getAvroSubSchema("array")), "", (Object) null)); fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap(getAvroSubSchema("map")), "", (Object) null)); return org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); } private static Schema getBeamSubSchema() { return new Schema.Builder() .addField(Field.of("bool", FieldType.BOOLEAN)) .addField(Field.of("int", FieldType.INT32)) .build(); } private Schema getBeamSchema() { Schema subSchema = getBeamSubSchema(); return new Schema.Builder() .addField(Field.of("bool", FieldType.BOOLEAN)) .addField(Field.of("int", FieldType.INT32)) .addField(Field.of("long", FieldType.INT64)) .addField(Field.of("float", FieldType.FLOAT)) .addField(Field.of("double", FieldType.DOUBLE)) .addField(Field.of("string", FieldType.STRING)) .addField(Field.of("bytes", FieldType.BYTES)) .addField(Field.of("decimal", FieldType.DECIMAL)) .addField(Field.of("timestampMillis", FieldType.DATETIME)) .addField(Field.of("row", FieldType.row(subSchema))) .addField(Field.of("array", FieldType.array(FieldType.row(subSchema)))) .addField(Field.of("map", FieldType.map(FieldType.STRING, FieldType.row(subSchema)))) .build(); } private static final byte[] BYTE_ARRAY = new byte[] {1, 2, 3, 4}; private static 
final DateTime DATE_TIME = new DateTime().withDate(1979, 3, 14).withTime(1, 2, 3, 4).withZone(DateTimeZone.UTC); private static final BigDecimal BIG_DECIMAL = new BigDecimal(3600); private Row getBeamRow() { Row subRow = Row.withSchema(getBeamSubSchema()).addValues(true, 42).build(); return Row.withSchema(getBeamSchema()) .addValue(true) .addValue(43) .addValue(44L) .addValue((float) 44.1) .addValue((double) 44.2) .addValue("string") .addValue(BYTE_ARRAY) .addValue(BIG_DECIMAL) .addValue(DATE_TIME) .addValue(subRow) .addValue(ImmutableList.of(subRow, subRow)) .addValue(ImmutableMap.of("k1", subRow, "k2", subRow)) .build(); } private static GenericRecord getSubGenericRecord(String name) { return new GenericRecordBuilder(getAvroSubSchema(name)) .set("bool", true) .set("int", 42) .build(); } private static GenericRecord getGenericRecord() { LogicalType decimalType = LogicalTypes.decimal(Integer.MAX_VALUE) .addToSchema(org.apache.avro.Schema.create(Type.BYTES)) .getLogicalType(); ByteBuffer encodedDecimal = new Conversions.DecimalConversion().toBytes(BIG_DECIMAL, null, decimalType); return new GenericRecordBuilder(getAvroSchema()) .set("bool", true) .set("int", 43) .set("long", 44L) .set("float", (float) 44.1) .set("double", (double) 44.2) .set("string", new Utf8("string")) .set("bytes", ByteBuffer.wrap(BYTE_ARRAY)) .set("decimal", encodedDecimal) .set("timestampMillis", DATE_TIME.getMillis()) .set("row", getSubGenericRecord("row")) .set("array", ImmutableList.of(getSubGenericRecord("array"), getSubGenericRecord("array"))) .set( "map", ImmutableMap.of( new Utf8("k1"), getSubGenericRecord("map"), new Utf8("k2"), getSubGenericRecord("map"))) .build(); } @Test public void testFromAvroSchema() { assertEquals(getBeamSchema(), AvroUtils.toBeamSchema(getAvroSchema())); } @Test public void testFromBeamSchema() { Schema beamSchema = getBeamSchema(); org.apache.avro.Schema avroSchema = AvroUtils.toAvroSchema(beamSchema); assertEquals(getAvroSchema(), avroSchema); } @Test public void testAvroSchemaFromBeamSchemaWithFieldCollisionCanBeParsed() { Schema contact = new Schema.Builder() .addField(Field.of("name", FieldType.STRING)) .addField( Field.of( "address", FieldType.row( new Schema.Builder() .addField(Field.of("street", FieldType.STRING)) .addField(Field.of("city", FieldType.STRING)) .build()))) .build(); Schema contactMultiline = new Schema.Builder() .addField(Field.of("name", FieldType.STRING)) .addField( Field.of( "address", FieldType.row( new Schema.Builder() .addField(Field.of("street", FieldType.array(FieldType.STRING))) .addField(Field.of("city", FieldType.STRING)) .build()))) .build(); Schema beamSchema = new Schema.Builder() .addField(Field.of("home", FieldType.row(contact))) .addField(Field.of("work", FieldType.row(contactMultiline))) .addField(Field.of("address", FieldType.row(contact))) .addField(Field.of("topLevelRecord", FieldType.row(contactMultiline))) .build(); org.apache.avro.Schema convertedSchema = AvroUtils.toAvroSchema(beamSchema); org.apache.avro.Schema validatedSchema = new org.apache.avro.Schema.Parser().parse(convertedSchema.toString()); assertEquals(convertedSchema, validatedSchema); } @Test public void testNullableFieldInAvroSchema() { List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "int", ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT)), "", null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray(
ReflectData.makeNullable(org.apache.avro.Schema.create(Type.BYTES))), "", null)); fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); Schema expectedSchema = Schema.builder() .addNullableField("int", FieldType.INT32) .addArrayField("array", FieldType.BYTES.withNullable(true)) .addMapField("map", FieldType.STRING, FieldType.INT32.withNullable(true)) .build(); assertEquals(expectedSchema, AvroUtils.toBeamSchema(avroSchema)); Map<String, Object> nullMap = Maps.newHashMap(); nullMap.put("k1", null); GenericRecord genericRecord = new GenericRecordBuilder(avroSchema) .set("int", null) .set("array", Lists.newArrayList((Object) null)) .set("map", nullMap) .build(); Row expectedRow = Row.withSchema(expectedSchema) .addValue(null) .addValue(Lists.newArrayList((Object) null)) .addValue(nullMap) .build(); assertEquals(expectedRow, AvroUtils.toBeamRowStrict(genericRecord, expectedSchema)); } @Test public void testNullableFieldsInBeamSchema() { Schema beamSchema = Schema.builder() .addNullableField("int", FieldType.INT32) .addArrayField("array", FieldType.INT32.withNullable(true)) .addMapField("map", FieldType.STRING, FieldType.INT32.withNullable(true)) .build(); List<org.apache.avro.Schema.Field> fields = Lists.newArrayList(); fields.add( new org.apache.avro.Schema.Field( "int", ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT)), "", null)); fields.add( new org.apache.avro.Schema.Field( "array", org.apache.avro.Schema.createArray( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); fields.add( new org.apache.avro.Schema.Field( "map", org.apache.avro.Schema.createMap( ReflectData.makeNullable(org.apache.avro.Schema.create(Type.INT))), "", null)); org.apache.avro.Schema avroSchema = org.apache.avro.Schema.createRecord("topLevelRecord", null, null, false, fields); assertEquals(avroSchema, AvroUtils.toAvroSchema(beamSchema)); Map<Utf8, Object> nullMapUtf8 = Maps.newHashMap(); nullMapUtf8.put(new Utf8("k1"), null); Map<String, Object> nullMapString = Maps.newHashMap(); nullMapString.put("k1", null); GenericRecord expectedGenericRecord = new GenericRecordBuilder(avroSchema) .set("int", null) .set("array", Lists.newArrayList((Object) null)) .set("map", nullMapUtf8) .build(); Row row = Row.withSchema(beamSchema) .addValue(null) .addValue(Lists.newArrayList((Object) null)) .addValue(nullMapString) .build(); assertEquals(expectedGenericRecord, AvroUtils.toGenericRecord(row, avroSchema)); } @Test public void testBeamRowToGenericRecord() { GenericRecord genericRecord = AvroUtils.toGenericRecord(getBeamRow(), null); assertEquals(getAvroSchema(), genericRecord.getSchema()); assertEquals(getGenericRecord(), genericRecord); } @Test public void testGenericRecordToBeamRow() { Row row = AvroUtils.toBeamRowStrict(getGenericRecord(), null); assertEquals(getBeamRow(), row); } @Test public void testAvroSchemaCoders() { Pipeline pipeline = Pipeline.create(); org.apache.avro.Schema schema = org.apache.avro.Schema.createRecord( "TestSubRecord", "TestSubRecord doc", "org.apache.beam.sdk.schemas.utils", false, getAvroSubSchemaFields()); GenericRecord record = new GenericRecordBuilder(getAvroSubSchema("simple")) .set("bool", true) .set("int", 42) .build(); PCollection<GenericRecord> records = pipeline.apply(Create.of(record).withCoder(AvroCoder.of(schema))); 
assertFalse(records.hasSchema()); records.setCoder(AvroUtils.schemaCoder(schema)); assertTrue(records.hasSchema()); AvroGeneratedUser user = new AvroGeneratedUser("foo", 42, "green"); PCollection<AvroGeneratedUser> users = pipeline.apply(Create.of(user).withCoder(AvroCoder.of(AvroGeneratedUser.class))); assertFalse(users.hasSchema()); users.setCoder(AvroUtils.schemaCoder((AvroCoder<AvroGeneratedUser>) users.getCoder())); assertTrue(users.hasSchema()); } public static ContainsField containsField(Function<org.apache.avro.Schema, Boolean> predicate) { return new ContainsField(predicate); } public static boolean hasNonNullUnion(org.apache.avro.Schema schema) { if (schema.getType() == Type.UNION) { final List<org.apache.avro.Schema> types = schema.getTypes(); if (types.size() == 2) { return !types.contains(NULL_SCHEMA); } else { return true; } } return false; } static class ContainsField extends BaseMatcher<org.apache.avro.Schema> { private final Function<org.apache.avro.Schema, Boolean> predicate; ContainsField(final Function<org.apache.avro.Schema, Boolean> predicate) { this.predicate = predicate; } @Override public boolean matches(final Object item0) { if (!(item0 instanceof org.apache.avro.Schema)) { return false; } org.apache.avro.Schema item = (org.apache.avro.Schema) item0; if (predicate.apply(item)) { return true; } switch (item.getType()) { case RECORD: return item.getFields().stream().anyMatch(x -> matches(x.schema())); case UNION: return item.getTypes().stream().anyMatch(this::matches); case ARRAY: return matches(item.getElementType()); case MAP: return matches(item.getValueType()); default: return false; } } @Override public void describeTo(final Description description) {} } }
Are there perhaps environments that need a longer time to spin up the services? I would only lower it to 1 minute.
public RemoteEnvironment createEnvironment(Environment environment) throws Exception { Preconditions.checkState( environment .getUrn() .equals(BeamUrns.getUrn(RunnerApi.StandardEnvironments.Environments.DOCKER)), "The passed environment does not contain a DockerPayload."); final RunnerApi.DockerPayload dockerPayload = RunnerApi.DockerPayload.parseFrom(environment.getPayload()); final String workerId = idGenerator.getId(); String containerImage = dockerPayload.getContainerImage(); String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl(); String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl(); String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl(); String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl(); ImmutableList.Builder<String> dockerArgsBuilder = ImmutableList.<String>builder() .addAll(gcsCredentialArgs()) .add("--network=host") .add("--env=DOCKER_MAC_CONTAINER=" + System.getenv("DOCKER_MAC_CONTAINER")); if (!retainDockerContainer) { dockerArgsBuilder.add("--rm"); } List<String> args = ImmutableList.of( String.format("--id=%s", workerId), String.format("--logging_endpoint=%s", loggingEndpoint), String.format("--artifact_endpoint=%s", artifactEndpoint), String.format("--provision_endpoint=%s", provisionEndpoint), String.format("--control_endpoint=%s", controlEndpoint)); LOG.debug("Creating Docker Container with ID {}", workerId); String containerId = null; InstructionRequestHandler instructionHandler = null; try { containerId = docker.runImage(containerImage, dockerArgsBuilder.build(), args); LOG.debug("Created Docker Container with Container ID {}", containerId); while (instructionHandler == null) { Preconditions.checkArgument( docker.isContainerRunning(containerId), "No container running for id " + containerId); try { instructionHandler = clientSource.take(workerId, Duration.ofSeconds(30)); } catch (TimeoutException timeoutEx) { LOG.info( "Still waiting for startup of environment {} for worker id {}", dockerPayload.getContainerImage(), workerId); } catch (InterruptedException interruptEx) { Thread.currentThread().interrupt(); throw new RuntimeException(interruptEx); } } } catch (Exception e) { if (containerId != null) { try { docker.killContainer(containerId); } catch (Exception dockerException) { e.addSuppressed(dockerException); } } throw e; } return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler); }
instructionHandler = clientSource.take(workerId, Duration.ofSeconds(30));
public RemoteEnvironment createEnvironment(Environment environment) throws Exception { Preconditions.checkState( environment .getUrn() .equals(BeamUrns.getUrn(RunnerApi.StandardEnvironments.Environments.DOCKER)), "The passed environment does not contain a DockerPayload."); final RunnerApi.DockerPayload dockerPayload = RunnerApi.DockerPayload.parseFrom(environment.getPayload()); final String workerId = idGenerator.getId(); String containerImage = dockerPayload.getContainerImage(); String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl(); String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl(); String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl(); String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl(); ImmutableList.Builder<String> dockerArgsBuilder = ImmutableList.<String>builder() .addAll(gcsCredentialArgs()) .add("--network=host") .add("--env=DOCKER_MAC_CONTAINER=" + System.getenv("DOCKER_MAC_CONTAINER")); if (!retainDockerContainer) { dockerArgsBuilder.add("--rm"); } List<String> args = ImmutableList.of( String.format("--id=%s", workerId), String.format("--logging_endpoint=%s", loggingEndpoint), String.format("--artifact_endpoint=%s", artifactEndpoint), String.format("--provision_endpoint=%s", provisionEndpoint), String.format("--control_endpoint=%s", controlEndpoint)); LOG.debug("Creating Docker Container with ID {}", workerId); String containerId = null; InstructionRequestHandler instructionHandler = null; try { containerId = docker.runImage(containerImage, dockerArgsBuilder.build(), args); LOG.debug("Created Docker Container with Container ID {}", containerId); while (instructionHandler == null) { try { instructionHandler = clientSource.take(workerId, Duration.ofMinutes(1)); } catch (TimeoutException timeoutEx) { Preconditions.checkArgument( docker.isContainerRunning(containerId), "No container running for id " + containerId); LOG.info( "Still waiting for startup of environment {} for worker id {}", dockerPayload.getContainerImage(), workerId); } catch (InterruptedException interruptEx) { Thread.currentThread().interrupt(); throw new RuntimeException(interruptEx); } } } catch (Exception e) { if (containerId != null) { try { docker.killContainer(containerId); } catch (Exception dockerException) { e.addSuppressed(dockerException); } } throw e; } return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler); }
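As a sketch, the same wait pattern could be pulled into a helper that makes the per-attempt timeout an explicit parameter, should some environments need even longer than one minute to spin up. The class, method names, and the IllegalStateException below are assumptions for illustration, not part of the actual change.

import java.time.Duration;
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

final class StartupWaiter {
    interface TimedSupplier<T> {
        T get(Duration timeout) throws TimeoutException, InterruptedException;
    }

    // Poll the source with a bounded timeout per attempt; only give up when the
    // liveness check fails, i.e. the environment died before it connected back.
    static <T> T waitForStartup(TimedSupplier<T> source,
                                BooleanSupplier stillAlive,
                                Duration perAttemptTimeout) throws InterruptedException {
        while (true) {
            try {
                return source.get(perAttemptTimeout);
            } catch (TimeoutException timedOut) {
                // Checking liveness only after a timeout (as in the change above)
                // avoids failing while the environment is still booting.
                if (!stillAlive.getAsBoolean()) {
                    throw new IllegalStateException("Environment died before startup completed");
                }
            }
        }
    }
}

A call site analogous to the loop above would pass timeout -> clientSource.take(workerId, timeout) as the source and a container-liveness check as stillAlive, with Duration.ofMinutes(1) as the per-attempt timeout.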
class DockerEnvironmentFactory implements EnvironmentFactory { private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class); static DockerEnvironmentFactory forServicesWithDocker( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator, boolean retainDockerContainer) { return new DockerEnvironmentFactory( docker, controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, idGenerator, clientSource, retainDockerContainer); } private final DockerCommand docker; private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer; private final GrpcFnServer<GrpcLoggingService> loggingServiceServer; private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer; private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer; private final IdGenerator idGenerator; private final ControlClientPool.Source clientSource; private final boolean retainDockerContainer; private DockerEnvironmentFactory( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, IdGenerator idGenerator, ControlClientPool.Source clientSource, boolean retainDockerContainer) { this.docker = docker; this.controlServiceServer = controlServiceServer; this.loggingServiceServer = loggingServiceServer; this.retrievalServiceServer = retrievalServiceServer; this.provisioningServiceServer = provisioningServiceServer; this.idGenerator = idGenerator; this.clientSource = clientSource; this.retainDockerContainer = retainDockerContainer; } private List<String> gcsCredentialArgs() { String dockerGcloudConfig = "/root/.config/gcloud"; String localGcloudConfig = firstNonNull( System.getenv("CLOUDSDK_CONFIG"), Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString()); if (Files.exists(Paths.get(localGcloudConfig))) { return ImmutableList.of( "--mount", String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig)); } else { return ImmutableList.of(); } } /** * NOTE: Deployment on Macs is intended for local development. As of 18.03, Docker-for-Mac does * not implement host networking (--networking=host is effectively a no-op). Instead, we use a * special DNS entry that points to the host: * https: * hostname has historically changed between versions, so this is subject to breakages and will * likely only support the latest version at any time.
*/ private static class DockerOnMac { private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal"; private static final boolean RUNNING_INSIDE_DOCKER_ON_MAC = "1".equals(System.getenv("DOCKER_MAC_CONTAINER")); private static final int MAC_PORT_START = 8100; private static final int MAC_PORT_END = 8200; private static final AtomicInteger MAC_PORT = new AtomicInteger(MAC_PORT_START); private static ServerFactory getServerFactory() { ServerFactory.UrlFactory dockerUrlFactory = (host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString(); if (RUNNING_INSIDE_DOCKER_ON_MAC) { return ServerFactory.createWithUrlFactoryAndPortSupplier( dockerUrlFactory, () -> MAC_PORT.getAndUpdate(val -> val == MAC_PORT_END ? MAC_PORT_START : val + 1)); } else { return ServerFactory.createWithUrlFactory(dockerUrlFactory); } } } /** Provider for DockerEnvironmentFactory. */ public static class Provider implements EnvironmentFactory.Provider { private final boolean retainDockerContainer; public Provider(PipelineOptions options) { this.retainDockerContainer = options.as(ManualDockerEnvironmentOptions.class).getRetainDockerContainers(); } @Override public EnvironmentFactory createEnvironmentFactory( GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool clientPool, IdGenerator idGenerator) { return DockerEnvironmentFactory.forServicesWithDocker( DockerCommand.getDefault(), controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, clientPool.getSource(), idGenerator, retainDockerContainer); } @Override public ServerFactory getServerFactory() { switch (getPlatform()) { case LINUX: return ServerFactory.createDefault(); case MAC: return DockerOnMac.getServerFactory(); default: LOG.warn("Unknown Docker platform. Falling back to default server factory"); return ServerFactory.createDefault(); } } private static Platform getPlatform() { String osName = System.getProperty("os.name").toLowerCase(); if (osName.startsWith("mac") || DockerOnMac.RUNNING_INSIDE_DOCKER_ON_MAC) { return Platform.MAC; } else if (osName.startsWith("linux")) { return Platform.LINUX; } return Platform.OTHER; } private enum Platform { MAC, LINUX, OTHER, } } }
class DockerEnvironmentFactory implements EnvironmentFactory { private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class); static DockerEnvironmentFactory forServicesWithDocker( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator, boolean retainDockerContainer) { return new DockerEnvironmentFactory( docker, controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, idGenerator, clientSource, retainDockerContainer); } private final DockerCommand docker; private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer; private final GrpcFnServer<GrpcLoggingService> loggingServiceServer; private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer; private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer; private final IdGenerator idGenerator; private final ControlClientPool.Source clientSource; private final boolean retainDockerContainer; private DockerEnvironmentFactory( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, IdGenerator idGenerator, ControlClientPool.Source clientSource, boolean retainDockerContainer) { this.docker = docker; this.controlServiceServer = controlServiceServer; this.loggingServiceServer = loggingServiceServer; this.retrievalServiceServer = retrievalServiceServer; this.provisioningServiceServer = provisioningServiceServer; this.idGenerator = idGenerator; this.clientSource = clientSource; this.retainDockerContainer = retainDockerContainer; } /** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */ private List<String> gcsCredentialArgs() { String dockerGcloudConfig = "/root/.config/gcloud"; String localGcloudConfig = firstNonNull( System.getenv("CLOUDSDK_CONFIG"), Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString()); if (Files.exists(Paths.get(localGcloudConfig))) { return ImmutableList.of( "--mount", String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig)); } else { return ImmutableList.of(); } } /** * NOTE: Deployment on Macs is intended for local development. As of 18.03, Docker-for-Mac does * not implement host networking (--networking=host is effectively a no-op). Instead, we use a * special DNS entry that points to the host: * https://docs.docker.com/docker-for-mac/networking/ The special * hostname has historically changed between versions, so this is subject to breakages and will * likely only support the latest version at any time. */ private static class DockerOnMac { private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal"; private static final boolean RUNNING_INSIDE_DOCKER_ON_MAC = "1".equals(System.getenv("DOCKER_MAC_CONTAINER")); private static final int MAC_PORT_START = 8100; private static final int MAC_PORT_END = 8200; private static final AtomicInteger MAC_PORT = new AtomicInteger(MAC_PORT_START); private static ServerFactory getServerFactory() { ServerFactory.UrlFactory dockerUrlFactory = (host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString(); if (RUNNING_INSIDE_DOCKER_ON_MAC) { return ServerFactory.createWithUrlFactoryAndPortSupplier( dockerUrlFactory, () -> MAC_PORT.getAndUpdate(val -> val == MAC_PORT_END ? MAC_PORT_START : val + 1)); } else { return ServerFactory.createWithUrlFactory(dockerUrlFactory); } } } /** Provider for DockerEnvironmentFactory. */ public static class Provider implements EnvironmentFactory.Provider { private final boolean retainDockerContainer; public Provider(PipelineOptions options) { this.retainDockerContainer = options.as(ManualDockerEnvironmentOptions.class).getRetainDockerContainers(); } @Override public EnvironmentFactory createEnvironmentFactory( GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool clientPool, IdGenerator idGenerator) { return DockerEnvironmentFactory.forServicesWithDocker( DockerCommand.getDefault(), controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, clientPool.getSource(), idGenerator, retainDockerContainer); } @Override public ServerFactory getServerFactory() { switch (getPlatform()) { case LINUX: return ServerFactory.createDefault(); case MAC: return DockerOnMac.getServerFactory(); default: LOG.warn("Unknown Docker platform. Falling back to default server factory"); return ServerFactory.createDefault(); } } private static Platform getPlatform() { String osName = System.getProperty("os.name").toLowerCase(); if (osName.startsWith("mac") || DockerOnMac.RUNNING_INSIDE_DOCKER_ON_MAC) { return Platform.MAC; } else if (osName.startsWith("linux")) { return Platform.LINUX; } return Platform.OTHER; } private enum Platform { MAC, LINUX, OTHER, } } }
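The DockerOnMac helper in the class above works around Docker-for-Mac's missing host networking by publishing a fixed range of ports (8100-8200) and handing them out in a cycle. Below is a minimal, standalone sketch of that wraparound port supplier; the CyclingPortSupplier name and the small demo range are illustrative additions, not part of the Beam source, but the getAndUpdate wraparound mirrors the MAC_PORT logic shown above.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntSupplier;

// Minimal sketch of the wraparound port supplier used by DockerOnMac above.
// Docker-for-Mac cannot bind servers via host networking, so each server gets
// an explicitly published port from a fixed range that is forwarded into the
// container; the supplier cycles so the range is never exhausted.
public class CyclingPortSupplier implements IntSupplier {
  private final int start;
  private final int end; // inclusive upper bound of the port range
  private final AtomicInteger next;

  public CyclingPortSupplier(int start, int end) {
    this.start = start;
    this.end = end;
    this.next = new AtomicInteger(start);
  }

  @Override
  public int getAsInt() {
    // Same shape as MAC_PORT.getAndUpdate(...) above: atomically return the
    // current port and advance, wrapping from `end` back to `start`.
    return next.getAndUpdate(val -> val == end ? start : val + 1);
  }

  public static void main(String[] args) {
    CyclingPortSupplier ports = new CyclingPortSupplier(8100, 8102);
    // Prints: 8100 8101 8102 8100 -- the supplier wraps after the last port.
    for (int i = 0; i < 4; i++) {
      System.out.print(ports.getAsInt() + " ");
    }
  }
}

Using getAndUpdate keeps the supplier lock-free and thread-safe; ports are reused after a full cycle, which is acceptable for short-lived local development servers but would need collision handling in a long-running deployment.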