comment
stringlengths 16
8.84k
| method_body
stringlengths 37
239k
| target_code
stringlengths 0
242
| method_body_after
stringlengths 29
239k
| context_before
stringlengths 14
424k
| context_after
stringlengths 14
284k
|
|---|---|---|---|---|---|
I am not sure I understand how we obtain this executor, but can we be sure that it never rejects the task (`RejectedExecutionException`)? Or are we intentionally accepting that and propagating it to the caller?
|
/**
 * Invokes the delegate after a random delay in the range {@code [0, maxDelay)} milliseconds.
 * <p>
 * A {@link DelayedExecution} event is fired (both sync and async) before scheduling; observer
 * failures are logged and do not prevent the execution from being scheduled.
 *
 * @param execution the scheduled execution to delay and delegate
 * @return a stage completed when the delegate finishes, or completed exceptionally if the
 *         delegate throws or the executor rejects the task
 */
public CompletionStage<Void> invoke(ScheduledExecution execution) throws Exception {
    // ThreadLocalRandom avoids contention between concurrently-invoked triggers.
    long delay = ThreadLocalRandom.current().nextLong(maxDelay);
    DelayedExecution delayedExecution = new DelayedExecution(execution, delay);
    try {
        event.fire(delayedExecution);
        event.fireAsync(delayedExecution);
    } catch (Exception e) {
        LOG.errorf("Error while firing DelayedExecution event", e);
    }
    CompletableFuture<Void> ret = new CompletableFuture<>();
    try {
        executor.schedule(new Runnable() {
            @Override
            public void run() {
                try {
                    delegate.invoke(execution);
                    ret.complete(null);
                } catch (Exception e) {
                    ret.completeExceptionally(e);
                }
            }
        }, delay, TimeUnit.MILLISECONDS);
    } catch (java.util.concurrent.RejectedExecutionException e) {
        // The executor may reject the task (e.g. while shutting down); surface the rejection
        // through the returned stage instead of letting it propagate out of this method.
        ret.completeExceptionally(e);
    }
    return ret;
}
|
executor.schedule(new Runnable() {
|
/**
 * Invokes the delegate after a random delay in the range {@code [0, maxDelay)} milliseconds.
 * The returned stage completes when the delegate finishes, or completes exceptionally if
 * the delegate throws.
 */
public CompletionStage<Void> invoke(ScheduledExecution execution) throws Exception {
// ThreadLocalRandom avoids contention between concurrently-invoked triggers.
long delay = ThreadLocalRandom.current().nextLong(maxDelay);
DelayedExecution delayedExecution = new DelayedExecution(execution, delay);
try {
// Notify observers both synchronously and asynchronously; observer failures are logged
// and must not prevent the actual execution from being scheduled.
event.fire(delayedExecution);
event.fireAsync(delayedExecution);
} catch (Exception e) {
LOG.errorf("Error while firing DelayedExecution event", e);
}
CompletableFuture<Void> ret = new CompletableFuture<>();
// NOTE(review): executor.schedule may throw RejectedExecutionException (unchecked), which
// would propagate to the caller rather than complete the returned stage — confirm intended.
executor.schedule(new Runnable() {
@Override
public void run() {
try {
delegate.invoke(execution);
ret.complete(null);
} catch (Exception e) {
ret.completeExceptionally(e);
}
}
}, delay, TimeUnit.MILLISECONDS);
return ret;
}
|
class DelayedExecutionInvoker extends DelegateInvoker {
private static final Logger LOG = Logger.getLogger(DelayedExecutionInvoker.class);
private final long maxDelay;
private final ScheduledExecutorService executor;
private final Event<DelayedExecution> event;
public DelayedExecutionInvoker(ScheduledInvoker delegate, long maxDelay, ScheduledExecutorService executor,
Event<DelayedExecution> event) {
super(delegate);
this.maxDelay = maxDelay;
this.executor = executor;
this.event = event;
}
@Override
}
|
class DelayedExecutionInvoker extends DelegateInvoker {
private static final Logger LOG = Logger.getLogger(DelayedExecutionInvoker.class);
private final long maxDelay;
private final ScheduledExecutorService executor;
private final Event<DelayedExecution> event;
public DelayedExecutionInvoker(ScheduledInvoker delegate, long maxDelay, ScheduledExecutorService executor,
Event<DelayedExecution> event) {
super(delegate);
this.maxDelay = maxDelay;
this.executor = executor;
this.event = event;
}
@Override
}
|
This is done to avoid an `if` condition inside the `for` loop. Since we know that if a rest binding pattern exists it will be the last element, we can take advantage of that instead of checking the condition on every iteration.
|
/**
 * Transforms a syntax-tree match pattern node into the corresponding {@link BLangMatchPattern}.
 * Dispatches on {@link SyntaxKind}; unsupported or missing patterns log a diagnostic and
 * return {@code null}, so callers must tolerate a null result.
 */
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
    Location matchPatternPos = matchPattern.location();
    SyntaxKind kind = matchPattern.kind();
    // A missing name in a simple name reference means the pattern could not be parsed.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().isMissing()) {
        dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
        return null;
    }
    // "_" is the wildcard pattern, whether written as a name reference or a bare identifier.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    // Literals and name references become constant patterns.
    if (kind == SyntaxKind.NUMERIC_LITERAL ||
            kind == SyntaxKind.STRING_LITERAL ||
            kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            kind == SyntaxKind.IDENTIFIER_TOKEN ||
            kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.NIL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        BLangConstPattern bLangConstMatchPattern =
                (BLangConstPattern) TreeBuilder.createConstMatchPattern();
        bLangConstMatchPattern.setExpression(createExpression(matchPattern));
        bLangConstMatchPattern.pos = matchPatternPos;
        return bLangConstMatchPattern;
    }
    if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
        BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
                (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
        bLangVarBindingPattern.pos = matchPatternPos;
        bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
        return bLangVarBindingPattern;
    }
    if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
        ErrorMatchPatternNode errorMatchPatternNode = (ErrorMatchPatternNode) matchPattern;
        BLangErrorMatchPattern bLangErrorMatchPattern =
                (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
        bLangErrorMatchPattern.pos = matchPatternPos;
        NameReferenceNode nameReferenceNode;
        if (errorMatchPatternNode.typeReference().isPresent()) {
            nameReferenceNode = errorMatchPatternNode.typeReference().get();
            bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
        }
        if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
            return bLangErrorMatchPattern;
        }
        // Arg list layout: [message [, cause]] [, fields...]; the first field pattern ends
        // the positional section.
        Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
        if (isErrorFieldMatchPattern(node)) {
            createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
            return bLangErrorMatchPattern;
        }
        bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
        if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
            return bLangErrorMatchPattern;
        }
        node = errorMatchPatternNode.argListMatchPatternNode().get(1);
        if (isErrorFieldMatchPattern(node)) {
            createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
            return bLangErrorMatchPattern;
        }
        bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
        createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
        NamedArgMatchPatternNode namedArgMatchPatternNode = (NamedArgMatchPatternNode) matchPattern;
        BLangNamedArgMatchPattern bLangNamedArgMatchPattern =
                (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
        bLangNamedArgMatchPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
        bLangNamedArgMatchPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
        return bLangNamedArgMatchPattern;
    }
    if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
        ListMatchPatternNode listMatchPatternNode = (ListMatchPatternNode) matchPattern;
        BLangListMatchPattern bLangListMatchPattern =
                (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
        bLangListMatchPattern.pos = matchPatternPos;
        SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
        int matchPatternListSize = matchPatterns.size();
        // FIX: guard the empty pattern list; otherwise matchPatterns.get(matchPatternListSize - 1)
        // below is called with index -1.
        if (matchPatternListSize == 0) {
            return bLangListMatchPattern;
        }
        // A rest pattern, if present, is guaranteed to be the last member, so the last element
        // is handled separately instead of checking inside the loop.
        for (int i = 0; i < matchPatternListSize - 1; i++) {
            BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
            if (bLangMemberMatchPattern == null) {
                continue;
            }
            bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
        }
        BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
        if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
            bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
        } else {
            bLangListMatchPattern.addMatchPattern(lastMember);
        }
        return bLangListMatchPattern;
    }
    if (kind == SyntaxKind.REST_MATCH_PATTERN) {
        RestMatchPatternNode restMatchPatternNode = (RestMatchPatternNode) matchPattern;
        BLangRestMatchPattern bLangRestMatchPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
        bLangRestMatchPattern.pos = matchPatternPos;
        SimpleNameReferenceNode variableName = restMatchPatternNode.variableName();
        bLangRestMatchPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name()));
        return bLangRestMatchPattern;
    }
    if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
        MappingMatchPatternNode mappingMatchPatternNode = (MappingMatchPatternNode) matchPattern;
        BLangMappingMatchPattern bLangMappingMatchPattern =
                (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
        bLangMappingMatchPattern.pos = matchPatternPos;
        SeparatedNodeList<FieldMatchPatternNode> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
        int fieldMatchPatternListSize = fieldMatchPatterns.size();
        // FIX: guard the empty field list; otherwise fieldMatchPatterns.get(fieldMatchPatternListSize - 1)
        // below is called with index -1.
        if (fieldMatchPatternListSize == 0) {
            return bLangMappingMatchPattern;
        }
        for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
            bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern)
                    transformMatchPattern(fieldMatchPatterns.get(i)));
        }
        BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
        if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
            bLangMappingMatchPattern.restMatchPattern = (BLangRestMatchPattern) lastMember;
        } else {
            bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern) lastMember);
        }
        return bLangMappingMatchPattern;
    }
    if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
        FieldMatchPatternNode fieldMatchPatternNode = (FieldMatchPatternNode) matchPattern;
        BLangFieldMatchPattern bLangFieldMatchPattern =
                (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
        bLangFieldMatchPattern.fieldName =
                createIdentifier(fieldMatchPatternNode.fieldNameNode());
        bLangFieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
        return bLangFieldMatchPattern;
    }
    dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
    return null;
}
|
}
|
/**
 * Transforms a syntax-tree match pattern node into the corresponding {@link BLangMatchPattern}.
 * Dispatches on {@link SyntaxKind}; unsupported or missing patterns log a diagnostic and
 * return {@code null}, so callers must tolerate a null result.
 */
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
Location matchPatternPos = matchPattern.location();
SyntaxKind kind = matchPattern.kind();
// A missing name in a simple name reference means the pattern could not be parsed.
if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
((SimpleNameReferenceNode) matchPattern).name().isMissing()) {
dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
return null;
}
// "_" is the wildcard pattern, whether written as a name reference or a bare identifier.
if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
BLangWildCardMatchPattern bLangWildCardMatchPattern =
(BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
bLangWildCardMatchPattern.pos = matchPatternPos;
return bLangWildCardMatchPattern;
}
if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
BLangWildCardMatchPattern bLangWildCardMatchPattern =
(BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
bLangWildCardMatchPattern.pos = matchPatternPos;
return bLangWildCardMatchPattern;
}
// Literals and name references become constant patterns.
if (kind == SyntaxKind.NUMERIC_LITERAL ||
kind == SyntaxKind.STRING_LITERAL ||
kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
kind == SyntaxKind.IDENTIFIER_TOKEN ||
kind == SyntaxKind.NULL_LITERAL ||
kind == SyntaxKind.NIL_LITERAL ||
kind == SyntaxKind.BOOLEAN_LITERAL) {
BLangConstPattern bLangConstMatchPattern =
(BLangConstPattern) TreeBuilder.createConstMatchPattern();
bLangConstMatchPattern.setExpression(createExpression(matchPattern));
bLangConstMatchPattern.pos = matchPatternPos;
return bLangConstMatchPattern;
}
if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
(BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
bLangVarBindingPattern.pos = matchPatternPos;
bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
return bLangVarBindingPattern;
}
if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
ErrorMatchPatternNode errorMatchPatternNode = (ErrorMatchPatternNode) matchPattern;
BLangErrorMatchPattern bLangErrorMatchPattern =
(BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
bLangErrorMatchPattern.pos = matchPatternPos;
NameReferenceNode nameReferenceNode;
if (errorMatchPatternNode.typeReference().isPresent()) {
nameReferenceNode = errorMatchPatternNode.typeReference().get();
bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
}
if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
return bLangErrorMatchPattern;
}
// Arg list layout: [message [, cause]] [, fields...]; the first field pattern ends
// the positional section.
Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
if (isErrorFieldMatchPattern(node)) {
createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
return bLangErrorMatchPattern;
}
node = errorMatchPatternNode.argListMatchPatternNode().get(1);
if (isErrorFieldMatchPattern(node)) {
createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
NamedArgMatchPatternNode namedArgMatchPatternNode = (NamedArgMatchPatternNode) matchPattern;
BLangNamedArgMatchPattern bLangNamedArgMatchPattern =
(BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
bLangNamedArgMatchPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
bLangNamedArgMatchPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
return bLangNamedArgMatchPattern;
}
if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
ListMatchPatternNode listMatchPatternNode = (ListMatchPatternNode) matchPattern;
BLangListMatchPattern bLangListMatchPattern =
(BLangListMatchPattern) TreeBuilder.createListMatchPattern();
bLangListMatchPattern.pos = matchPatternPos;
SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
int matchPatternListSize = matchPatterns.size();
// Empty pattern list: nothing to transform (also protects the get(size - 1) below).
if (matchPatternListSize == 0) {
return bLangListMatchPattern;
}
// A rest pattern, if present, is guaranteed to be the last member, so the last element
// is handled separately instead of checking inside the loop.
for (int i = 0; i < matchPatternListSize - 1; i++) {
BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
if (bLangMemberMatchPattern == null) {
continue;
}
bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
}
BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
} else {
bLangListMatchPattern.addMatchPattern(lastMember);
}
return bLangListMatchPattern;
}
if (kind == SyntaxKind.REST_MATCH_PATTERN) {
RestMatchPatternNode restMatchPatternNode = (RestMatchPatternNode) matchPattern;
BLangRestMatchPattern bLangRestMatchPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
bLangRestMatchPattern.pos = matchPatternPos;
SimpleNameReferenceNode variableName = restMatchPatternNode.variableName();
bLangRestMatchPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name()));
return bLangRestMatchPattern;
}
if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
MappingMatchPatternNode mappingMatchPatternNode = (MappingMatchPatternNode) matchPattern;
BLangMappingMatchPattern bLangMappingMatchPattern =
(BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
bLangMappingMatchPattern.pos = matchPatternPos;
SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
int fieldMatchPatternListSize = fieldMatchPatterns.size();
// Empty field list: nothing to transform (also protects the get(size - 1) below).
if (fieldMatchPatternListSize == 0) {
return bLangMappingMatchPattern;
}
for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern)
transformMatchPattern(fieldMatchPatterns.get(i)));
}
BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
} else {
bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember);
}
return bLangMappingMatchPattern;
}
if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
FieldMatchPatternNode fieldMatchPatternNode = (FieldMatchPatternNode) matchPattern;
BLangFieldMatchPattern bLangFieldMatchPattern =
(BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
bLangFieldMatchPattern.fieldName =
createIdentifier(fieldMatchPatternNode.fieldNameNode());
bLangFieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
return bLangFieldMatchPattern;
}
dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
return null;
}
|
class definition
*/
/**
 * Transforms an object-constructor expression into (1) an anonymous, module-level
 * {@link BLangClassDefinition} and (2) a {@link BLangObjectConstructorExpression} whose
 * type-init invokes the generated class by its generated name.
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
anonClass.pos = pos;
BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
objectCtorExpression.pos = pos;
objectCtorExpression.classNode = anonClass;
// The anonymous class gets a generated, package-unique name.
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
anonClass.setName(anonTypeGenName);
anonClass.flagSet.add(Flag.PUBLIC);
Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
typeReference.ifPresent(typeReferenceNode -> {
objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
});
anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
// Hoist the generated class definition to the top level of the module.
addToTop(anonClass);
NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
// Map object-constructor qualifiers (client/isolated/service) to class flags.
for (Token qualifier : objectConstructorQualifierList) {
SyntaxKind kind = qualifier.kind();
if (kind == SyntaxKind.CLIENT_KEYWORD) {
anonClass.flagSet.add(Flag.CLIENT);
objectCtorExpression.isClient = true;
} else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
anonClass.flagSet.add(Flag.ISOLATED);
} else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
anonClass.flagSet.add(SERVICE);
objectCtorExpression.isService = true;
} else {
throw new RuntimeException("Syntax kind is not supported: " + kind);
}
}
// Build the "new <genName>()" type-init expression that instantiates the anonymous class.
BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);
BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
initNode.pos = pos;
initNode.userDefinedType = userDefinedType;
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier pkgAlias = createIdentifier(pos, "");
BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
objectCtorExpression.typeInit = initNode;
return objectCtorExpression;
}
|
class definition
*/
/**
 * Transforms an object-constructor expression into (1) an anonymous, module-level
 * {@link BLangClassDefinition} and (2) a {@link BLangObjectConstructorExpression} whose
 * type-init invokes the generated class by its generated name.
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
anonClass.pos = pos;
BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
objectCtorExpression.pos = pos;
objectCtorExpression.classNode = anonClass;
// The anonymous class gets a generated, package-unique name.
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
anonClass.setName(anonTypeGenName);
anonClass.flagSet.add(Flag.PUBLIC);
Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
typeReference.ifPresent(typeReferenceNode -> {
objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
});
anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
// Hoist the generated class definition to the top level of the module.
addToTop(anonClass);
NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
// Map object-constructor qualifiers (client/isolated/service) to class flags.
for (Token qualifier : objectConstructorQualifierList) {
SyntaxKind kind = qualifier.kind();
if (kind == SyntaxKind.CLIENT_KEYWORD) {
anonClass.flagSet.add(Flag.CLIENT);
objectCtorExpression.isClient = true;
} else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
anonClass.flagSet.add(Flag.ISOLATED);
} else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
anonClass.flagSet.add(SERVICE);
objectCtorExpression.isService = true;
} else {
throw new RuntimeException("Syntax kind is not supported: " + kind);
}
}
// Build the "new <genName>()" type-init expression that instantiates the anonymous class.
BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);
BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
initNode.pos = pos;
initNode.userDefinedType = userDefinedType;
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier pkgAlias = createIdentifier(pos, "");
BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
objectCtorExpression.typeInit = initNode;
return objectCtorExpression;
}
|
I meant the `KeyGenerator` instance — but if it is only created once, then that's okay.
|
/**
 * Generates a fresh 256-bit AES secret key using the JCA default secure randomness source.
 * <p>
 * A new {@link KeyGenerator} is created per call; each invocation returns a distinct key.
 *
 * @return a new 256-bit AES {@link SecretKey}
 * @throws NoSuchAlgorithmException if no provider supports AES (narrowed from the previous
 *         {@code throws Exception}; callers catching {@code Exception} are unaffected)
 */
public static SecretKey generateSecretKey() throws NoSuchAlgorithmException {
    KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
    keyGenerator.init(256);
    return keyGenerator.generateKey();
}
|
keyGenerator.init(256);
|
/**
 * Generates a fresh 256-bit AES secret key; each invocation returns a distinct key.
 *
 * @return a new 256-bit AES {@link SecretKey}
 */
public static SecretKey generateSecretKey() throws Exception {
    // Obtain an AES generator, size it to 256 bits, and produce the key.
    KeyGenerator aesGenerator = KeyGenerator.getInstance("AES");
    aesGenerator.init(256);
    return aesGenerator.generateKey();
}
|
class OidcCommonUtils {
public static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2);
static final byte AMP = '&';
static final byte EQ = '=';
static final String HTTP_SCHEME = "http";
private static final Logger LOG = Logger.getLogger(OidcCommonUtils.class);
// Utility class: private constructor prevents instantiation.
private OidcCommonUtils() {
}
/**
 * Verifies that the given endpoint URL is a well-formed absolute URL.
 * Any parse/conversion failure (Throwable is caught deliberately, since URL conversion can
 * throw unchecked errors as well) is rethrown as a {@link ConfigurationException}.
 */
public static void verifyEndpointUrl(String endpointUrl) {
try {
URI.create(endpointUrl).toURL();
} catch (Throwable ex) {
throw new ConfigurationException(
String.format("'%s' is invalid", endpointUrl), ex);
}
}
/**
 * Validates configuration shared by the OIDC server and client extensions:
 * client-id must be present (unless optional), and the mutually exclusive secret
 * properties must not be set together.
 *
 * @param isServerConfig selects the property prefix used in error messages only
 */
public static void verifyCommonConfiguration(OidcCommonConfig oidcConfig, boolean clientIdOptional,
boolean isServerConfig) {
final String configPrefix = isServerConfig ? "quarkus.oidc." : "quarkus.oidc-client.";
if (!clientIdOptional && !oidcConfig.getClientId().isPresent()) {
throw new ConfigurationException(
String.format("'%sclient-id' property must be configured", configPrefix));
}
Credentials creds = oidcConfig.getCredentials();
// 'secret' and 'client-secret' are two ways to set the same credential — reject both.
if (creds.secret.isPresent() && creds.clientSecret.value.isPresent()) {
throw new ConfigurationException(
String.format(
"'%1$scredentials.secret' and '%1$scredentials.client-secret' properties are mutually exclusive",
configPrefix));
}
// A client secret and a JWT secret are also mutually exclusive.
if ((creds.secret.isPresent() || creds.clientSecret.value.isPresent()) && creds.jwt.secret.isPresent()) {
throw new ConfigurationException(
String.format(
"Use only '%1$scredentials.secret' or '%1$scredentials.client-secret' or '%1$scredentials.jwt.secret' property",
configPrefix));
}
}
/**
 * Returns the path guaranteed to start with a single leading slash.
 */
public static String prependSlash(String path) {
    if (path.startsWith("/")) {
        return path;
    }
    return "/" + path;
}
/**
 * Encodes the given form parameters into an application/x-www-form-urlencoded buffer.
 * Keys are appended without encoding; values are URL-encoded via {@link #urlEncode(String)}.
 */
public static Buffer encodeForm(MultiMap form) {
Buffer buffer = Buffer.buffer();
for (Map.Entry<String, String> entry : form) {
// Separate subsequent pairs with '&'.
if (buffer.length() != 0) {
buffer.appendByte(AMP);
}
buffer.appendString(entry.getKey());
buffer.appendByte(EQ);
buffer.appendString(urlEncode(entry.getValue()));
}
return buffer;
}
/**
 * URL-encodes the given value as UTF-8 (application/x-www-form-urlencoded rules,
 * e.g. a space becomes '+').
 */
public static String urlEncode(String value) {
    // The Charset overload of URLEncoder.encode (Java 10+, already used here) declares no
    // checked exception, so the previous try/catch-and-rethrow was dead code.
    return URLEncoder.encode(value, StandardCharsets.UTF_8);
}
/**
 * Applies OIDC TLS/proxy/pool configuration to the given Vert.x {@link HttpClientOptions}.
 * Trust-all mode takes precedence; otherwise an optional truststore and keystore are loaded.
 */
public static void setHttpClientOptions(OidcCommonConfig oidcConfig, HttpClientOptions options,
        TlsConfiguration defaultTlsConfiguration) {
    var globalTrustAll = defaultTlsConfiguration != null && defaultTlsConfiguration.isTrustAll();
    // Per-tenant verification setting wins over the global trust-all default.
    boolean trustAll = oidcConfig.tls.verification.isPresent() ? oidcConfig.tls.verification.get() == Verification.NONE
            : globalTrustAll;
    if (trustAll) {
        options.setTrustAll(true);
        options.setVerifyHost(false);
    } else if (oidcConfig.tls.trustStoreFile.isPresent()) {
        try {
            byte[] trustStoreData = getFileContent(oidcConfig.tls.trustStoreFile.get());
            io.vertx.core.net.KeyStoreOptions trustStoreOptions = new KeyStoreOptions()
                    .setPassword(oidcConfig.tls.getTrustStorePassword().orElse("password"))
                    .setAlias(oidcConfig.tls.getTrustStoreCertAlias().orElse(null))
                    .setValue(io.vertx.core.buffer.Buffer.buffer(trustStoreData))
                    .setType(getKeyStoreType(oidcConfig.tls.trustStoreFileType, oidcConfig.tls.trustStoreFile.get()))
                    .setProvider(oidcConfig.tls.trustStoreProvider.orElse(null));
            options.setTrustOptions(trustStoreOptions);
            // CERTIFICATE_VALIDATION verifies the certificate chain but not the hostname.
            if (Verification.CERTIFICATE_VALIDATION == oidcConfig.tls.verification.orElse(Verification.REQUIRED)) {
                options.setVerifyHost(false);
            }
        } catch (IOException ex) {
            // FIX: the format string previously had no '%s', so the file path argument was
            // silently dropped from the message.
            throw new ConfigurationException(String.format(
                    "OIDC truststore file %s does not exist or can not be read",
                    oidcConfig.tls.trustStoreFile.get().toString()), ex);
        }
    }
    if (oidcConfig.tls.keyStoreFile.isPresent()) {
        try {
            byte[] keyStoreData = getFileContent(oidcConfig.tls.keyStoreFile.get());
            io.vertx.core.net.KeyStoreOptions keyStoreOptions = new KeyStoreOptions()
                    .setAlias(oidcConfig.tls.keyStoreKeyAlias.orElse(null))
                    .setAliasPassword(oidcConfig.tls.keyStoreKeyPassword.orElse(null))
                    .setValue(io.vertx.core.buffer.Buffer.buffer(keyStoreData))
                    .setType(getKeyStoreType(oidcConfig.tls.keyStoreFileType, oidcConfig.tls.keyStoreFile.get()))
                    .setProvider(oidcConfig.tls.keyStoreProvider.orElse(null));
            if (oidcConfig.tls.keyStorePassword.isPresent()) {
                keyStoreOptions.setPassword(oidcConfig.tls.keyStorePassword.get());
            }
            options.setKeyCertOptions(keyStoreOptions);
        } catch (IOException ex) {
            // FIX: same missing '%s' as the truststore message above.
            throw new ConfigurationException(String.format(
                    "OIDC keystore file %s does not exist or can not be read",
                    oidcConfig.tls.keyStoreFile.get().toString()), ex);
        }
    }
    Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy());
    if (proxyOpt.isPresent()) {
        options.setProxyOptions(proxyOpt.get());
    }
    OptionalInt maxPoolSize = oidcConfig.maxPoolSize;
    if (maxPoolSize.isPresent()) {
        options.setMaxPoolSize(maxPoolSize.getAsInt());
    }
    options.setConnectTimeout((int) oidcConfig.getConnectionTimeout().toMillis());
}
/**
 * Resolves the keystore type: an explicitly configured type wins (upper-cased with a
 * locale-independent rule), otherwise the type is inferred from the file extension
 * ({@code .p12}/{@code .pkcs12}/{@code .pfx} → PKCS12, everything else → JKS).
 */
public static String getKeyStoreType(Optional<String> fileType, Path storePath) {
    if (fileType.isPresent()) {
        // Locale.ROOT avoids locale-sensitive casing surprises (e.g. Turkish dotless-i),
        // which would otherwise corrupt type names like "pkcs12".
        return fileType.get().toUpperCase(java.util.Locale.ROOT);
    }
    final String pathName = storePath.toString();
    if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) {
        return "PKCS12";
    } else {
        return "JKS";
    }
}
/**
 * Returns the configured auth-server URL without a trailing slash.
 * NOTE(review): calls Optional.get() unchecked — presumably callers only reach this after
 * the URL has been validated as present; confirm against call sites.
 */
public static String getAuthServerUrl(OidcCommonConfig oidcConfig) {
return removeLastPathSeparator(oidcConfig.getAuthServerUrl().get());
}
/**
 * Drops a single trailing '/' so endpoint paths can be appended safely.
 */
private static String removeLastPathSeparator(String value) {
    if (value.endsWith("/")) {
        return value.substring(0, value.length() - 1);
    }
    return value;
}
/**
 * Resolves an OIDC endpoint URL: {@code null} when no path is configured, the path itself
 * when it is already absolute, otherwise the path resolved against the base server URL.
 */
public static String getOidcEndpointUrl(String authServerUrl, Optional<String> endpointPath) {
    // A null or empty Optional means the endpoint is not configured at all.
    if (endpointPath == null || !endpointPath.isPresent()) {
        return null;
    }
    String path = endpointPath.get();
    if (isAbsoluteUrl(endpointPath)) {
        return path;
    }
    return authServerUrl + prependSlash(path);
}
/**
 * True when a URL value is present and already absolute (starts with "http", which also
 * covers "https").
 */
public static boolean isAbsoluteUrl(Optional<String> endpointUrl) {
    return endpointUrl.filter(url -> url.startsWith(HTTP_SCHEME)).isPresent();
}
/**
 * Returns the configured connection delay in whole seconds, or 0 when not configured.
 */
private static long getConnectionDelay(OidcCommonConfig oidcConfig) {
return oidcConfig.getConnectionDelay().isPresent()
? oidcConfig.getConnectionDelay().get().getSeconds()
: 0;
}
/**
 * Returns the configured connection delay in milliseconds, logging how many connection
 * attempts that delay allows (one attempt per 2-second backoff window).
 */
public static long getConnectionDelayInMillis(OidcCommonConfig oidcConfig) {
final long connectionDelayInSecs = getConnectionDelay(oidcConfig);
// One retry every 2 seconds fits the CONNECTION_BACKOFF_DURATION used by callers.
final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1;
if (connectionRetryCount > 1) {
LOG.infof("Connecting to OpenId Connect Provider for up to %d times every 2 seconds", connectionRetryCount);
}
return connectionDelayInSecs * 1000;
}
/**
 * Builds Vert.x {@link ProxyOptions} from the OIDC proxy configuration, or an empty
 * Optional when no proxy host is configured.
 */
public static Optional<ProxyOptions> toProxyOptions(OidcCommonConfig.Proxy proxyConfig) {
if (!proxyConfig.host.isPresent()) {
return Optional.empty();
}
JsonObject jsonOptions = new JsonObject();
// The configured host may be a bare host name or a full URI; extract the host part when
// URI parsing succeeds, otherwise use the raw configured value.
String host = URI.create(proxyConfig.host.get()).getHost();
if (host == null) {
host = proxyConfig.host.get();
}
jsonOptions.put("host", host);
jsonOptions.put("port", proxyConfig.port);
if (proxyConfig.username.isPresent()) {
jsonOptions.put("username", proxyConfig.username.get());
}
if (proxyConfig.password.isPresent()) {
jsonOptions.put("password", proxyConfig.password.get());
}
return Optional.of(new ProxyOptions(jsonOptions));
}
// Formats the "OIDC server not available" error message with the offending URL.
// NOTE(review): the sample URL in this format string is truncated in this extract — everything
// after "https:" (likely a "//..." example) was stripped, leaving the literal unterminated.
// Restore the full sample URL from the original source before compiling.
public static String formatConnectionErrorMessage(String authServerUrlString) {
return String.format("OIDC server is not available at the '%s' URL. "
+ "Please make sure it is correct. Note it has to end with a realm value if you work with Keycloak, for example:"
+ " 'https:
}
/**
 * True when a client secret is available (directly, as a value, or via a provider) and the
 * configured client-secret method is BASIC (HTTP Basic authentication).
 */
public static boolean isClientSecretBasicAuthRequired(Credentials creds) {
return creds.secret.isPresent() ||
((creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent())
&& clientSecretMethod(creds) == Secret.Method.BASIC);
}
/**
 * True when any JWT client-authentication material is configured (secret, secret provider,
 * inline key, key file, or key store).
 */
public static boolean isClientJwtAuthRequired(Credentials creds) {
return creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent() || creds.jwt.key.isPresent()
|| creds.jwt.keyFile.isPresent() || creds.jwt.keyStoreFile.isPresent();
}
/**
 * True when a client secret is available and the configured method is POST
 * (secret sent as a form parameter).
 */
public static boolean isClientSecretPostAuthRequired(Credentials creds) {
return (creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent())
&& clientSecretMethod(creds) == Secret.Method.POST;
}
/**
 * True when the POST_JWT method is configured and JWT authentication material is available.
 */
public static boolean isClientSecretPostJwtAuthRequired(Credentials creds) {
return clientSecretMethod(creds) == Secret.Method.POST_JWT && isClientJwtAuthRequired(creds);
}
/**
 * True when the JWT credential is configured to be used as a bearer assertion.
 */
public static boolean isJwtAssertion(Credentials creds) {
return creds.getJwt().isAssertion();
}
/**
 * Resolves the client secret: {@code secret} first, then {@code client-secret}, then the
 * configured CredentialsProvider. May return {@code null} when none is configured.
 */
public static String clientSecret(Credentials creds) {
    // FIX: use orElseGet so the fallback chain — in particular the CredentialsProvider
    // lookup — is only evaluated when 'secret' is absent. The previous orElse(...) form
    // eagerly queried the provider even when 'secret' was already present.
    return creds.secret.orElseGet(
            () -> creds.clientSecret.value.orElseGet(fromCredentialsProvider(creds.clientSecret.provider)));
}
/**
 * Resolves the JWT signing secret: the configured value first, then the CredentialsProvider.
 * May return {@code null} when neither is configured.
 */
public static String jwtSecret(Credentials creds) {
return creds.jwt.secret.orElseGet(fromCredentialsProvider(creds.jwt.secretProvider));
}
/**
 * Resolves a secret for decryption/signing: the client secret first, falling back to the
 * JWT secret. May return {@code null} when neither is configured.
 */
public static String getClientOrJwtSecret(Credentials creds) {
LOG.debug("Trying to get the configured client secret");
String encSecret = clientSecret(creds);
if (encSecret == null) {
LOG.debug("Client secret is not configured, "
+ "trying to get the configured 'client_jwt_secret' secret");
encSecret = jwtSecret(creds);
}
return encSecret;
}
/**
 * Returns the configured client-secret authentication method; BASIC is the default.
 */
public static Secret.Method clientSecretMethod(Credentials creds) {
    // The default is a constant, so the eager orElse form is equivalent to orElseGet here.
    return creds.clientSecret.method.orElse(Secret.Method.BASIC);
}
/**
 * Returns a lazy supplier that resolves a secret from the configured CredentialsProvider,
 * or yields {@code null} when no provider key is configured. The lookup happens only when
 * the supplier is invoked.
 */
private static Supplier<? extends String> fromCredentialsProvider(Provider provider) {
    return () -> {
        if (!provider.key.isPresent()) {
            return null;
        }
        String providerName = provider.name.orElse(null);
        String keyringName = provider.keyringName.orElse(null);
        CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(providerName);
        return credentialsProvider.getCredentials(keyringName).get(provider.key.get());
    };
}
public static Key clientJwtKey(Credentials creds) {
if (creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent()) {
return KeyUtils
.createSecretKeyFromSecret(jwtSecret(creds));
} else {
Key key = null;
try {
if (creds.jwt.getKey().isPresent()) {
key = KeyUtils.tryAsPemSigningPrivateKey(creds.jwt.getKey().get(),
getSignatureAlgorithm(creds, SignatureAlgorithm.RS256));
} else if (creds.jwt.getKeyFile().isPresent()) {
key = KeyUtils.readSigningKey(creds.jwt.getKeyFile().get(), creds.jwt.keyId.orElse(null),
getSignatureAlgorithm(creds, SignatureAlgorithm.RS256));
} else if (creds.jwt.keyStoreFile.isPresent()) {
KeyStore ks = KeyStore.getInstance("JKS");
InputStream is = ResourceUtils.getResourceStream(creds.jwt.keyStoreFile.get());
if (creds.jwt.keyStorePassword.isPresent()) {
ks.load(is, creds.jwt.keyStorePassword.get().toCharArray());
} else {
ks.load(is, null);
}
if (creds.jwt.keyPassword.isPresent()) {
key = ks.getKey(creds.jwt.keyId.get(), creds.jwt.keyPassword.get().toCharArray());
} else {
throw new ConfigurationException(
"When using a key store, the `quarkus.oidc-client.credentials.jwt.key-password` property must be set");
}
}
} catch (Exception ex) {
throw new ConfigurationException("Key can not be loaded", ex);
}
if (key == null) {
throw new ConfigurationException("Key is null");
}
return key;
}
}
public static String signJwtWithKey(OidcCommonConfig oidcConfig, String tokenRequestUri, Key key) {
JwtSignatureBuilder builder = Jwt
.claims(additionalClaims(oidcConfig.credentials.jwt.getClaims()))
.issuer(oidcConfig.credentials.jwt.issuer.orElse(oidcConfig.clientId.get()))
.subject(oidcConfig.credentials.jwt.subject.orElse(oidcConfig.clientId.get()))
.audience(oidcConfig.credentials.jwt.getAudience().isPresent()
? removeLastPathSeparator(oidcConfig.credentials.jwt.getAudience().get())
: tokenRequestUri)
.expiresIn(oidcConfig.credentials.jwt.lifespan)
.jws();
if (oidcConfig.credentials.jwt.getTokenKeyId().isPresent()) {
builder.keyId(oidcConfig.credentials.jwt.getTokenKeyId().get());
}
SignatureAlgorithm signatureAlgorithm = getSignatureAlgorithm(oidcConfig.credentials, null);
if (signatureAlgorithm != null) {
builder.algorithm(signatureAlgorithm);
}
if (key instanceof SecretKey) {
return builder.sign((SecretKey) key);
} else {
return builder.sign((PrivateKey) key);
}
}
    // Widens Map<String, String> to the Map<String, Object> signature expected by the
    // JWT claims builder without copying. The raw cast is safe only if the consumer
    // treats the map as read-only -- assumed here; TODO confirm the builder never
    // inserts non-String values into the map it receives.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static Map<String, Object> additionalClaims(Map<String, String> claims) {
        return (Map) claims;
    }
private static SignatureAlgorithm getSignatureAlgorithm(Credentials credentials, SignatureAlgorithm defaultAlgorithm) {
if (credentials.jwt.getSignatureAlgorithm().isPresent()) {
try {
return SignatureAlgorithm.fromAlgorithm(credentials.jwt.getSignatureAlgorithm().get());
} catch (Exception ex) {
throw new ConfigurationException("Unsupported signature algorithm");
}
} else {
return defaultAlgorithm;
}
}
public static void verifyConfigurationId(String defaultId, String configKey, Optional<String> configId) {
if (configKey.equals(defaultId)) {
throw new ConfigurationException("configuration id '" + configKey + "' duplicates the default configuration id");
}
if (configId.isPresent() && !configKey.equals(configId.get())) {
throw new ConfigurationException("Configuration has 2 different id values: '"
+ configKey + "' and '" + configId.get() + "'");
}
}
public static String initClientSecretBasicAuth(OidcCommonConfig oidcConfig) {
if (isClientSecretBasicAuthRequired(oidcConfig.credentials)) {
return basicSchemeValue(oidcConfig.getClientId().get(), clientSecret(oidcConfig.credentials));
}
return null;
}
public static String basicSchemeValue(String name, String secret) {
return OidcConstants.BASIC_SCHEME + " "
+ Base64.getEncoder().encodeToString((name + ":" + secret).getBytes(StandardCharsets.UTF_8));
}
public static Key initClientJwtKey(OidcCommonConfig oidcConfig) {
if (isClientJwtAuthRequired(oidcConfig.credentials)) {
return clientJwtKey(oidcConfig.credentials);
}
return null;
}
public static Predicate<? super Throwable> oidcEndpointNotAvailable() {
return t -> (t instanceof ConnectException
|| (t instanceof OidcEndpointAccessException && ((OidcEndpointAccessException) t).getErrorStatus() == 404));
}
public static Uni<JsonObject> discoverMetadata(WebClient client, Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters,
OidcRequestContextProperties contextProperties, String authServerUrl,
long connectionDelayInMillisecs, Vertx vertx, boolean blockingDnsLookup) {
final String discoveryUrl = getDiscoveryUri(authServerUrl);
HttpRequest<Buffer> request = client.getAbs(discoveryUrl);
if (!filters.isEmpty()) {
Map<String, Object> newProperties = contextProperties == null ? new HashMap<>()
: new HashMap<>(contextProperties.getAll());
newProperties.put(OidcRequestContextProperties.DISCOVERY_ENDPOINT, discoveryUrl);
OidcRequestContextProperties requestProps = new OidcRequestContextProperties(newProperties);
for (OidcRequestFilter filter : getMatchingOidcRequestFilters(filters, OidcEndpoint.Type.DISCOVERY)) {
filter.filter(request, null, requestProps);
}
}
return sendRequest(vertx, request, blockingDnsLookup).onItem().transform(resp -> {
if (resp.statusCode() == 200) {
return resp.bodyAsJsonObject();
} else {
String errorMessage = resp.bodyAsString();
if (errorMessage != null && !errorMessage.isEmpty()) {
LOG.warnf("Discovery request %s has failed, status code: %d, error message: %s", discoveryUrl,
resp.statusCode(), errorMessage);
} else {
LOG.warnf("Discovery request %s has failed, status code: %d", discoveryUrl, resp.statusCode());
}
throw new OidcEndpointAccessException(resp.statusCode());
}
}).onFailure(oidcEndpointNotAvailable())
.retry()
.withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION)
.expireIn(connectionDelayInMillisecs)
.onFailure().transform(t -> {
LOG.warn("OIDC Server is not available:", t.getCause() != null ? t.getCause() : t);
return new RuntimeException("OIDC Server is not available");
});
}
    /**
     * Builds the well-known discovery endpoint URI for the given authentication server
     * URL. The URL is expected to have no trailing slash (see getAuthServerUrl).
     */
    public static String getDiscoveryUri(String authServerUrl) {
        return authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION;
    }
private static byte[] getFileContent(Path path) throws IOException {
byte[] data;
final InputStream resource = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(ClassPathUtils.toResourceName(path));
if (resource != null) {
try (InputStream is = resource) {
data = doRead(is);
}
} else {
try (InputStream is = Files.newInputStream(path)) {
data = doRead(is);
}
}
return data;
}
private static byte[] doRead(InputStream is) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buf = new byte[1024];
int r;
while ((r = is.read(buf)) > 0) {
out.write(buf, 0, r);
}
return out.toByteArray();
}
public static Map<OidcEndpoint.Type, List<OidcRequestFilter>> getOidcRequestFilters() {
ArcContainer container = Arc.container();
if (container != null) {
Map<OidcEndpoint.Type, List<OidcRequestFilter>> map = new HashMap<>();
for (OidcRequestFilter filter : container.listAll(OidcRequestFilter.class).stream().map(handle -> handle.get())
.collect(Collectors.toList())) {
OidcEndpoint endpoint = ClientProxy.unwrap(filter).getClass().getAnnotation(OidcEndpoint.class);
if (endpoint != null) {
for (OidcEndpoint.Type type : endpoint.value()) {
map.computeIfAbsent(type, k -> new ArrayList<OidcRequestFilter>()).add(filter);
}
} else {
map.computeIfAbsent(OidcEndpoint.Type.ALL, k -> new ArrayList<OidcRequestFilter>()).add(filter);
}
}
return map;
}
return Map.of();
}
public static List<OidcRequestFilter> getMatchingOidcRequestFilters(Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters,
OidcEndpoint.Type type) {
List<OidcRequestFilter> typeSpecific = filters.get(type);
List<OidcRequestFilter> all = filters.get(OidcEndpoint.Type.ALL);
if (typeSpecific == null && all == null) {
return List.of();
}
if (typeSpecific != null && all == null) {
return typeSpecific;
} else if (typeSpecific == null && all != null) {
return all;
} else {
List<OidcRequestFilter> combined = new ArrayList<>(typeSpecific.size() + all.size());
combined.addAll(typeSpecific);
combined.addAll(all);
return combined;
}
}
public static Uni<HttpResponse<Buffer>> sendRequest(io.vertx.core.Vertx vertx, HttpRequest<Buffer> request,
boolean blockingDnsLookup) {
if (blockingDnsLookup) {
return sendRequest(new Vertx(vertx), request, true);
} else {
return request.send();
}
}
public static Uni<HttpResponse<Buffer>> sendRequest(Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) {
if (blockingDnsLookup) {
return vertx.executeBlocking(new Callable<Void>() {
@Override
public Void call() {
try {
InetAddress.getByName(request.host());
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
return null;
}
}).flatMap(new Function<Void, Uni<? extends HttpResponse<Buffer>>>() {
@Override
public Uni<? extends HttpResponse<Buffer>> apply(Void unused) {
return request.send();
}
});
} else {
return request.send();
}
}
}
|
class OidcCommonUtils {
public static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2);
static final byte AMP = '&';
static final byte EQ = '=';
static final String HTTP_SCHEME = "http";
private static final Logger LOG = Logger.getLogger(OidcCommonUtils.class);
private OidcCommonUtils() {
}
public static void verifyEndpointUrl(String endpointUrl) {
try {
URI.create(endpointUrl).toURL();
} catch (Throwable ex) {
throw new ConfigurationException(
String.format("'%s' is invalid", endpointUrl), ex);
}
}
    /**
     * Validates configuration shared by the OIDC server and OIDC client extensions:
     * a client id must be set (unless optional), and the different secret properties
     * are mutually exclusive.
     *
     * @param oidcConfig the common OIDC configuration to validate
     * @param clientIdOptional whether a missing client id is acceptable
     * @param isServerConfig selects the property prefix used in error messages
     * @throws ConfigurationException when the configuration is inconsistent
     */
    public static void verifyCommonConfiguration(OidcCommonConfig oidcConfig, boolean clientIdOptional,
            boolean isServerConfig) {
        final String configPrefix = isServerConfig ? "quarkus.oidc." : "quarkus.oidc-client.";
        if (!clientIdOptional && !oidcConfig.getClientId().isPresent()) {
            throw new ConfigurationException(
                    String.format("'%sclient-id' property must be configured", configPrefix));
        }
        Credentials creds = oidcConfig.getCredentials();
        // `secret` is a shortcut for `client-secret.value`; configuring both is ambiguous.
        if (creds.secret.isPresent() && creds.clientSecret.value.isPresent()) {
            throw new ConfigurationException(
                    String.format(
                            "'%1$scredentials.secret' and '%1$scredentials.client-secret' properties are mutually exclusive",
                            configPrefix));
        }
        // A client secret and a JWT secret select different authentication methods.
        if ((creds.secret.isPresent() || creds.clientSecret.value.isPresent()) && creds.jwt.secret.isPresent()) {
            throw new ConfigurationException(
                    String.format(
                            "Use only '%1$scredentials.secret' or '%1$scredentials.client-secret' or '%1$scredentials.jwt.secret' property",
                            configPrefix));
        }
    }
public static String prependSlash(String path) {
return !path.startsWith("/") ? "/" + path : path;
}
public static Buffer encodeForm(MultiMap form) {
Buffer buffer = Buffer.buffer();
for (Map.Entry<String, String> entry : form) {
if (buffer.length() != 0) {
buffer.appendByte(AMP);
}
buffer.appendString(entry.getKey());
buffer.appendByte(EQ);
buffer.appendString(urlEncode(entry.getValue()));
}
return buffer;
}
public static String urlEncode(String value) {
try {
return URLEncoder.encode(value, StandardCharsets.UTF_8);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public static void setHttpClientOptions(OidcCommonConfig oidcConfig, HttpClientOptions options,
TlsConfiguration defaultTlsConfiguration) {
var globalTrustAll = defaultTlsConfiguration != null && defaultTlsConfiguration.isTrustAll();
boolean trustAll = oidcConfig.tls.verification.isPresent() ? oidcConfig.tls.verification.get() == Verification.NONE
: globalTrustAll;
if (trustAll) {
options.setTrustAll(true);
options.setVerifyHost(false);
} else if (oidcConfig.tls.trustStoreFile.isPresent()) {
try {
byte[] trustStoreData = getFileContent(oidcConfig.tls.trustStoreFile.get());
io.vertx.core.net.KeyStoreOptions trustStoreOptions = new KeyStoreOptions()
.setPassword(oidcConfig.tls.getTrustStorePassword().orElse("password"))
.setAlias(oidcConfig.tls.getTrustStoreCertAlias().orElse(null))
.setValue(io.vertx.core.buffer.Buffer.buffer(trustStoreData))
.setType(getKeyStoreType(oidcConfig.tls.trustStoreFileType, oidcConfig.tls.trustStoreFile.get()))
.setProvider(oidcConfig.tls.trustStoreProvider.orElse(null));
options.setTrustOptions(trustStoreOptions);
if (Verification.CERTIFICATE_VALIDATION == oidcConfig.tls.verification.orElse(Verification.REQUIRED)) {
options.setVerifyHost(false);
}
} catch (IOException ex) {
throw new ConfigurationException(String.format(
"OIDC truststore file does not exist or can not be read",
oidcConfig.tls.trustStoreFile.get().toString()), ex);
}
}
if (oidcConfig.tls.keyStoreFile.isPresent()) {
try {
byte[] keyStoreData = getFileContent(oidcConfig.tls.keyStoreFile.get());
io.vertx.core.net.KeyStoreOptions keyStoreOptions = new KeyStoreOptions()
.setAlias(oidcConfig.tls.keyStoreKeyAlias.orElse(null))
.setAliasPassword(oidcConfig.tls.keyStoreKeyPassword.orElse(null))
.setValue(io.vertx.core.buffer.Buffer.buffer(keyStoreData))
.setType(getKeyStoreType(oidcConfig.tls.keyStoreFileType, oidcConfig.tls.keyStoreFile.get()))
.setProvider(oidcConfig.tls.keyStoreProvider.orElse(null));
if (oidcConfig.tls.keyStorePassword.isPresent()) {
keyStoreOptions.setPassword(oidcConfig.tls.keyStorePassword.get());
}
options.setKeyCertOptions(keyStoreOptions);
} catch (IOException ex) {
throw new ConfigurationException(String.format(
"OIDC keystore file does not exist or can not be read",
oidcConfig.tls.keyStoreFile.get().toString()), ex);
}
}
Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy());
if (proxyOpt.isPresent()) {
options.setProxyOptions(proxyOpt.get());
}
OptionalInt maxPoolSize = oidcConfig.maxPoolSize;
if (maxPoolSize.isPresent()) {
options.setMaxPoolSize(maxPoolSize.getAsInt());
}
options.setConnectTimeout((int) oidcConfig.getConnectionTimeout().toMillis());
}
public static String getKeyStoreType(Optional<String> fileType, Path storePath) {
if (fileType.isPresent()) {
return fileType.get().toUpperCase();
}
final String pathName = storePath.toString();
if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) {
return "PKCS12";
} else {
return "JKS";
}
}
public static String getAuthServerUrl(OidcCommonConfig oidcConfig) {
return removeLastPathSeparator(oidcConfig.getAuthServerUrl().get());
}
private static String removeLastPathSeparator(String value) {
return value.endsWith("/") ? value.substring(0, value.length() - 1) : value;
}
public static String getOidcEndpointUrl(String authServerUrl, Optional<String> endpointPath) {
if (endpointPath != null && endpointPath.isPresent()) {
return isAbsoluteUrl(endpointPath) ? endpointPath.get() : authServerUrl + prependSlash(endpointPath.get());
} else {
return null;
}
}
public static boolean isAbsoluteUrl(Optional<String> endpointUrl) {
return endpointUrl.isPresent() && endpointUrl.get().startsWith(HTTP_SCHEME);
}
    // Connection delay in seconds that the application is willing to wait for the OIDC
    // server to become available; 0 when not configured.
    private static long getConnectionDelay(OidcCommonConfig oidcConfig) {
        return oidcConfig.getConnectionDelay().isPresent()
                ? oidcConfig.getConnectionDelay().get().getSeconds()
                : 0;
    }

    /**
     * Converts the configured connection delay to milliseconds, logging how many
     * retry attempts the delay allows at the fixed 2-second backoff
     * (see CONNECTION_BACKOFF_DURATION).
     */
    public static long getConnectionDelayInMillis(OidcCommonConfig oidcConfig) {
        final long connectionDelayInSecs = getConnectionDelay(oidcConfig);
        final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1;
        if (connectionRetryCount > 1) {
            LOG.infof("Connecting to OpenId Connect Provider for up to %d times every 2 seconds", connectionRetryCount);
        }
        return connectionDelayInSecs * 1000;
    }
public static Optional<ProxyOptions> toProxyOptions(OidcCommonConfig.Proxy proxyConfig) {
if (!proxyConfig.host.isPresent()) {
return Optional.empty();
}
JsonObject jsonOptions = new JsonObject();
String host = URI.create(proxyConfig.host.get()).getHost();
if (host == null) {
host = proxyConfig.host.get();
}
jsonOptions.put("host", host);
jsonOptions.put("port", proxyConfig.port);
if (proxyConfig.username.isPresent()) {
jsonOptions.put("username", proxyConfig.username.get());
}
if (proxyConfig.password.isPresent()) {
jsonOptions.put("password", proxyConfig.password.get());
}
return Optional.of(new ProxyOptions(jsonOptions));
}
public static String formatConnectionErrorMessage(String authServerUrlString) {
return String.format("OIDC server is not available at the '%s' URL. "
+ "Please make sure it is correct. Note it has to end with a realm value if you work with Keycloak, for example:"
+ " 'https:
}
public static boolean isClientSecretBasicAuthRequired(Credentials creds) {
return creds.secret.isPresent() ||
((creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent())
&& clientSecretMethod(creds) == Secret.Method.BASIC);
}
public static boolean isClientJwtAuthRequired(Credentials creds) {
return creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent() || creds.jwt.key.isPresent()
|| creds.jwt.keyFile.isPresent() || creds.jwt.keyStoreFile.isPresent();
}
public static boolean isClientSecretPostAuthRequired(Credentials creds) {
return (creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent())
&& clientSecretMethod(creds) == Secret.Method.POST;
}
public static boolean isClientSecretPostJwtAuthRequired(Credentials creds) {
return clientSecretMethod(creds) == Secret.Method.POST_JWT && isClientJwtAuthRequired(creds);
}
public static boolean isJwtAssertion(Credentials creds) {
return creds.getJwt().isAssertion();
}
public static String clientSecret(Credentials creds) {
return creds.secret.orElse(creds.clientSecret.value.orElseGet(fromCredentialsProvider(creds.clientSecret.provider)));
}
public static String jwtSecret(Credentials creds) {
return creds.jwt.secret.orElseGet(fromCredentialsProvider(creds.jwt.secretProvider));
}
public static String getClientOrJwtSecret(Credentials creds) {
LOG.debug("Trying to get the configured client secret");
String encSecret = clientSecret(creds);
if (encSecret == null) {
LOG.debug("Client secret is not configured, "
+ "trying to get the configured 'client_jwt_secret' secret");
encSecret = jwtSecret(creds);
}
return encSecret;
}
public static Secret.Method clientSecretMethod(Credentials creds) {
return creds.clientSecret.method.orElseGet(() -> Secret.Method.BASIC);
}
private static Supplier<? extends String> fromCredentialsProvider(Provider provider) {
return new Supplier<String>() {
@Override
public String get() {
if (provider.key.isPresent()) {
String providerName = provider.name.orElse(null);
String keyringName = provider.keyringName.orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(providerName);
return credentialsProvider.getCredentials(keyringName).get(provider.key.get());
}
return null;
}
};
}
public static Key clientJwtKey(Credentials creds) {
if (creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent()) {
return KeyUtils
.createSecretKeyFromSecret(jwtSecret(creds));
} else {
Key key = null;
try {
if (creds.jwt.getKey().isPresent()) {
key = KeyUtils.tryAsPemSigningPrivateKey(creds.jwt.getKey().get(),
getSignatureAlgorithm(creds, SignatureAlgorithm.RS256));
} else if (creds.jwt.getKeyFile().isPresent()) {
key = KeyUtils.readSigningKey(creds.jwt.getKeyFile().get(), creds.jwt.keyId.orElse(null),
getSignatureAlgorithm(creds, SignatureAlgorithm.RS256));
} else if (creds.jwt.keyStoreFile.isPresent()) {
KeyStore ks = KeyStore.getInstance("JKS");
InputStream is = ResourceUtils.getResourceStream(creds.jwt.keyStoreFile.get());
if (creds.jwt.keyStorePassword.isPresent()) {
ks.load(is, creds.jwt.keyStorePassword.get().toCharArray());
} else {
ks.load(is, null);
}
if (creds.jwt.keyPassword.isPresent()) {
key = ks.getKey(creds.jwt.keyId.get(), creds.jwt.keyPassword.get().toCharArray());
} else {
throw new ConfigurationException(
"When using a key store, the `quarkus.oidc-client.credentials.jwt.key-password` property must be set");
}
}
} catch (Exception ex) {
throw new ConfigurationException("Key can not be loaded", ex);
}
if (key == null) {
throw new ConfigurationException("Key is null");
}
return key;
}
}
public static String signJwtWithKey(OidcCommonConfig oidcConfig, String tokenRequestUri, Key key) {
JwtSignatureBuilder builder = Jwt
.claims(additionalClaims(oidcConfig.credentials.jwt.getClaims()))
.issuer(oidcConfig.credentials.jwt.issuer.orElse(oidcConfig.clientId.get()))
.subject(oidcConfig.credentials.jwt.subject.orElse(oidcConfig.clientId.get()))
.audience(oidcConfig.credentials.jwt.getAudience().isPresent()
? removeLastPathSeparator(oidcConfig.credentials.jwt.getAudience().get())
: tokenRequestUri)
.expiresIn(oidcConfig.credentials.jwt.lifespan)
.jws();
if (oidcConfig.credentials.jwt.getTokenKeyId().isPresent()) {
builder.keyId(oidcConfig.credentials.jwt.getTokenKeyId().get());
}
SignatureAlgorithm signatureAlgorithm = getSignatureAlgorithm(oidcConfig.credentials, null);
if (signatureAlgorithm != null) {
builder.algorithm(signatureAlgorithm);
}
if (key instanceof SecretKey) {
return builder.sign((SecretKey) key);
} else {
return builder.sign((PrivateKey) key);
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private static Map<String, Object> additionalClaims(Map<String, String> claims) {
return (Map) claims;
}
private static SignatureAlgorithm getSignatureAlgorithm(Credentials credentials, SignatureAlgorithm defaultAlgorithm) {
if (credentials.jwt.getSignatureAlgorithm().isPresent()) {
try {
return SignatureAlgorithm.fromAlgorithm(credentials.jwt.getSignatureAlgorithm().get());
} catch (Exception ex) {
throw new ConfigurationException("Unsupported signature algorithm");
}
} else {
return defaultAlgorithm;
}
}
public static void verifyConfigurationId(String defaultId, String configKey, Optional<String> configId) {
if (configKey.equals(defaultId)) {
throw new ConfigurationException("configuration id '" + configKey + "' duplicates the default configuration id");
}
if (configId.isPresent() && !configKey.equals(configId.get())) {
throw new ConfigurationException("Configuration has 2 different id values: '"
+ configKey + "' and '" + configId.get() + "'");
}
}
public static String initClientSecretBasicAuth(OidcCommonConfig oidcConfig) {
if (isClientSecretBasicAuthRequired(oidcConfig.credentials)) {
return basicSchemeValue(oidcConfig.getClientId().get(), clientSecret(oidcConfig.credentials));
}
return null;
}
public static String basicSchemeValue(String name, String secret) {
return OidcConstants.BASIC_SCHEME + " "
+ Base64.getEncoder().encodeToString((name + ":" + secret).getBytes(StandardCharsets.UTF_8));
}
public static Key initClientJwtKey(OidcCommonConfig oidcConfig) {
if (isClientJwtAuthRequired(oidcConfig.credentials)) {
return clientJwtKey(oidcConfig.credentials);
}
return null;
}
public static Predicate<? super Throwable> oidcEndpointNotAvailable() {
return t -> (t instanceof ConnectException
|| (t instanceof OidcEndpointAccessException && ((OidcEndpointAccessException) t).getErrorStatus() == 404));
}
public static Uni<JsonObject> discoverMetadata(WebClient client, Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters,
OidcRequestContextProperties contextProperties, String authServerUrl,
long connectionDelayInMillisecs, Vertx vertx, boolean blockingDnsLookup) {
final String discoveryUrl = getDiscoveryUri(authServerUrl);
HttpRequest<Buffer> request = client.getAbs(discoveryUrl);
if (!filters.isEmpty()) {
Map<String, Object> newProperties = contextProperties == null ? new HashMap<>()
: new HashMap<>(contextProperties.getAll());
newProperties.put(OidcRequestContextProperties.DISCOVERY_ENDPOINT, discoveryUrl);
OidcRequestContextProperties requestProps = new OidcRequestContextProperties(newProperties);
for (OidcRequestFilter filter : getMatchingOidcRequestFilters(filters, OidcEndpoint.Type.DISCOVERY)) {
filter.filter(request, null, requestProps);
}
}
return sendRequest(vertx, request, blockingDnsLookup).onItem().transform(resp -> {
if (resp.statusCode() == 200) {
return resp.bodyAsJsonObject();
} else {
String errorMessage = resp.bodyAsString();
if (errorMessage != null && !errorMessage.isEmpty()) {
LOG.warnf("Discovery request %s has failed, status code: %d, error message: %s", discoveryUrl,
resp.statusCode(), errorMessage);
} else {
LOG.warnf("Discovery request %s has failed, status code: %d", discoveryUrl, resp.statusCode());
}
throw new OidcEndpointAccessException(resp.statusCode());
}
}).onFailure(oidcEndpointNotAvailable())
.retry()
.withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION)
.expireIn(connectionDelayInMillisecs)
.onFailure().transform(t -> {
LOG.warn("OIDC Server is not available:", t.getCause() != null ? t.getCause() : t);
return new RuntimeException("OIDC Server is not available");
});
}
public static String getDiscoveryUri(String authServerUrl) {
return authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION;
}
private static byte[] getFileContent(Path path) throws IOException {
byte[] data;
final InputStream resource = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(ClassPathUtils.toResourceName(path));
if (resource != null) {
try (InputStream is = resource) {
data = doRead(is);
}
} else {
try (InputStream is = Files.newInputStream(path)) {
data = doRead(is);
}
}
return data;
}
private static byte[] doRead(InputStream is) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buf = new byte[1024];
int r;
while ((r = is.read(buf)) > 0) {
out.write(buf, 0, r);
}
return out.toByteArray();
}
public static Map<OidcEndpoint.Type, List<OidcRequestFilter>> getOidcRequestFilters() {
ArcContainer container = Arc.container();
if (container != null) {
Map<OidcEndpoint.Type, List<OidcRequestFilter>> map = new HashMap<>();
for (OidcRequestFilter filter : container.listAll(OidcRequestFilter.class).stream().map(handle -> handle.get())
.collect(Collectors.toList())) {
OidcEndpoint endpoint = ClientProxy.unwrap(filter).getClass().getAnnotation(OidcEndpoint.class);
if (endpoint != null) {
for (OidcEndpoint.Type type : endpoint.value()) {
map.computeIfAbsent(type, k -> new ArrayList<OidcRequestFilter>()).add(filter);
}
} else {
map.computeIfAbsent(OidcEndpoint.Type.ALL, k -> new ArrayList<OidcRequestFilter>()).add(filter);
}
}
return map;
}
return Map.of();
}
public static List<OidcRequestFilter> getMatchingOidcRequestFilters(Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters,
OidcEndpoint.Type type) {
List<OidcRequestFilter> typeSpecific = filters.get(type);
List<OidcRequestFilter> all = filters.get(OidcEndpoint.Type.ALL);
if (typeSpecific == null && all == null) {
return List.of();
}
if (typeSpecific != null && all == null) {
return typeSpecific;
} else if (typeSpecific == null && all != null) {
return all;
} else {
List<OidcRequestFilter> combined = new ArrayList<>(typeSpecific.size() + all.size());
combined.addAll(typeSpecific);
combined.addAll(all);
return combined;
}
}
public static Uni<HttpResponse<Buffer>> sendRequest(io.vertx.core.Vertx vertx, HttpRequest<Buffer> request,
boolean blockingDnsLookup) {
if (blockingDnsLookup) {
return sendRequest(new Vertx(vertx), request, true);
} else {
return request.send();
}
}
public static Uni<HttpResponse<Buffer>> sendRequest(Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) {
if (blockingDnsLookup) {
return vertx.executeBlocking(new Callable<Void>() {
@Override
public Void call() {
try {
InetAddress.getByName(request.host());
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
return null;
}
}).flatMap(new Function<Void, Uni<? extends HttpResponse<Buffer>>>() {
@Override
public Uni<? extends HttpResponse<Buffer>> apply(Void unused) {
return request.send();
}
});
} else {
return request.send();
}
}
}
|
Is it intended that we ignore the non-record member types of union types? If so, shall we clearly mention that in the documentation?
|
    /**
     * Rewrites fully qualified module references of the form {@code org/module:version}
     * inside a signature, replacing each occurrence with the module prefix resolved for
     * the current document (or stripping the qualifier when no prefix is required).
     *
     * @param context document service context used to resolve module prefixes
     * @param signature signature text to rewrite
     * @return the signature with module qualifiers replaced by import prefixes
     */
    public static String getModifiedSignature(DocumentServiceContext context, String signature) {
        Matcher matcher = TYPE_NAME_DECOMPOSE_PATTERN.matcher(signature);
        // The matcher iterates over the original `signature` snapshot while the local
        // variable is re-assigned to the rewritten string; group values therefore
        // always refer to the original text, which keeps the iteration well-defined.
        while (matcher.find()) {
            String orgName = matcher.group(1);
            String moduleName = matcher.group(2);
            String matchedString = matcher.group();
            String modulePrefix = getModulePrefix(context, orgName, moduleName);
            // When no prefix applies, also remove the trailing version separator (':').
            String replaceText = modulePrefix.isEmpty() ? matchedString + Names.VERSION_SEPARATOR : matchedString;
            signature = signature.replace(replaceText, modulePrefix);
        }
        return signature;
    }
|
}
|
public static String getModifiedSignature(DocumentServiceContext context, String signature) {
Matcher matcher = TYPE_NAME_DECOMPOSE_PATTERN.matcher(signature);
while (matcher.find()) {
String orgName = matcher.group(1);
String moduleName = matcher.group(2);
String matchedString = matcher.group();
String modulePrefix = getModulePrefix(context, orgName, moduleName);
String replaceText = modulePrefix.isEmpty() ? matchedString + Names.VERSION_SEPARATOR : matchedString;
signature = signature.replace(replaceText, modulePrefix);
}
return signature;
}
|
class CommonUtil {
public static final String MD_LINE_SEPARATOR = " " + System.lineSeparator();
public static final String LINE_SEPARATOR = System.lineSeparator();
public static final String FILE_SEPARATOR = File.separator;
public static final Pattern MD_NEW_LINE_PATTERN = Pattern.compile("\\s\\s\\r\\n?|\\s\\s\\n|\\r\\n?|\\n");
public static final String BALLERINA_HOME;
public static final boolean COMPILE_OFFLINE;
public static final String BALLERINA_CMD;
public static final String URI_SCHEME_BALA = "bala";
public static final String URI_SCHEME_EXPR = "expr";
public static final String URI_SCHEME_FILE = "file";
public static final String LANGUAGE_ID_BALLERINA = "ballerina";
public static final String LANGUAGE_ID_TOML = "toml";
public static final String MARKDOWN_MARKUP_KIND = "markdown";
public static final String BALLERINA_ORG_NAME = "ballerina";
public static final String SDK_VERSION = System.getProperty("ballerina.version");
public static final String EXPR_SCHEME = "expr";
public static final List<String> PRE_DECLARED_LANG_LIBS = Arrays.asList("lang.boolean", "lang.decimal",
"lang.error", "lang.float", "lang.future", "lang.int", "lang.map", "lang.object", "lang.stream",
"lang.string", "lang.table", "lang.transaction", "lang.typedesc", "lang.xml");
public static final List<String> BALLERINA_KEYWORDS = SyntaxInfo.keywords();
public static final Set<SyntaxKind> QUALIFIER_KINDS = Set.of(SyntaxKind.SERVICE_KEYWORD,
SyntaxKind.CLIENT_KEYWORD, SyntaxKind.ISOLATED_KEYWORD, SyntaxKind.TRANSACTIONAL_KEYWORD,
SyntaxKind.PUBLIC_KEYWORD, SyntaxKind.PRIVATE_KEYWORD);
public static final String SELF_KW = "self";
private static final Pattern TYPE_NAME_DECOMPOSE_PATTERN = Pattern.compile("([\\w_.]*)/([\\w._]*):([\\w.-]*)");
static {
BALLERINA_HOME = System.getProperty("ballerina.home");
String onlineCompilation = System.getProperty("ls.compilation.online");
COMPILE_OFFLINE = !Boolean.parseBoolean(onlineCompilation);
BALLERINA_CMD = BALLERINA_HOME + File.separator + "bin" + File.separator + "bal" +
(SystemUtils.IS_OS_WINDOWS ? ".bat" : "");
}
private CommonUtil() {
}
    /**
     * Converts a syntax-node line range into an lsp4j range.
     *
     * @param lineRange line range to convert
     * @return {@link Range} converted range
     */
    public static Range toRange(LineRange lineRange) {
        return new Range(toPosition(lineRange.startLine()), toPosition(lineRange.endLine()));
    }
    /**
     * Converts a syntax-node line position into an lsp4j position.
     *
     * @param linePosition line position to convert
     * @return {@link Position} converted position
     */
    public static Position toPosition(LinePosition linePosition) {
        return new Position(linePosition.line(), linePosition.offset());
    }
/**
* Get the text edit for an auto import statement.
* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular
* check should be done before usage
*
* @param orgName package org name
* @param pkgName package name
* @param context Language server context
* @return {@link List} List of Text Edits to apply
*/
public static List<TextEdit> getAutoImportTextEdits(@Nonnull String orgName, String pkgName,
DocumentServiceContext context) {
    // Delegate to the alias-aware overload with an empty alias so the import-statement
    // construction and insert-position logic live in exactly one place. With an empty
    // alias the overload produces the identical "import org/pkg;" statement.
    return getAutoImportTextEdits(orgName, pkgName, "", context);
}
/**
* Get the text edit for an auto import statement.
* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular
* check should be done before usage
*
* @param orgName package org name
* @param pkgName package name
* @param alias import alias
* @param context Language server context
* @return {@link List} List of Text Edits to apply
*/
public static List<TextEdit> getAutoImportTextEdits(@Nonnull String orgName, String pkgName, String alias,
DocumentServiceContext context) {
    Map<ImportDeclarationNode, ModuleSymbol> currentDocImports = context.currentDocImportsMap();
    // Insert at column 0 of the line where the last existing import ends,
    // or at the very top of the file when there are no imports yet.
    Optional<ImportDeclarationNode> lastImport = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));
    int insertLine = lastImport.map(node -> node.lineRange().endLine().line()).orElse(0);
    Position insertPosition = new Position(insertLine, 0);
    // Assemble "import [org/]pkg[ as alias];" followed by a line separator.
    String qualifiedName = (orgName.isEmpty() ? orgName : orgName + SLASH_KEYWORD_KEY) + pkgName;
    String aliasPart = alias.isEmpty() ? "" : " as " + alias;
    String importStatement = ItemResolverConstants.IMPORT + " " + qualifiedName + aliasPart
            + SEMI_COLON_SYMBOL_KEY + CommonUtil.LINE_SEPARATOR;
    return Collections.singletonList(new TextEdit(new Range(insertPosition, insertPosition), importStatement));
}
/**
* Get the default value for the given BType.
*
* @param bType Type descriptor to get the default value
* @param offset snippet offset.
* @return {@link String} Default value as a String
*/
public static Optional<String> getDefaultValueForType(TypeSymbol bType, int offset) {
// Delegates to the 3-arg overload; the boolean presumably enables snippet/template
// output — TODO confirm against the 3-arg overload's parameter documentation.
return getDefaultValueForType(bType, true, offset);
}
public static Optional<String> getDefaultPlaceholderForType(TypeSymbol bType) {
return getDefaultValueForType(bType)
.map(defaultValue -> defaultValue.replace("}", "\\}
|
class CommonUtil {
// Separator used when emitting markdown content (trailing space + platform newline).
public static final String MD_LINE_SEPARATOR = " " + System.lineSeparator();
// Platform line separator.
public static final String LINE_SEPARATOR = System.lineSeparator();
// Platform file-path separator.
public static final String FILE_SEPARATOR = File.separator;
// Matches markdown hard line breaks and all common newline forms (CRLF/CR/LF).
public static final Pattern MD_NEW_LINE_PATTERN = Pattern.compile("\\s\\s\\r\\n?|\\s\\s\\n|\\r\\n?|\\n");
// Assigned in the static initializer below from launcher-supplied system properties.
public static final String BALLERINA_HOME;
public static final boolean COMPILE_OFFLINE;
public static final String BALLERINA_CMD;
// URI schemes understood by the language server.
public static final String URI_SCHEME_BALA = "bala";
public static final String URI_SCHEME_EXPR = "expr";
public static final String URI_SCHEME_FILE = "file";
// LSP language identifiers.
public static final String LANGUAGE_ID_BALLERINA = "ballerina";
public static final String LANGUAGE_ID_TOML = "toml";
// Markup kind used for hover/completion documentation payloads.
public static final String MARKDOWN_MARKUP_KIND = "markdown";
public static final String BALLERINA_ORG_NAME = "ballerina";
// SDK version as reported via the "ballerina.version" system property.
public static final String SDK_VERSION = System.getProperty("ballerina.version");
public static final String EXPR_SCHEME = "expr";
// Lang-lib modules implicitly available in every Ballerina program; these never need an auto-import.
public static final List<String> PRE_DECLARED_LANG_LIBS = Arrays.asList("lang.boolean", "lang.decimal",
"lang.error", "lang.float", "lang.future", "lang.int", "lang.map", "lang.object", "lang.stream",
"lang.string", "lang.table", "lang.transaction", "lang.typedesc", "lang.xml");
// All Ballerina language keywords, as reported by the syntax API.
public static final List<String> BALLERINA_KEYWORDS = SyntaxInfo.keywords();
// Token kinds treated as declaration qualifiers.
public static final Set<SyntaxKind> QUALIFIER_KINDS = Set.of(SyntaxKind.SERVICE_KEYWORD,
SyntaxKind.CLIENT_KEYWORD, SyntaxKind.ISOLATED_KEYWORD, SyntaxKind.TRANSACTIONAL_KEYWORD,
SyntaxKind.PUBLIC_KEYWORD, SyntaxKind.PRIVATE_KEYWORD);
// The reserved "self" keyword used inside object/class bodies.
public static final String SELF_KW = "self";
// Matches fully-qualified type names of the form org/module:version.
private static final Pattern TYPE_NAME_DECOMPOSE_PATTERN = Pattern.compile("([\\w_.]*)/([\\w._]*):([\\w.-]*)");
static {
BALLERINA_HOME = System.getProperty("ballerina.home");
String onlineCompilation = System.getProperty("ls.compilation.online");
// Offline compilation is the default unless online compilation is explicitly enabled.
COMPILE_OFFLINE = !Boolean.parseBoolean(onlineCompilation);
// Platform-specific launcher script: bal.bat on Windows, bal elsewhere.
BALLERINA_CMD = BALLERINA_HOME + File.separator + "bin" + File.separator + "bal" +
(SystemUtils.IS_OS_WINDOWS ? ".bat" : "");
}
// Private constructor: this is a static utility holder and must never be instantiated.
private CommonUtil() {
}
/**
* Convert the syntax-node line range into a lsp4j range.
*
* @param lineRange - line range
* @return {@link Range} converted range
*/
public static Range toRange(LineRange lineRange) {
    // Convert both endpoints and wrap them in an lsp4j range.
    Position rangeStart = toPosition(lineRange.startLine());
    Position rangeEnd = toPosition(lineRange.endLine());
    return new Range(rangeStart, rangeEnd);
}
/**
* Convert the syntax-node line position into a lsp4j range.
*
* @param linePosition - line position.
* @return {@link Range} converted range
*/
public static Range toRange(LinePosition linePosition) {
    // A single position maps to a zero-width range (start and end coincide).
    Position start = toPosition(linePosition);
    Position end = toPosition(linePosition);
    return new Range(start, end);
}
/**
* Converts syntax-node line position into a lsp4j position.
*
* @param linePosition - line position
* @return {@link Position} converted position
*/
public static Position toPosition(LinePosition linePosition) {
    // lsp4j positions carry the same zero-based line and column values.
    int line = linePosition.line();
    int column = linePosition.offset();
    return new Position(line, column);
}
/**
* Get the text edit for an auto import statement.
* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular
* check should be done before usage
*
* @param orgName package org name
* @param pkgName package name
* @param context Language server context
* @return {@link List} List of Text Edits to apply
*/
public static List<TextEdit> getAutoImportTextEdits(@Nonnull String orgName, String pkgName,
DocumentServiceContext context) {
    // Delegate to the alias-aware overload with an empty alias so the import-statement
    // construction and insert-position logic live in exactly one place. With an empty
    // alias the overload produces the identical "import org/pkg;" statement.
    return getAutoImportTextEdits(orgName, pkgName, "", context);
}
/**
* Get the text edit for an auto import statement.
* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular
* check should be done before usage
*
* @param orgName package org name
* @param pkgName package name
* @param alias import alias
* @param context Language server context
* @return {@link List} List of Text Edits to apply
*/
public static List<TextEdit> getAutoImportTextEdits(@Nonnull String orgName, String pkgName, String alias,
DocumentServiceContext context) {
    Map<ImportDeclarationNode, ModuleSymbol> currentDocImports = context.currentDocImportsMap();
    // Insert at column 0 of the line where the last existing import ends,
    // or at the very top of the file when there are no imports yet.
    Optional<ImportDeclarationNode> lastImport = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));
    int insertLine = lastImport.map(node -> node.lineRange().endLine().line()).orElse(0);
    Position insertPosition = new Position(insertLine, 0);
    // Assemble "import [org/]pkg[ as alias];" followed by a line separator.
    String qualifiedName = (orgName.isEmpty() ? orgName : orgName + SLASH_KEYWORD_KEY) + pkgName;
    String aliasPart = alias.isEmpty() ? "" : " as " + alias;
    String importStatement = ItemResolverConstants.IMPORT + " " + qualifiedName + aliasPart
            + SEMI_COLON_SYMBOL_KEY + CommonUtil.LINE_SEPARATOR;
    return Collections.singletonList(new TextEdit(new Range(insertPosition, insertPosition), importStatement));
}
/**
* Get the default value for the given BType.
*
* @param bType Type descriptor to get the default value
* @param offset snippet offset.
* @return {@link String} Default value as a String
*/
public static Optional<String> getDefaultValueForType(TypeSymbol bType, int offset) {
// Delegates to the 3-arg overload; the boolean presumably enables snippet/template
// output — TODO confirm against the 3-arg overload's parameter documentation.
return getDefaultValueForType(bType, true, offset);
}
public static Optional<String> getDefaultPlaceholderForType(TypeSymbol bType) {
return getDefaultValueForType(bType)
.map(defaultValue -> defaultValue.replace("}", "\\}
|
Thanks! Please also consider moving the DEV-mode test, since that would let you remove the Hibernate dependency. @gsmet
|
/**
 * Creates and fully configures the Agroal datasource registered under {@code dataSourceName}.
 * Resolves the driver, optionally rewrites the JDBC URL for OpenTracing, applies the build-time
 * and runtime configuration, attaches pool interceptors and, when enabled, wraps the result
 * with OpenTelemetry instrumentation.
 *
 * @param dataSourceName name of the datasource to create
 * @return the configured datasource, or an {@code UnconfiguredDataSource} placeholder when
 *         no JDBC URL has been set
 * @throws IllegalArgumentException when no datasource with the given name is registered
 * @throws RuntimeException when the resolved driver class cannot be loaded
 */
public AgroalDataSource doCreateDataSource(String dataSourceName) {
if (!dataSourceSupport.entries.containsKey(dataSourceName)) {
throw new IllegalArgumentException("No datasource named '" + dataSourceName + "' exists");
}
DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName);
DataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName);
DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName);
DataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName);
// Without a JDBC URL the datasource cannot be built; return a placeholder that reports
// the missing property on first use instead of failing eagerly here.
if (!dataSourceJdbcRuntimeConfig.url.isPresent()) {
return new UnconfiguredDataSource(
DataSourceUtil.dataSourcePropertyKey(dataSourceName, "jdbc.url") + " has not been defined");
}
// Make sure JDBC drivers are registered with the context class loader before resolution.
loadDriversInTCCL();
String resolvedDriverClass = matchingSupportEntry.resolvedDriverClass;
Class<?> driver;
try {
driver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader());
} catch (ClassNotFoundException e) {
throw new RuntimeException(
"Unable to load the datasource driver " + resolvedDriverClass + " for datasource " + dataSourceName, e);
}
String jdbcUrl = dataSourceJdbcRuntimeConfig.url.get();
// Tracing must be enabled at build time; the runtime flag can still disable it.
if (dataSourceJdbcBuildTimeConfig.tracing) {
boolean tracingEnabled = dataSourceJdbcRuntimeConfig.tracing.enabled.orElse(dataSourceJdbcBuildTimeConfig.tracing);
if (tracingEnabled) {
// Rewrite "jdbc:" to "jdbc:tracing:" (unless already rewritten) so the
// OpenTracing JDBC driver intercepts the connection.
String rootTracingUrl = !jdbcUrl.startsWith(JDBC_TRACING_URL_PREFIX)
? jdbcUrl.replace(JDBC_URL_PREFIX, JDBC_TRACING_URL_PREFIX)
: jdbcUrl;
StringBuilder tracingURL = new StringBuilder(rootTracingUrl);
if (dataSourceJdbcRuntimeConfig.tracing.traceWithActiveSpanOnly) {
if (!tracingURL.toString().contains("?")) {
tracingURL.append("?");
}
tracingURL.append("traceWithActiveSpanOnly=true");
}
if (dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.isPresent()) {
if (!tracingURL.toString().contains("?")) {
tracingURL.append("?");
}
// Each non-empty semicolon-separated entry becomes its own ignoreForTracing=... parameter.
Arrays.stream(dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.get().split(";"))
.filter(query -> !query.isEmpty())
.forEach(query -> tracingURL.append("ignoreForTracing=")
.append(query.replaceAll("\"", "\\\""))
.append(";"));
}
jdbcUrl = tracingURL.toString();
// NOTE(review): the driver class is cleared here, presumably so the driver is resolved
// from the rewritten tracing URL via DriverManager instead — confirm.
driver = null;
}
}
String resolvedDbKind = matchingSupportEntry.resolvedDbKind;
// Pick the db-kind-specific connection configurer, falling back to a no-op one.
AgroalConnectionConfigurer agroalConnectionConfigurer = Arc.container()
.instance(AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind))
.orElse(new UnknownDbAgroalConnectionConfigurer());
AgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier();
if (!dataSourceJdbcRuntimeConfig.poolingEnabled) {
dataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS);
}
AgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration();
AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration
.connectionFactoryConfiguration();
boolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent;
applyNewConfiguration(dataSourceName, dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration,
driver, jdbcUrl,
dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, transactionRuntimeConfig,
mpMetricsPresent);
if (dataSourceSupport.disableSslSupport) {
agroalConnectionConfigurer.disableSslSupport(resolvedDbKind, dataSourceConfiguration);
}
// Prefer the Netty-based connection cache when Netty is present on the classpath.
try {
Class.forName("io.netty.util.concurrent.FastThreadLocal", true, Thread.currentThread().getContextClassLoader());
dataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusNettyConnectionCache());
} catch (ClassNotFoundException e) {
dataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusSimpleConnectionCache());
}
agroalConnectionConfigurer.setExceptionSorter(resolvedDbKind, dataSourceConfiguration);
AgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get();
AgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration,
new AgroalEventLoggingListener(dataSourceName,
agroalConfiguration.connectionPoolConfiguration()
.transactionRequirement() == TransactionRequirement.WARN));
log.debugv("Started datasource {0} connected to {1}", dataSourceName,
agroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl());
// Attach pool interceptors registered for this datasource (or the default one).
Collection<AgroalPoolInterceptor> interceptorList = agroalPoolInterceptors
.select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)
? Default.Literal.INSTANCE
: new DataSource.DataSourceLiteral(dataSourceName))
.stream().collect(Collectors.toList());
if (!interceptorList.isEmpty()) {
dataSource.setPoolInterceptors(interceptorList);
}
// Wrap with OpenTelemetry instrumentation when telemetry is enabled and the wrapper bean resolves.
if (dataSourceJdbcRuntimeConfig.telemetry.orElse(dataSourceJdbcBuildTimeConfig.telemetry)
&& agroalOpenTelemetryWrapper.isResolvable()) {
dataSource = agroalOpenTelemetryWrapper.get().apply(dataSource);
}
return dataSource;
}
|
if (dataSourceJdbcRuntimeConfig.telemetry.orElse(dataSourceJdbcBuildTimeConfig.telemetry)
|
/**
 * Creates and fully configures the Agroal datasource registered under {@code dataSourceName}.
 * Resolves the driver, optionally rewrites the JDBC URL for OpenTracing, applies the build-time
 * and runtime configuration, attaches pool interceptors and, when enabled, wraps the result
 * with OpenTelemetry instrumentation.
 *
 * @param dataSourceName name of the datasource to create
 * @return the configured datasource, or an {@code UnconfiguredDataSource} placeholder when
 *         no JDBC URL has been set
 * @throws IllegalArgumentException when no datasource with the given name is registered
 * @throws RuntimeException when the resolved driver class cannot be loaded
 */
public AgroalDataSource doCreateDataSource(String dataSourceName) {
if (!dataSourceSupport.entries.containsKey(dataSourceName)) {
throw new IllegalArgumentException("No datasource named '" + dataSourceName + "' exists");
}
DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName);
DataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName);
DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName);
DataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName);
// Without a JDBC URL the datasource cannot be built; return a placeholder that reports
// the missing property on first use instead of failing eagerly here.
if (!dataSourceJdbcRuntimeConfig.url.isPresent()) {
return new UnconfiguredDataSource(
DataSourceUtil.dataSourcePropertyKey(dataSourceName, "jdbc.url") + " has not been defined");
}
// Make sure JDBC drivers are registered with the context class loader before resolution.
loadDriversInTCCL();
String resolvedDriverClass = matchingSupportEntry.resolvedDriverClass;
Class<?> driver;
try {
driver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader());
} catch (ClassNotFoundException e) {
throw new RuntimeException(
"Unable to load the datasource driver " + resolvedDriverClass + " for datasource " + dataSourceName, e);
}
String jdbcUrl = dataSourceJdbcRuntimeConfig.url.get();
// Tracing must be enabled at build time; the runtime flag can still disable it.
if (dataSourceJdbcBuildTimeConfig.tracing) {
boolean tracingEnabled = dataSourceJdbcRuntimeConfig.tracing.enabled.orElse(dataSourceJdbcBuildTimeConfig.tracing);
if (tracingEnabled) {
// Rewrite "jdbc:" to "jdbc:tracing:" (unless already rewritten) so the
// OpenTracing JDBC driver intercepts the connection.
String rootTracingUrl = !jdbcUrl.startsWith(JDBC_TRACING_URL_PREFIX)
? jdbcUrl.replace(JDBC_URL_PREFIX, JDBC_TRACING_URL_PREFIX)
: jdbcUrl;
StringBuilder tracingURL = new StringBuilder(rootTracingUrl);
if (dataSourceJdbcRuntimeConfig.tracing.traceWithActiveSpanOnly) {
if (!tracingURL.toString().contains("?")) {
tracingURL.append("?");
}
tracingURL.append("traceWithActiveSpanOnly=true");
}
if (dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.isPresent()) {
if (!tracingURL.toString().contains("?")) {
tracingURL.append("?");
}
// Each non-empty semicolon-separated entry becomes its own ignoreForTracing=... parameter.
Arrays.stream(dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.get().split(";"))
.filter(query -> !query.isEmpty())
.forEach(query -> tracingURL.append("ignoreForTracing=")
.append(query.replaceAll("\"", "\\\""))
.append(";"));
}
jdbcUrl = tracingURL.toString();
// NOTE(review): the driver class is cleared here, presumably so the driver is resolved
// from the rewritten tracing URL via DriverManager instead — confirm.
driver = null;
}
}
String resolvedDbKind = matchingSupportEntry.resolvedDbKind;
// Pick the db-kind-specific connection configurer, falling back to a no-op one.
AgroalConnectionConfigurer agroalConnectionConfigurer = Arc.container()
.instance(AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind))
.orElse(new UnknownDbAgroalConnectionConfigurer());
AgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier();
if (!dataSourceJdbcRuntimeConfig.poolingEnabled) {
dataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS);
}
AgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration();
AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration
.connectionFactoryConfiguration();
boolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent;
applyNewConfiguration(dataSourceName, dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration,
driver, jdbcUrl,
dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, transactionRuntimeConfig,
mpMetricsPresent);
if (dataSourceSupport.disableSslSupport) {
agroalConnectionConfigurer.disableSslSupport(resolvedDbKind, dataSourceConfiguration);
}
// Prefer the Netty-based connection cache when Netty is present on the classpath.
try {
Class.forName("io.netty.util.concurrent.FastThreadLocal", true, Thread.currentThread().getContextClassLoader());
dataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusNettyConnectionCache());
} catch (ClassNotFoundException e) {
dataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusSimpleConnectionCache());
}
agroalConnectionConfigurer.setExceptionSorter(resolvedDbKind, dataSourceConfiguration);
AgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get();
AgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration,
new AgroalEventLoggingListener(dataSourceName,
agroalConfiguration.connectionPoolConfiguration()
.transactionRequirement() == TransactionRequirement.WARN));
log.debugv("Started datasource {0} connected to {1}", dataSourceName,
agroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl());
// Attach pool interceptors registered for this datasource (or the default one).
Collection<AgroalPoolInterceptor> interceptorList = agroalPoolInterceptors
.select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)
? Default.Literal.INSTANCE
: new DataSource.DataSourceLiteral(dataSourceName))
.stream().collect(Collectors.toList());
if (!interceptorList.isEmpty()) {
dataSource.setPoolInterceptors(interceptorList);
}
// Telemetry is gated by the build-time flag; the runtime flag defaults to enabled.
// NOTE(review): agroalOpenTelemetryWrapper.get() is called here without an
// isResolvable() guard — confirm the wrapper bean is always registered when the
// build-time telemetry flag is set, otherwise this throws at runtime.
if (dataSourceJdbcBuildTimeConfig.telemetry && dataSourceJdbcRuntimeConfig.telemetry.orElse(true)) {
dataSource = agroalOpenTelemetryWrapper.get().apply(dataSource);
}
return dataSource;
}
|
/**
 * Central registry that lazily creates, caches and shuts down the application's Agroal
 * datasources. One {@link AgroalDataSource} instance is kept per datasource name.
 */
class DataSources {
private static final Logger log = Logger.getLogger(DataSources.class.getName());
// Fully qualified name of the OpenTracing JDBC driver.
public static final String TRACING_DRIVER_CLASSNAME = "io.opentracing.contrib.jdbc.TracingDriver";
private static final String JDBC_URL_PREFIX = "jdbc:";
private static final String JDBC_TRACING_URL_PREFIX = "jdbc:tracing:";
// Build-time and runtime configuration roots, injected once at construction.
private final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig;
private final DataSourcesRuntimeConfig dataSourcesRuntimeConfig;
private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig;
private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig;
private final TransactionManagerConfiguration transactionRuntimeConfig;
// Transaction machinery used to wire Narayana integration into each pool.
private final TransactionManager transactionManager;
private final XAResourceRecoveryRegistry xaResourceRecoveryRegistry;
private final TransactionSynchronizationRegistry transactionSynchronizationRegistry;
private final DataSourceSupport dataSourceSupport;
private final Instance<AgroalPoolInterceptor> agroalPoolInterceptors;
private final Instance<AgroalOpenTelemetryWrapper> agroalOpenTelemetryWrapper;
// Cache of created datasources, keyed by datasource name.
private final ConcurrentMap<String, AgroalDataSource> dataSources = new ConcurrentHashMap<>();
public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,
DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig,
DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig,
TransactionManagerConfiguration transactionRuntimeConfig,
TransactionManager transactionManager,
XAResourceRecoveryRegistry xaResourceRecoveryRegistry,
TransactionSynchronizationRegistry transactionSynchronizationRegistry,
DataSourceSupport dataSourceSupport,
@Any Instance<AgroalPoolInterceptor> agroalPoolInterceptors,
Instance<AgroalOpenTelemetryWrapper> agroalOpenTelemetryWrapper) {
this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig;
this.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig;
this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig;
this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig;
this.transactionRuntimeConfig = transactionRuntimeConfig;
this.transactionManager = transactionManager;
this.xaResourceRecoveryRegistry = xaResourceRecoveryRegistry;
this.transactionSynchronizationRegistry = transactionSynchronizationRegistry;
this.dataSourceSupport = dataSourceSupport;
this.agroalPoolInterceptors = agroalPoolInterceptors;
this.agroalOpenTelemetryWrapper = agroalOpenTelemetryWrapper;
}
/**
 * Meant to be used from recorders that create synthetic beans that need access to {@code Datasource}.
 * In such using {@code Arc.container.instance(DataSource.class)} is not possible because
 * {@code Datasource} is itself a synthetic bean.
 * <p>
 * This method relies on the fact that {@code DataSources} should - given the same input -
 * always return the same {@code AgroalDataSource} no matter how many times it is invoked
 * (which makes sense because {@code DataSource} is a {@code Singleton} bean).
 * <p>
 * This method is thread-safe
 */
public static AgroalDataSource fromName(String dataSourceName) {
return Arc.container().instance(DataSources.class).get()
.getDataSource(dataSourceName);
}
/**
 * Returns the cached datasource for the given name, creating it on first access.
 * {@code computeIfAbsent} guarantees at most one datasource is built per name.
 */
public AgroalDataSource getDataSource(String dataSourceName) {
return dataSources.computeIfAbsent(dataSourceName, new Function<String, AgroalDataSource>() {
@Override
public AgroalDataSource apply(String s) {
return doCreateDataSource(s);
}
});
}
// Applies the resolved build-time/runtime configuration onto the Agroal configuration
// suppliers: connection factory (URL, driver, credentials, isolation), transaction
// integration, and pool sizing/validation/leak-detection settings.
@SuppressWarnings("resource")
private void applyNewConfiguration(String dataSourceName, AgroalDataSourceConfigurationSupplier dataSourceConfiguration,
AgroalConnectionPoolConfigurationSupplier poolConfiguration,
AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, String jdbcUrl,
DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig,
DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, TransactionManagerConfiguration transactionRuntimeConfig,
boolean mpMetricsPresent) {
connectionFactoryConfiguration.jdbcUrl(jdbcUrl);
connectionFactoryConfiguration.connectionProviderClass(driver);
connectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks);
if (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) {
connectionFactoryConfiguration
.jdbcTransactionIsolation(
dataSourceJdbcRuntimeConfig.transactionIsolationLevel.get());
}
// Wire Narayana transaction integration unless transactions are disabled; the XA
// recovery registry is only passed when XA and recovery are both enabled.
if (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) {
TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager,
transactionSynchronizationRegistry, null, false,
dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA
&& transactionRuntimeConfig.enableRecovery
? xaResourceRecoveryRegistry
: null);
if (dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA
&& !transactionRuntimeConfig.enableRecovery) {
log.warnv(
"Datasource {0} enables XA but transaction recovery is not enabled. Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly",
dataSourceName);
}
poolConfiguration.transactionIntegration(txIntegration);
}
if (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) {
connectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get());
}
// Per-datasource metrics flag wins; otherwise metrics require the global flag plus MP Metrics.
if (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) {
dataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get());
} else {
dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent);
}
// Credentials: explicit username/password, optionally overridden by a credentials provider.
if (dataSourceRuntimeConfig.username.isPresent()) {
NamePrincipal username = new NamePrincipal(dataSourceRuntimeConfig.username.get());
connectionFactoryConfiguration
.principal(username).recoveryPrincipal(username);
}
if (dataSourceRuntimeConfig.password.isPresent()) {
SimplePassword password = new SimplePassword(dataSourceRuntimeConfig.password.get());
connectionFactoryConfiguration
.credential(password).recoveryCredential(password);
}
if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) {
String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
String name = dataSourceRuntimeConfig.credentialsProvider.get();
connectionFactoryConfiguration
.credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider));
}
for (Map.Entry<String, String> entry : dataSourceJdbcRuntimeConfig.additionalJdbcProperties.entrySet()) {
connectionFactoryConfiguration.jdbcProperty(entry.getKey(), entry.getValue());
}
// Pool sizing and validation settings.
poolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize);
poolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize);
if (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) {
poolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt());
}
poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator());
if (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) {
poolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get());
}
if (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) {
poolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get());
}
if (dataSourceJdbcRuntimeConfig.foregroundValidationInterval.isPresent()) {
poolConfiguration.idleValidationTimeout(dataSourceJdbcRuntimeConfig.foregroundValidationInterval.get());
}
// A custom validation query replaces the default validator set above.
if (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) {
String validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get();
poolConfiguration.connectionValidator(new ConnectionValidator() {
@Override
public boolean isValid(Connection connection) {
try (Statement stmt = connection.createStatement()) {
stmt.execute(validationQuery);
return true;
} catch (Exception e) {
log.warn("Connection validation failed", e);
}
return false;
}
});
}
if (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) {
poolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get());
}
if (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) {
poolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get());
}
if (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) {
poolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get());
}
if (dataSourceJdbcRuntimeConfig.transactionRequirement.isPresent()) {
poolConfiguration.transactionRequirement(dataSourceJdbcRuntimeConfig.transactionRequirement.get());
}
poolConfiguration.enhancedLeakReport(dataSourceJdbcRuntimeConfig.extendedLeakReport);
poolConfiguration.flushOnClose(dataSourceJdbcRuntimeConfig.flushOnClose);
}
// Returns the build-time config for the named datasource, or a fresh default config
// when the name is unknown. Same pattern for the three getters below.
public DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) {
if (DataSourceUtil.isDefault(dataSourceName)) {
return dataSourcesBuildTimeConfig.defaultDataSource;
}
DataSourceBuildTimeConfig namedConfig = dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName);
return namedConfig != null ? namedConfig : new DataSourceBuildTimeConfig();
}
public DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) {
if (DataSourceUtil.isDefault(dataSourceName)) {
return dataSourcesJdbcBuildTimeConfig.jdbc;
}
DataSourceJdbcOuterNamedBuildTimeConfig namedOuterConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources
.get(dataSourceName);
return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcBuildTimeConfig();
}
public DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) {
if (DataSourceUtil.isDefault(dataSourceName)) {
return dataSourcesRuntimeConfig.defaultDataSource;
}
DataSourceRuntimeConfig namedConfig = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName);
return namedConfig != null ? namedConfig : new DataSourceRuntimeConfig();
}
public DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) {
if (DataSourceUtil.isDefault(dataSourceName)) {
return dataSourcesJdbcRuntimeConfig.jdbc;
}
DataSourceJdbcOuterNamedRuntimeConfig namedOuterConfig = dataSourcesJdbcRuntimeConfig.namedDataSources
.get(dataSourceName);
return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig();
}
/**
 * Uses the {@link ServiceLoader} mechanism to eagerly load all JDBC {@link Driver}
 * implementations visible from the current {@link Thread}'s context class loader,
 * so they are registered with {@code DriverManager} before connections are created.
 */
private static void loadDriversInTCCL() {
final ServiceLoader<Driver> drivers = ServiceLoader.load(Driver.class);
final Iterator<Driver> iterator = drivers.iterator();
while (iterator.hasNext()) {
try {
iterator.next();
} catch (Throwable t) {
// Deliberately ignored: one driver failing to load must not prevent the
// remaining drivers from being registered.
}
}
}
/**
 * Closes every datasource created by this registry on application shutdown.
 */
@PreDestroy
public void stop() {
for (AgroalDataSource dataSource : dataSources.values()) {
if (dataSource != null) {
dataSource.close();
}
}
}
}
|
class DataSources {
private static final Logger log = Logger.getLogger(DataSources.class.getName());
public static final String TRACING_DRIVER_CLASSNAME = "io.opentracing.contrib.jdbc.TracingDriver";
private static final String JDBC_URL_PREFIX = "jdbc:";
private static final String JDBC_TRACING_URL_PREFIX = "jdbc:tracing:";
private final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig;
private final DataSourcesRuntimeConfig dataSourcesRuntimeConfig;
private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig;
private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig;
private final TransactionManagerConfiguration transactionRuntimeConfig;
private final TransactionManager transactionManager;
private final XAResourceRecoveryRegistry xaResourceRecoveryRegistry;
private final TransactionSynchronizationRegistry transactionSynchronizationRegistry;
private final DataSourceSupport dataSourceSupport;
private final Instance<AgroalPoolInterceptor> agroalPoolInterceptors;
private final Instance<AgroalOpenTelemetryWrapper> agroalOpenTelemetryWrapper;
private final ConcurrentMap<String, AgroalDataSource> dataSources = new ConcurrentHashMap<>();
public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,
DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig,
DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig,
TransactionManagerConfiguration transactionRuntimeConfig,
TransactionManager transactionManager,
XAResourceRecoveryRegistry xaResourceRecoveryRegistry,
TransactionSynchronizationRegistry transactionSynchronizationRegistry,
DataSourceSupport dataSourceSupport,
@Any Instance<AgroalPoolInterceptor> agroalPoolInterceptors,
Instance<AgroalOpenTelemetryWrapper> agroalOpenTelemetryWrapper) {
this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig;
this.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig;
this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig;
this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig;
this.transactionRuntimeConfig = transactionRuntimeConfig;
this.transactionManager = transactionManager;
this.xaResourceRecoveryRegistry = xaResourceRecoveryRegistry;
this.transactionSynchronizationRegistry = transactionSynchronizationRegistry;
this.dataSourceSupport = dataSourceSupport;
this.agroalPoolInterceptors = agroalPoolInterceptors;
this.agroalOpenTelemetryWrapper = agroalOpenTelemetryWrapper;
}
/**
 * Meant to be used from recorders that create synthetic beans that need access to
 * {@code DataSource}. In such cases {@code Arc.container().instance(DataSource.class)}
 * is not possible because {@code DataSource} is itself a synthetic bean.
 * <p>
 * Relies on {@code DataSources} always returning the same {@code AgroalDataSource}
 * for the same name no matter how many times it is invoked (which holds because
 * {@code DataSource} is a {@code Singleton} bean).
 * <p>
 * This method is thread-safe.
 */
public static AgroalDataSource fromName(String dataSourceName) {
    DataSources bean = Arc.container().instance(DataSources.class).get();
    return bean.getDataSource(dataSourceName);
}
/**
 * Returns the {@link AgroalDataSource} for the given name, creating it on first access.
 * Creation is performed at most once per name thanks to
 * {@link ConcurrentMap#computeIfAbsent}.
 *
 * @param dataSourceName the (possibly default) datasource name
 * @return the cached or newly created datasource
 */
public AgroalDataSource getDataSource(String dataSourceName) {
    // Method reference instead of a verbose anonymous Function; behavior is identical.
    return dataSources.computeIfAbsent(dataSourceName, this::doCreateDataSource);
}
// Populates the supplied Agroal configuration builders for one datasource from the
// Quarkus build-time and runtime configuration. Mutates the builders in place; the
// order of the calls below mirrors the Agroal supplier API and should be preserved.
@SuppressWarnings("resource")
private void applyNewConfiguration(String dataSourceName, AgroalDataSourceConfigurationSupplier dataSourceConfiguration,
        AgroalConnectionPoolConfigurationSupplier poolConfiguration,
        AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, String jdbcUrl,
        DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig,
        DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, TransactionManagerConfiguration transactionRuntimeConfig,
        boolean mpMetricsPresent) {
    connectionFactoryConfiguration.jdbcUrl(jdbcUrl);
    connectionFactoryConfiguration.connectionProviderClass(driver);
    connectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks);
    if (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) {
        connectionFactoryConfiguration
                .jdbcTransactionIsolation(
                        dataSourceJdbcRuntimeConfig.transactionIsolationLevel.get());
    }
    // Wire the pool into Narayana unless transaction integration is disabled.
    if (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) {
        // The XA recovery registry is passed only when XA *and* recovery are both
        // enabled; otherwise the recovery subsystem must not see this datasource.
        TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager,
                transactionSynchronizationRegistry, null, false,
                dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA
                        && transactionRuntimeConfig.enableRecovery
                                ? xaResourceRecoveryRegistry
                                : null);
        // XA without recovery risks data loss on abrupt shutdown - warn loudly.
        if (dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA
                && !transactionRuntimeConfig.enableRecovery) {
            log.warnv(
                    "Datasource {0} enables XA but transaction recovery is not enabled. Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly",
                    dataSourceName);
        }
        poolConfiguration.transactionIntegration(txIntegration);
    }
    if (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) {
        connectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get());
    }
    // Metrics: a per-datasource override wins; otherwise fall back to the global flag,
    // which only takes effect when an MP Metrics implementation is present.
    if (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) {
        dataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get());
    } else {
        dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent);
    }
    // Credentials: explicit username/password and/or a CredentialsProvider. The same
    // principal/credential is reused for XA recovery connections.
    if (dataSourceRuntimeConfig.username.isPresent()) {
        NamePrincipal username = new NamePrincipal(dataSourceRuntimeConfig.username.get());
        connectionFactoryConfiguration
                .principal(username).recoveryPrincipal(username);
    }
    if (dataSourceRuntimeConfig.password.isPresent()) {
        SimplePassword password = new SimplePassword(dataSourceRuntimeConfig.password.get());
        connectionFactoryConfiguration
                .credential(password).recoveryCredential(password);
    }
    if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) {
        String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null);
        CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
        String name = dataSourceRuntimeConfig.credentialsProvider.get();
        connectionFactoryConfiguration
                .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider));
    }
    // Arbitrary extra driver properties are passed through verbatim.
    for (Map.Entry<String, String> entry : dataSourceJdbcRuntimeConfig.additionalJdbcProperties.entrySet()) {
        connectionFactoryConfiguration.jdbcProperty(entry.getKey(), entry.getValue());
    }
    // Pool sizing, timeouts and validation.
    poolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize);
    poolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize);
    if (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) {
        poolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt());
    }
    poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator());
    if (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) {
        poolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get());
    }
    if (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) {
        poolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get());
    }
    if (dataSourceJdbcRuntimeConfig.foregroundValidationInterval.isPresent()) {
        poolConfiguration.idleValidationTimeout(dataSourceJdbcRuntimeConfig.foregroundValidationInterval.get());
    }
    // A configured validation query replaces the default validator installed above.
    if (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) {
        String validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get();
        poolConfiguration.connectionValidator(new ConnectionValidator() {
            @Override
            public boolean isValid(Connection connection) {
                try (Statement stmt = connection.createStatement()) {
                    stmt.execute(validationQuery);
                    return true;
                } catch (Exception e) {
                    // A failed validation is logged, not propagated: the pool simply
                    // discards the invalid connection.
                    log.warn("Connection validation failed", e);
                }
                return false;
            }
        });
    }
    if (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) {
        poolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get());
    }
    if (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) {
        poolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get());
    }
    if (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) {
        poolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get());
    }
    if (dataSourceJdbcRuntimeConfig.transactionRequirement.isPresent()) {
        poolConfiguration.transactionRequirement(dataSourceJdbcRuntimeConfig.transactionRequirement.get());
    }
    poolConfiguration.enhancedLeakReport(dataSourceJdbcRuntimeConfig.extendedLeakReport);
    poolConfiguration.flushOnClose(dataSourceJdbcRuntimeConfig.flushOnClose);
}
/**
 * Resolves the build-time configuration for the given datasource name, falling back
 * to an empty {@link DataSourceBuildTimeConfig} when no named entry exists.
 */
public DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) {
    if (!DataSourceUtil.isDefault(dataSourceName)) {
        DataSourceBuildTimeConfig named = dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName);
        if (named == null) {
            return new DataSourceBuildTimeConfig();
        }
        return named;
    }
    return dataSourcesBuildTimeConfig.defaultDataSource;
}
/**
 * Resolves the build-time JDBC configuration for the given datasource name; an empty
 * configuration is returned when the datasource has no dedicated entry.
 */
public DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) {
    if (DataSourceUtil.isDefault(dataSourceName)) {
        return dataSourcesJdbcBuildTimeConfig.jdbc;
    }
    DataSourceJdbcOuterNamedBuildTimeConfig outer = dataSourcesJdbcBuildTimeConfig.namedDataSources.get(dataSourceName);
    if (outer == null) {
        return new DataSourceJdbcBuildTimeConfig();
    }
    return outer.jdbc;
}
/**
 * Resolves the runtime configuration for the given datasource name, falling back to
 * an empty {@link DataSourceRuntimeConfig} when no named entry exists.
 */
public DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) {
    if (!DataSourceUtil.isDefault(dataSourceName)) {
        DataSourceRuntimeConfig named = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName);
        return named == null ? new DataSourceRuntimeConfig() : named;
    }
    return dataSourcesRuntimeConfig.defaultDataSource;
}
/**
 * Resolves the runtime JDBC configuration for the given datasource name; an empty
 * configuration is returned when the datasource has no dedicated entry.
 */
public DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) {
    if (DataSourceUtil.isDefault(dataSourceName)) {
        return dataSourcesJdbcRuntimeConfig.jdbc;
    }
    DataSourceJdbcOuterNamedRuntimeConfig outer = dataSourcesJdbcRuntimeConfig.namedDataSources.get(dataSourceName);
    return outer != null ? outer.jdbc : new DataSourceJdbcRuntimeConfig();
}
/**
 * Uses the {@link ServiceLoader} mechanism to eagerly load all JDBC {@link Driver}
 * implementations visible from the current {@link Thread}'s context class loader.
 * Loading is best-effort: a driver that fails to load is skipped silently.
 */
private static void loadDriversInTCCL() {
    final ServiceLoader<Driver> drivers = ServiceLoader.load(Driver.class);
    final Iterator<Driver> iterator = drivers.iterator();
    // Iterate manually (rather than with for-each) so that a single driver that fails
    // to load does not abort the loading of the remaining drivers.
    while (iterator.hasNext()) {
        try {
            iterator.next();
        } catch (Throwable t) {
            // Deliberately ignored: an unloadable driver must not prevent startup.
        }
    }
}
/**
 * Closes every datasource that was materialized during the application's lifetime.
 * Invoked automatically when the bean is destroyed.
 */
@PreDestroy
public void stop() {
    dataSources.values().forEach(dataSource -> {
        if (dataSource != null) {
            dataSource.close();
        }
    });
}
}
|
@yvgopal Do you have any idea?
|
/**
 * Sets the default message time to live value.
 * Values beyond {@code ManagementClientConstants.MAX_DURATION} are silently clamped.
 *
 * @param defaultMessageTimeToLive - The default message time to live value.
 *                                 Value cannot be lower than 1 second.
 * @throws IllegalArgumentException if the value is outside the allowed
 *                                  [MIN_ALLOWED_TTL, MAX_ALLOWED_TTL] range.
 */
public void setDefaultMessageTimeToLive(Duration defaultMessageTimeToLive) {
    if (defaultMessageTimeToLive != null
            && (defaultMessageTimeToLive.compareTo(ManagementClientConstants.MIN_ALLOWED_TTL) < 0
            || defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_ALLOWED_TTL) > 0)) {
        // Bug fix: the message previously printed MAX before MIN, so "between %s and %s"
        // reported the bounds in the wrong order.
        throw new IllegalArgumentException(
                String.format("The value must be between %s and %s.",
                        ManagementClientConstants.MIN_ALLOWED_TTL,
                        ManagementClientConstants.MAX_ALLOWED_TTL));
    }
    this.defaultMessageTimeToLive = defaultMessageTimeToLive;
    // Clamp to the maximum duration representable by the service.
    if (this.defaultMessageTimeToLive != null
            && this.defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
        this.defaultMessageTimeToLive = ManagementClientConstants.MAX_DURATION;
    }
}
|
if (this.defaultMessageTimeToLive != null
|
/**
 * Sets the default message time to live value.
 * Values beyond {@code ManagementClientConstants.MAX_DURATION} are silently clamped.
 *
 * @param defaultMessageTimeToLive - The default message time to live value.
 *                                 Cannot be null; value cannot be lower than 1 second.
 * @throws IllegalArgumentException if the value is null or outside the allowed
 *                                  [MIN_ALLOWED_TTL, MAX_ALLOWED_TTL] range.
 */
public void setDefaultMessageTimeToLive(Duration defaultMessageTimeToLive) {
    if (defaultMessageTimeToLive == null
            || (defaultMessageTimeToLive.compareTo(ManagementClientConstants.MIN_ALLOWED_TTL) < 0
            || defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_ALLOWED_TTL) > 0)) {
        // Bug fix: the message previously printed MAX before MIN, so "between %s and %s"
        // reported the bounds in the wrong order.
        throw new IllegalArgumentException(
                String.format("The value must be between %s and %s.",
                        ManagementClientConstants.MIN_ALLOWED_TTL,
                        ManagementClientConstants.MAX_ALLOWED_TTL));
    }
    this.defaultMessageTimeToLive = defaultMessageTimeToLive;
    if (this.defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
        this.defaultMessageTimeToLive = ManagementClientConstants.MAX_DURATION;
    }
}
|
/**
 * Represents the metadata description of a Service Bus queue: sizing, time-to-live,
 * lock, duplicate-detection, forwarding and access-control settings.
 * <p>
 * Fixes applied: {@link #hashCode()} is now consistent with the case-insensitive
 * path comparison in {@link #equals(Object)}, and the previously missing
 * {@code setDefaultMessageTimeToLive} setter (whose javadoc was orphaned) is restored.
 */
class QueueDescription {
    Duration duplicationDetectionHistoryTimeWindow = ManagementClientConstants.DEFAULT_HISTORY_DEDUP_WINDOW;
    String path;
    Duration lockDuration = ManagementClientConstants.DEFAULT_LOCK_DURATION;
    Duration defaultMessageTimeToLive = ManagementClientConstants.MAX_DURATION;
    Duration autoDeleteOnIdle = ManagementClientConstants.MAX_DURATION;
    int maxDeliveryCount = ManagementClientConstants.DEFAULT_MAX_DELIVERY_COUNT;
    String forwardTo = null;
    String forwardDeadLetteredMessagesTo = null;
    String userMetadata = null;
    long maxSizeInMB = ManagementClientConstants.DEFAULT_MAX_SIZE_IN_MB;
    boolean requiresDuplicateDetection = false;
    boolean enableDeadLetteringOnMessageExpiration = false;
    boolean requiresSession = false;
    boolean enableBatchedOperations = true;
    boolean enablePartitioning = false;
    EntityStatus status = EntityStatus.Active;
    List<AuthorizationRule> authorizationRules = null;

    /**
     * Initializes a new instance of QueueDescription with the specified relative path.
     * @param path - Path of the queue.
     *             Max length is 260 chars. Cannot start or end with a slash.
     *             Cannot contain restricted characters such as '@', '?', '#'.
     */
    public QueueDescription(String path) {
        this.setPath(path);
    }

    /**
     * @return The path of the queue.
     */
    public String getPath() {
        return this.path;
    }

    /**
     * @param path - The path of the queue. Max length is 260 chars.
     *             Cannot start or end with a slash.
     *             Cannot contain restricted characters such as '@', '?', '#'.
     */
    private void setPath(String path) {
        EntityNameHelper.checkValidQueueName(path);
        this.path = path;
    }

    /**
     * The amount of time that the message is locked by a given receiver
     * so that no other receiver receives the same message.
     * @return The duration of a peek lock. Default value is 60 seconds.
     */
    public Duration getLockDuration() {
        return this.lockDuration;
    }

    /**
     * Sets the amount of time that the message is locked by a given receiver
     * so that no other receiver receives the same message.
     * @param lockDuration - The duration of a peek lock. Max value is 5 minutes.
     *                     Must not be null.
     */
    public void setLockDuration(Duration lockDuration) {
        this.lockDuration = lockDuration;
        // Values beyond the service maximum are silently clamped rather than rejected.
        if (this.lockDuration.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
            this.lockDuration = ManagementClientConstants.MAX_DURATION;
        }
    }

    /**
     * @return the maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
     *         Default value is 1024.
     */
    public long getMaxSizeInMB() {
        return this.maxSizeInMB;
    }

    /**
     * @param maxSize - Maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
     */
    public void setMaxSizeInMB(long maxSize) {
        this.maxSizeInMB = maxSize;
    }

    /**
     * If enabled, duplicate messages (same messageId) sent within the duration of
     * {@link #getDuplicationDetectionHistoryTimeWindow()} are discarded by the service.
     * @return value indicating if the queue requires guard against duplicate messages.
     */
    public boolean isRequiresDuplicateDetection() {
        return requiresDuplicateDetection;
    }

    /**
     * @param requiresDuplicateDetection - True if duplicate detection needs to be enabled.
     *                                   See also {@link #setDuplicationDetectionHistoryTimeWindow(Duration)}.
     */
    public void setRequiresDuplicateDetection(boolean requiresDuplicateDetection) {
        this.requiresDuplicateDetection = requiresDuplicateDetection;
    }

    /**
     * @return boolean that indicates whether the queue supports the concept of session. Sessionful-messages follow FIFO ordering.
     */
    public boolean isRequiresSession() {
        return requiresSession;
    }

    /**
     * @param requiresSession - True if queue should support sessions.
     */
    public void setRequiresSession(boolean requiresSession) {
        this.requiresSession = requiresSession;
    }

    /**
     * Time-To-Live is the duration after which the message expires, starting from when
     * the message is sent to Service Bus.
     * This is the default value used when the message itself carries no TTL.
     * Messages older than their TimeToLive value will expire and no longer be retained in the message store.
     * Subscribers will be unable to receive expired messages.
     * @return The default time to live value for the messages.
     */
    public Duration getDefaultMessageTimeToLive() {
        return defaultMessageTimeToLive;
    }

    /**
     * @param defaultMessageTimeToLive - The default message time to live value.
     *                                 Value cannot be lower than 1 second.
     * @throws IllegalArgumentException if the value is outside the allowed range.
     */
    public void setDefaultMessageTimeToLive(Duration defaultMessageTimeToLive) {
        // Restored setter: its javadoc was present but the method body was missing.
        if (defaultMessageTimeToLive != null
                && (defaultMessageTimeToLive.compareTo(ManagementClientConstants.MIN_ALLOWED_TTL) < 0
                || defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_ALLOWED_TTL) > 0)) {
            throw new IllegalArgumentException(
                    String.format("The value must be between %s and %s.",
                            ManagementClientConstants.MIN_ALLOWED_TTL,
                            ManagementClientConstants.MAX_ALLOWED_TTL));
        }
        this.defaultMessageTimeToLive = defaultMessageTimeToLive;
        if (this.defaultMessageTimeToLive != null
                && this.defaultMessageTimeToLive.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
            this.defaultMessageTimeToLive = ManagementClientConstants.MAX_DURATION;
        }
    }

    /**
     * @return The idle interval after which the queue is automatically deleted.
     */
    public Duration getAutoDeleteOnIdle() {
        return autoDeleteOnIdle;
    }

    /**
     * @param autoDeleteOnIdle - The idle interval after which the queue is automatically deleted.
     *                         The minimum duration is 5 minutes.
     * @throws IllegalArgumentException if the value is below the allowed minimum.
     */
    public void setAutoDeleteOnIdle(Duration autoDeleteOnIdle) {
        if (autoDeleteOnIdle != null
                && autoDeleteOnIdle.compareTo(ManagementClientConstants.MIN_ALLOWED_AUTODELETE_DURATION) < 0) {
            throw new IllegalArgumentException(
                    String.format("The value must be greater than %s.",
                            ManagementClientConstants.MIN_ALLOWED_AUTODELETE_DURATION));
        }
        this.autoDeleteOnIdle = autoDeleteOnIdle;
        if (this.autoDeleteOnIdle != null
                && this.autoDeleteOnIdle.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
            this.autoDeleteOnIdle = ManagementClientConstants.MAX_DURATION;
        }
    }

    /**
     * Indicates whether this queue has dead letter support when a message expires.
     * @return If true, the expired messages are moved to dead-letter sub-queue.
     *         Default value is false.
     */
    public boolean isEnableDeadLetteringOnMessageExpiration() {
        return enableDeadLetteringOnMessageExpiration;
    }

    /**
     * @param enableDeadLetteringOnMessageExpiration - True if messages should be dead-lettered on expiration.
     */
    public void setEnableDeadLetteringOnMessageExpiration(boolean enableDeadLetteringOnMessageExpiration) {
        this.enableDeadLetteringOnMessageExpiration = enableDeadLetteringOnMessageExpiration;
    }

    /**
     * @return The duration of duplicate detection history that is maintained by the service.
     *         The default value is 1 minute.
     */
    public Duration getDuplicationDetectionHistoryTimeWindow() {
        return duplicationDetectionHistoryTimeWindow;
    }

    /**
     * @param duplicationDetectionHistoryTimeWindow - The duration of duplicate detection history that is maintained by the service.
     *                                              Max value is 1 day and minimum is 20 seconds.
     * @throws IllegalArgumentException if the value is outside the allowed range.
     */
    public void setDuplicationDetectionHistoryTimeWindow(Duration duplicationDetectionHistoryTimeWindow) {
        if (duplicationDetectionHistoryTimeWindow != null
                && (duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MIN_DUPLICATE_HISTORY_DURATION) < 0
                || duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MAX_DUPLICATE_HISTORY_DURATION) > 0)) {
            throw new IllegalArgumentException(
                    String.format("The value must be between %s and %s.",
                            ManagementClientConstants.MIN_DUPLICATE_HISTORY_DURATION,
                            ManagementClientConstants.MAX_DUPLICATE_HISTORY_DURATION));
        }
        this.duplicationDetectionHistoryTimeWindow = duplicationDetectionHistoryTimeWindow;
        if (this.duplicationDetectionHistoryTimeWindow != null
                && this.duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
            this.duplicationDetectionHistoryTimeWindow = ManagementClientConstants.MAX_DURATION;
        }
    }

    /**
     * The maximum delivery count of a message before it is dead-lettered.
     * The delivery count is increased when a message is received in PeekLock mode
     * and the message was not completed before the message lock expired.
     * @return Default value is 10.
     */
    public int getMaxDeliveryCount() {
        return maxDeliveryCount;
    }

    /**
     * The maximum delivery count of a message before it is dead-lettered.
     * @param maxDeliveryCount - Minimum value is 1.
     * @throws IllegalArgumentException if the value is below the allowed minimum.
     */
    public void setMaxDeliveryCount(int maxDeliveryCount) {
        if (maxDeliveryCount < ManagementClientConstants.MIN_ALLOWED_MAX_DELIVERYCOUNT) {
            throw new IllegalArgumentException(
                    String.format("The value must be greater than %s.",
                            ManagementClientConstants.MIN_ALLOWED_MAX_DELIVERYCOUNT));
        }
        this.maxDeliveryCount = maxDeliveryCount;
    }

    /**
     * @return Indicates whether server-side batched operations are enabled.
     *         Defaults to true.
     */
    public boolean isEnableBatchedOperations() {
        return enableBatchedOperations;
    }

    /**
     * @param enableBatchedOperations - Indicates whether server-side batched operations are enabled.
     */
    public void setEnableBatchedOperations(boolean enableBatchedOperations) {
        this.enableBatchedOperations = enableBatchedOperations;
    }

    /**
     * @return The {@link AuthorizationRule} on the queue to control user access at entity level.
     */
    public List<AuthorizationRule> getAuthorizationRules() {
        return authorizationRules;
    }

    /**
     * @param authorizationRules - The {@link AuthorizationRule} on the queue to control user access at entity level.
     */
    public void setAuthorizationRules(List<AuthorizationRule> authorizationRules) {
        this.authorizationRules = authorizationRules;
    }

    /**
     * Gets the status of the entity. When an entity is disabled, that entity cannot send or receive messages.
     * @return The current status of the queue (Enabled / Disabled). The default value is Enabled.
     */
    public EntityStatus getEntityStatus() {
        return this.status;
    }

    /**
     * @param status - the status of the queue (Enabled / Disabled).
     *               When an entity is disabled, that entity cannot send or receive messages.
     */
    public void setEntityStatus(EntityStatus status) {
        this.status = status;
    }

    /**
     * @return The path of the recipient entity to which all the messages sent to the queue are forwarded to.
     *         If set, user cannot manually receive messages from this queue. The destination entity
     *         must be an already existing entity.
     */
    public String getForwardTo() {
        return forwardTo;
    }

    /**
     * @param forwardTo - The path of the recipient entity to which all the messages sent to the queue are forwarded to.
     * @throws IllegalArgumentException if the path is invalid or points back to this queue.
     */
    public void setForwardTo(String forwardTo) {
        if (forwardTo == null || forwardTo.isEmpty()) {
            this.forwardTo = forwardTo;
            return;
        }
        EntityNameHelper.checkValidQueueName(forwardTo);
        if (this.path.equals(forwardTo)) {
            throw new IllegalArgumentException("Entity cannot have auto-forwarding policy to itself");
        }
        this.forwardTo = forwardTo;
    }

    /**
     * @return The path of the recipient entity to which all the dead-lettered messages of this queue are forwarded to.
     *         If set, user cannot manually receive dead-lettered messages from this queue. The destination
     *         entity must already exist.
     */
    public String getForwardDeadLetteredMessagesTo() {
        return forwardDeadLetteredMessagesTo;
    }

    /**
     * @param forwardDeadLetteredMessagesTo - The path of the recipient entity to which all the dead-lettered messages of this queue are forwarded to.
     * @throws IllegalArgumentException if the path is invalid or points back to this queue.
     */
    public void setForwardDeadLetteredMessagesTo(String forwardDeadLetteredMessagesTo) {
        if (forwardDeadLetteredMessagesTo == null || forwardDeadLetteredMessagesTo.isEmpty()) {
            this.forwardDeadLetteredMessagesTo = forwardDeadLetteredMessagesTo;
            return;
        }
        EntityNameHelper.checkValidQueueName(forwardDeadLetteredMessagesTo);
        if (this.path.equals(forwardDeadLetteredMessagesTo)) {
            throw new IllegalArgumentException("Entity cannot have auto-forwarding policy to itself");
        }
        this.forwardDeadLetteredMessagesTo = forwardDeadLetteredMessagesTo;
    }

    /**
     * @return boolean indicating whether the queue is to be partitioned across multiple message brokers.
     *         Defaults to false.
     */
    public boolean isEnablePartitioning() {
        return enablePartitioning;
    }

    /**
     * @param enablePartitioning - true if queue is to be partitioned across multiple message brokers.
     */
    public void setEnablePartitioning(boolean enablePartitioning) {
        this.enablePartitioning = enablePartitioning;
    }

    /**
     * @return Custom metadata that user can associate with the description.
     */
    public String getUserMetadata() {
        return userMetadata;
    }

    /**
     * @param userMetadata - Custom metadata that user can associate with the description.
     *                     Cannot be null. Max length is 1024 chars.
     * @throws IllegalArgumentException if the value is null or too long.
     */
    public void setUserMetadata(String userMetadata) {
        if (userMetadata == null) {
            throw new IllegalArgumentException("Value cannot be null");
        }
        if (userMetadata.length() > ManagementClientConstants.MAX_USERMETADATA_LENGTH) {
            throw new IllegalArgumentException("Length cannot cross " + ManagementClientConstants.MAX_USERMETADATA_LENGTH + " characters");
        }
        this.userMetadata = userMetadata;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof QueueDescription)) {
            return false;
        }
        QueueDescription other = (QueueDescription) o;
        if (this.path.equalsIgnoreCase(other.path)
                && this.autoDeleteOnIdle.equals(other.autoDeleteOnIdle)
                && this.defaultMessageTimeToLive.equals(other.defaultMessageTimeToLive)
                && (!this.requiresDuplicateDetection || this.duplicationDetectionHistoryTimeWindow.equals(other.duplicationDetectionHistoryTimeWindow))
                && this.enableBatchedOperations == other.enableBatchedOperations
                && this.enableDeadLetteringOnMessageExpiration == other.enableDeadLetteringOnMessageExpiration
                && this.enablePartitioning == other.enablePartitioning
                && (this.forwardTo == null ? other.forwardTo == null : this.forwardTo.equalsIgnoreCase(other.forwardTo))
                && (this.forwardDeadLetteredMessagesTo == null ? other.forwardDeadLetteredMessagesTo == null : this.forwardDeadLetteredMessagesTo.equalsIgnoreCase(other.forwardDeadLetteredMessagesTo))
                && this.lockDuration.equals(other.lockDuration)
                && this.maxDeliveryCount == other.maxDeliveryCount
                && this.maxSizeInMB == other.maxSizeInMB
                && this.requiresDuplicateDetection == other.requiresDuplicateDetection
                && this.requiresSession == other.requiresSession
                && this.status.equals(other.status)
                && (this.userMetadata == null ? other.userMetadata == null : this.userMetadata.equals(other.userMetadata))
                && AuthorizationRuleSerializer.equals(this.authorizationRules, other.authorizationRules)) {
            return true;
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Bug fix: equals() compares 'path' case-insensitively, so hashCode() must not
        // be case-sensitive, otherwise equal objects could land in different hash buckets.
        return this.path.toLowerCase(java.util.Locale.ROOT).hashCode();
    }
}
|
class QueueDescription {
Duration duplicationDetectionHistoryTimeWindow = ManagementClientConstants.DEFAULT_HISTORY_DEDUP_WINDOW;
String path;
Duration lockDuration = ManagementClientConstants.DEFAULT_LOCK_DURATION;
Duration defaultMessageTimeToLive = ManagementClientConstants.MAX_DURATION;
Duration autoDeleteOnIdle = ManagementClientConstants.MAX_DURATION;
int maxDeliveryCount = ManagementClientConstants.DEFAULT_MAX_DELIVERY_COUNT;
String forwardTo = null;
String forwardDeadLetteredMessagesTo = null;
String userMetadata = null;
long maxSizeInMB = ManagementClientConstants.DEFAULT_MAX_SIZE_IN_MB;
boolean requiresDuplicateDetection = false;
boolean enableDeadLetteringOnMessageExpiration = false;
boolean requiresSession = false;
boolean enableBatchedOperations = true;
boolean enablePartitioning = false;
EntityStatus status = EntityStatus.Active;
List<AuthorizationRule> authorizationRules = null;
/**
* Initializes a new instance of QueueDescription with the specified relative path.
* @param path - Path of the topic.
* Max length is 260 chars. Cannot start or end with a slash.
* Cannot have restricted characters: '@','?','
*/
public QueueDescription(String path) {
this.setPath(path);
}
/**
* @return The path of the queue.
*/
public String getPath() {
return this.path;
}
/**
* @param path - The path of queue. Max length is 260 chars.
* Cannot start or end with a slash.
* Cannot have restricted characters: '@','?','
*/
private void setPath(String path) {
EntityNameHelper.checkValidQueueName(path);
this.path = path;
}
/**
* The amount of time that the message is locked by a given receiver
* so that no other receiver receives the same message.
* @return The duration of a peek lock. Default value is 60 seconds.
*/
public Duration getLockDuration() {
return this.lockDuration;
}
/**
* Sets The amount of time that the message is locked by a given receiver
* so that no other receiver receives the same message.
* @param lockDuration - The duration of a peek lock. Max value is 5 minutes.
*/
public void setLockDuration(Duration lockDuration) {
this.lockDuration = lockDuration;
if (this.lockDuration.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
this.lockDuration = ManagementClientConstants.MAX_DURATION;
}
}
/**
* @return the maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
* Default value is 1024.
*/
public long getMaxSizeInMB() {
return this.maxSizeInMB;
}
/**
* @param maxSize - Maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
*/
public void setMaxSizeInMB(long maxSize) {
this.maxSizeInMB = maxSize;
}
/**
* If enabled, duplicate messages having same {@link IMessage
* within duration of {@link
* @return value indicating if the queue requires guard against duplicate messages.
*/
public boolean isRequiresDuplicateDetection() {
return requiresDuplicateDetection;
}
/**
* @param requiresDuplicateDetection - True if duplicate detection needs to be enabled.
* See also - {@link
*/
public void setRequiresDuplicateDetection(boolean requiresDuplicateDetection) {
this.requiresDuplicateDetection = requiresDuplicateDetection;
}
/**
* @return boolean that indicates whether the queue supports the concept of session. Sessionful-messages follow FIFO ordering.
*/
public boolean isRequiresSession() {
return requiresSession;
}
/**
* @param requiresSession - True if queue should support sessions.
*/
public void setRequiresSession(boolean requiresSession) {
this.requiresSession = requiresSession;
}
/**
* Time-To-Live is the duration after which the message expires, starting from when
* the message is sent to Service Bus.
* This is the default value used when {@link IMessage
* Messages older than their TimeToLive value will expire and no longer be retained in the message store.
* Subscribers will be unable to receive expired messages.
* @return The default time to live value for the messages. Default value is {@link ManagementClientConstants
*/
public Duration getDefaultMessageTimeToLive() {
return defaultMessageTimeToLive;
}
/**
* @param defaultMessageTimeToLive - The default message time to live value.
* Value cannot be lower than 1 second.
* See {@link
*/
/**
* @return The idle interval after which the queue is automatically deleted.
* Default value is {@link ManagementClientConstants
*/
public Duration getAutoDeleteOnIdle() {
return autoDeleteOnIdle;
}
/**
* @param autoDeleteOnIdle - The idle interval after which the queue is automatically deleted.
* The minimum duration is 5 minutes.
*/
public void setAutoDeleteOnIdle(Duration autoDeleteOnIdle) {
if (autoDeleteOnIdle == null
|| autoDeleteOnIdle.compareTo(ManagementClientConstants.MIN_ALLOWED_AUTODELETE_DURATION) < 0) {
throw new IllegalArgumentException(
String.format("The value must be greater than %s.",
ManagementClientConstants.MIN_ALLOWED_AUTODELETE_DURATION));
}
this.autoDeleteOnIdle = autoDeleteOnIdle;
if (this.autoDeleteOnIdle.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
this.autoDeleteOnIdle = ManagementClientConstants.MAX_DURATION;
}
}
/**
* Indicates whether this queue has dead letter support when a message expires.
* @return If true, the expired messages are moved to dead-letter sub-queue.
* Default value is false.
*/
public boolean isEnableDeadLetteringOnMessageExpiration() {
return enableDeadLetteringOnMessageExpiration;
}
/**
* @param enableDeadLetteringOnMessageExpiration - True if messages should be dead-lettered on expiration.
* See {@link
*/
public void setEnableDeadLetteringOnMessageExpiration(boolean enableDeadLetteringOnMessageExpiration) {
this.enableDeadLetteringOnMessageExpiration = enableDeadLetteringOnMessageExpiration;
}
/**
* @return The duration of duplicate detection history that is maintained by the service.
* The default value is 1 minute.
*/
public Duration getDuplicationDetectionHistoryTimeWindow() {
return duplicationDetectionHistoryTimeWindow;
}
/**
* @param duplicationDetectionHistoryTimeWindow - The duration of duplicate detection history that is maintained by the service.
* Max value is 1 day and minimum is 20 seconds.
*/
public void setDuplicationDetectionHistoryTimeWindow(Duration duplicationDetectionHistoryTimeWindow) {
if (duplicationDetectionHistoryTimeWindow == null
|| (duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MIN_DUPLICATE_HISTORY_DURATION) < 0
|| duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MAX_DUPLICATE_HISTORY_DURATION) > 0)) {
throw new IllegalArgumentException(
String.format("The value must be between %s and %s.",
ManagementClientConstants.MIN_DUPLICATE_HISTORY_DURATION,
ManagementClientConstants.MAX_DUPLICATE_HISTORY_DURATION));
}
this.duplicationDetectionHistoryTimeWindow = duplicationDetectionHistoryTimeWindow;
if (this.duplicationDetectionHistoryTimeWindow.compareTo(ManagementClientConstants.MAX_DURATION) > 0) {
this.duplicationDetectionHistoryTimeWindow = ManagementClientConstants.MAX_DURATION;
}
}
/**
* The maximum delivery count of a message before it is dead-lettered.
* The delivery count is increased when a message is received in {@link com.microsoft.azure.servicebus.ReceiveMode
* and didn't complete the message before the message lock expired.
* @return Default value is 10.
*/
public int getMaxDeliveryCount() {
return maxDeliveryCount;
}
/**
* The maximum delivery count of a message before it is dead-lettered.
* The delivery count is increased when a message is received in {@link com.microsoft.azure.servicebus.ReceiveMode
* and didn't complete the message before the message lock expired.
* @param maxDeliveryCount - Minimum value is 1.
*/
public void setMaxDeliveryCount(int maxDeliveryCount) {
if (maxDeliveryCount < ManagementClientConstants.MIN_ALLOWED_MAX_DELIVERYCOUNT) {
throw new IllegalArgumentException(
String.format("The value must be greater than %s.",
ManagementClientConstants.MIN_ALLOWED_MAX_DELIVERYCOUNT));
}
this.maxDeliveryCount = maxDeliveryCount;
}
/**
 * @return Indicates whether server-side batched operations are enabled.
 * Defaults to true.
 */
public boolean isEnableBatchedOperations() {
    return enableBatchedOperations;
}
/**
 * @param enableBatchedOperations - Indicates whether server-side batched operations are enabled.
 */
public void setEnableBatchedOperations(boolean enableBatchedOperations) {
    this.enableBatchedOperations = enableBatchedOperations;
}
/**
 * @return The {@link AuthorizationRule} on the queue to control user access at entity level.
 */
public List<AuthorizationRule> getAuthorizationRules() {
    // NOTE(review): returns the internal list directly; callers can mutate it — confirm intended.
    return authorizationRules;
}
/**
 * @param authorizationRules - The {@link AuthorizationRule} on the queue to control user access at entity level.
 */
public void setAuthorizationRules(List<AuthorizationRule> authorizationRules) {
    this.authorizationRules = authorizationRules;
}
/**
 * Gets the status of the entity. When an entity is disabled, that entity cannot send or receive messages.
 * @return The current status of the queue (Enabled / Disabled).
 * The default value is Enabled.
 */
public EntityStatus getEntityStatus() {
    return this.status;
}
/**
 * @param status - the status of the queue (Enabled / Disabled).
 * When an entity is disabled, that entity cannot send or receive messages.
 */
public void setEntityStatus(EntityStatus status) {
    this.status = status;
}
/**
 * @return The path of the recipient entity to which all the messages sent to the queue are forwarded to.
 * If set, user cannot manually receive messages from this queue. The destination entity
 * must be an already existing entity.
 */
public String getForwardTo() {
    return forwardTo;
}
/**
 * @param forwardTo - The path of the recipient entity to which all the messages sent to the queue are forwarded to.
 * If set, user cannot manually receive messages from this queue. The destination entity
 * must be an already existing entity.
 * @throws IllegalArgumentException if the path is invalid or points at this queue itself.
 */
public void setForwardTo(String forwardTo) {
    // null/empty clears the forwarding rule without validation.
    if (forwardTo == null || forwardTo.isEmpty()) {
        this.forwardTo = forwardTo;
        return;
    }
    EntityNameHelper.checkValidQueueName(forwardTo);
    // Entity paths are case-insensitive (equals() compares them with equalsIgnoreCase),
    // so the self-forwarding check must be case-insensitive as well.
    if (this.path.equalsIgnoreCase(forwardTo)) {
        throw new IllegalArgumentException("Entity cannot have auto-forwarding policy to itself");
    }
    this.forwardTo = forwardTo;
}
/**
 * @return The path of the recipient entity to which all the dead-lettered messages of this queue are forwarded to.
 * If set, user cannot manually receive dead-lettered messages from this queue. The destination
 * entity must already exist.
 */
public String getForwardDeadLetteredMessagesTo() {
    return forwardDeadLetteredMessagesTo;
}
/**
 * @param forwardDeadLetteredMessagesTo - The path of the recipient entity to which all the dead-lettered messages of this queue are forwarded to.
 * If set, user cannot manually receive dead-lettered messages from this queue. The destination
 * entity must already exist.
 * @throws IllegalArgumentException if the path is invalid or points at this queue itself.
 */
public void setForwardDeadLetteredMessagesTo(String forwardDeadLetteredMessagesTo) {
    // null/empty clears the forwarding rule without validation.
    if (forwardDeadLetteredMessagesTo == null || forwardDeadLetteredMessagesTo.isEmpty()) {
        this.forwardDeadLetteredMessagesTo = forwardDeadLetteredMessagesTo;
        return;
    }
    EntityNameHelper.checkValidQueueName(forwardDeadLetteredMessagesTo);
    // Entity paths are case-insensitive (equals() compares them with equalsIgnoreCase),
    // so the self-forwarding check must be case-insensitive as well.
    if (this.path.equalsIgnoreCase(forwardDeadLetteredMessagesTo)) {
        throw new IllegalArgumentException("Entity cannot have auto-forwarding policy to itself");
    }
    this.forwardDeadLetteredMessagesTo = forwardDeadLetteredMessagesTo;
}
/**
 * @return boolean indicating whether the queue is to be partitioned across multiple message brokers.
 * Defaults to false
 */
public boolean isEnablePartitioning() {
    return enablePartitioning;
}
/**
 * @param enablePartitioning - true if queue is to be partitioned across multiple message brokers.
 */
public void setEnablePartitioning(boolean enablePartitioning) {
    this.enablePartitioning = enablePartitioning;
}
/**
 * @return Custom metadata that user can associate with the description.
 */
public String getUserMetadata() {
    return userMetadata;
}
/**
 * @param userMetadata - Custom metadata that user can associate with the description.
 * Cannot be null. Max length is 1024 chars
 * @throws IllegalArgumentException if the value is null or longer than the allowed maximum.
 */
public void setUserMetadata(String userMetadata) {
    if (userMetadata == null) {
        throw new IllegalArgumentException("Value cannot be null");
    }
    if (userMetadata.length() > ManagementClientConstants.MAX_USERMETADATA_LENGTH) {
        throw new IllegalArgumentException("Length cannot cross " + ManagementClientConstants.MAX_USERMETADATA_LENGTH + " characters");
    }
    this.userMetadata = userMetadata;
}
/**
 * Structural equality over all queue settings.
 * Paths and forwarding targets are compared case-insensitively; the duplicate-detection
 * window is only compared when duplicate detection is actually enabled.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (!(o instanceof QueueDescription)) {
        return false;
    }
    QueueDescription other = (QueueDescription) o;
    if (this.path.equalsIgnoreCase(other.path)
            && this.autoDeleteOnIdle.equals(other.autoDeleteOnIdle)
            && this.defaultMessageTimeToLive.equals(other.defaultMessageTimeToLive)
            // the detection window is irrelevant when duplicate detection is off
            && (!this.requiresDuplicateDetection || this.duplicationDetectionHistoryTimeWindow.equals(other.duplicationDetectionHistoryTimeWindow))
            && this.enableBatchedOperations == other.enableBatchedOperations
            && this.enableDeadLetteringOnMessageExpiration == other.enableDeadLetteringOnMessageExpiration
            && this.enablePartitioning == other.enablePartitioning
            && (this.forwardTo == null ? other.forwardTo == null : this.forwardTo.equalsIgnoreCase(other.forwardTo))
            && (this.forwardDeadLetteredMessagesTo == null ? other.forwardDeadLetteredMessagesTo == null : this.forwardDeadLetteredMessagesTo.equalsIgnoreCase(other.forwardDeadLetteredMessagesTo))
            && this.lockDuration.equals(other.lockDuration)
            && this.maxDeliveryCount == other.maxDeliveryCount
            && this.maxSizeInMB == other.maxSizeInMB
            && this.requiresDuplicateDetection == other.requiresDuplicateDetection
            && this.requiresSession == other.requiresSession
            && this.status.equals(other.status)
            && (this.userMetadata == null ? other.userMetadata == null : this.userMetadata.equals(other.userMetadata))
            && AuthorizationRuleSerializer.equals(this.authorizationRules, other.authorizationRules)) {
        return true;
    }
    return false;
}
/**
 * Hash code based on the queue path only (the identity of the entity).
 */
@Override
public int hashCode() {
    // equals() compares paths with equalsIgnoreCase, so the hash must be
    // case-insensitive too; otherwise two equal descriptions ("Queue" vs
    // "queue") would hash to different buckets, violating the
    // equals/hashCode contract.
    return this.path.toLowerCase().hashCode();
}
}
|
Created an issue to track this: https://github.com/ballerina-platform/ballerina-lang/issues/18342
|
/**
 * Recursively verifies that a type referenced from an exported construct is itself exportable.
 * Container types (arrays, tuples, maps, tables, streams and anonymous records) are unwrapped
 * and their constituent types are checked; any other non-public named type is reported as an
 * attempt to expose a non-public symbol.
 *
 * @param symbol type symbol to validate; null / typeless symbols and type params are skipped
 * @param pos    position used for the diagnostic
 */
private void checkForExportableType(BTypeSymbol symbol, DiagnosticPos pos) {
    if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) {
        return;
    }
    switch (symbol.type.tag) {
        case TypeTags.ARRAY:
            // only the element type matters for an array
            checkForExportableType(((BArrayType) symbol.type).eType.tsymbol, pos);
            return;
        case TypeTags.TUPLE:
            BTupleType tupleType = (BTupleType) symbol.type;
            tupleType.tupleTypes.forEach(t -> checkForExportableType(t.tsymbol, pos));
            if (tupleType.restType != null) {
                checkForExportableType(tupleType.restType.tsymbol, pos);
            }
            return;
        case TypeTags.MAP:
            checkForExportableType(((BMapType) symbol.type).constraint.tsymbol, pos);
            return;
        case TypeTags.RECORD:
            // anonymous records are checked field by field; named records fall
            // through to the public-visibility check below
            if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
                BRecordType recordType = (BRecordType) symbol.type;
                recordType.fields.forEach(f -> checkForExportableType(f.type.tsymbol, pos));
                if (recordType.restFieldType != null) {
                    checkForExportableType(recordType.restFieldType.tsymbol, pos);
                }
                return;
            }
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) symbol.type;
            if (tableType.constraint != null) {
                checkForExportableType(tableType.constraint.tsymbol, pos);
            }
            return;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) symbol.type;
            if (streamType.constraint != null) {
                checkForExportableType(streamType.constraint.tsymbol, pos);
            }
            return;
    }
    // every other type must itself be public to be exported
    if (!Symbols.isPublic(symbol)) {
        dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name);
    }
}
|
/**
 * Recursively verifies that a type referenced from an exported construct is itself exportable.
 * Container types are unwrapped and their constituents validated; any remaining non-public
 * named type is reported as an attempt to expose a non-public symbol.
 *
 * @param symbol type symbol to validate; null / typeless symbols and type params are skipped
 * @param pos    position used for the diagnostic
 */
private void checkForExportableType(BTypeSymbol symbol, DiagnosticPos pos) {
    // Nothing to validate for missing/typeless symbols or type parameters.
    if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) {
        return;
    }
    int tag = symbol.type.tag;
    if (tag == TypeTags.ARRAY) {
        // Only the element type matters for an array.
        checkForExportableType(((BArrayType) symbol.type).eType.tsymbol, pos);
        return;
    }
    if (tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) symbol.type;
        tupleType.tupleTypes.forEach(memberType -> checkForExportableType(memberType.tsymbol, pos));
        if (tupleType.restType != null) {
            checkForExportableType(tupleType.restType.tsymbol, pos);
        }
        return;
    }
    if (tag == TypeTags.MAP) {
        checkForExportableType(((BMapType) symbol.type).constraint.tsymbol, pos);
        return;
    }
    if (tag == TypeTags.RECORD && Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
        // Anonymous records are validated field by field; named records are
        // handled by the visibility check at the bottom.
        BRecordType recordType = (BRecordType) symbol.type;
        recordType.fields.forEach(field -> checkForExportableType(field.type.tsymbol, pos));
        if (recordType.restFieldType != null) {
            checkForExportableType(recordType.restFieldType.tsymbol, pos);
        }
        return;
    }
    if (tag == TypeTags.TABLE) {
        BTableType tableType = (BTableType) symbol.type;
        if (tableType.constraint != null) {
            checkForExportableType(tableType.constraint.tsymbol, pos);
        }
        return;
    }
    if (tag == TypeTags.STREAM) {
        BStreamType streamType = (BStreamType) symbol.type;
        if (streamType.constraint != null) {
            checkForExportableType(streamType.constraint.tsymbol, pos);
        }
        return;
    }
    // Every other type (including named records) must itself be public.
    if (!Symbols.isPublic(symbol)) {
        dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name);
    }
}
|
class CodeAnalyzer extends BLangNodeVisitor {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private static final String NULL_LITERAL = "null";
private final SymbolResolver symResolver;
private int loopCount;
private int transactionCount;
private boolean statementReturns;
private boolean lastStatement;
private boolean withinRetryBlock;
private int workerCount;
private SymbolTable symTable;
private Types types;
private BLangDiagnosticLog dlog;
private TypeChecker typeChecker;
private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>();
private BLangNode parent;
private Names names;
private SymbolEnv env;
private final Stack<HashSet<BType>> returnTypes = new Stack<>();
private boolean withinAbortedBlock;
private boolean withinCommittedBlock;
private boolean isJSONContext;
private boolean enableExperimentalFeatures;
/**
 * Returns the per-compilation {@code CodeAnalyzer}, creating and registering it
 * in the context on first use (the constructor performs the registration).
 */
public static CodeAnalyzer getInstance(CompilerContext context) {
    CodeAnalyzer codeAnalyzer = context.get(CODE_ANALYZER_KEY);
    if (codeAnalyzer != null) {
        return codeAnalyzer;
    }
    return new CodeAnalyzer(context);
}
/**
 * Creates the analyzer, registers it in the compiler context and resolves
 * the collaborating compiler components from the same context.
 */
public CodeAnalyzer(CompilerContext context) {
    context.put(CODE_ANALYZER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeChecker = TypeChecker.getInstance(context);
    this.names = Names.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    // Experimental-feature usage (e.g. transactions) is only allowed when this flag is set.
    this.enableExperimentalFeatures = Boolean.parseBoolean(
            CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED));
}
// Clears per-function analysis state before analyzing a new function body.
private void resetFunction() {
    this.resetStatementReturns();
}
// Marks that the statements seen so far do not unconditionally return.
private void resetStatementReturns() {
    this.statementReturns = false;
}
// Clears the "previous statement terminates the block" flag (set by abort/retry/etc.).
private void resetLastStatement() {
    this.lastStatement = false;
}
/**
 * Entry point: runs code analysis over the given package and returns it.
 */
public BLangPackage analyze(BLangPackage pkgNode) {
    pkgNode.accept(this);
    return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Guard against analyzing the same package twice.
    if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
        return;
    }
    parent = pkgNode;
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    analyzeTopLevelNodes(pkgNode, pkgEnv);
    // Test sources attached to this package are analyzed the same way.
    pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
}
// Analyzes every top-level node of the package and marks the phase as completed.
private void analyzeTopLevelNodes(BLangPackage pkgNode, SymbolEnv pkgEnv) {
    pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv));
    pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
    parent = null;
}
// Visits a node under the given symbol environment, maintaining the
// parent chain and restoring the previous env/parent afterwards.
private void analyzeNode(BLangNode node, SymbolEnv env) {
    SymbolEnv prevEnv = this.env;
    this.env = env;
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    parent = myParent;
    this.env = prevEnv;
}
// Null-safe wrapper around analyzeNode for type nodes.
private void analyzeTypeNode(BLangType node, SymbolEnv env) {
    if (node == null) {
        return;
    }
    analyzeNode(node, env);
}
@Override
public void visit(BLangCompilationUnit compUnitNode) {
    // Analyze each top-level construct of this compilation unit.
    compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env));
}
// @Override added for consistency with every sibling visit(...) override.
@Override
public void visit(BLangTypeDefinition typeDefinition) {
    analyzeTypeNode(typeDefinition.typeNode, this.env);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
    analyzeNode(bLangTupleVariableDef.var, this.env);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
    analyzeNode(bLangRecordVariableDef.var, this.env);
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
    analyzeNode(bLangErrorVariableDef.errorVariable, this.env);
}
@Override
public void visit(BLangFunction funcNode) {
    // Lambdas are analyzed where they occur, not as top-level functions.
    boolean isLambda = funcNode.flagSet.contains(Flag.LAMBDA);
    if (isLambda) {
        return;
    }
    // Public functions may only expose exportable (public) types in their signature.
    if (Symbols.isPublic(funcNode.symbol)) {
        funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol,
                true,
                funcNode.pos));
        if (funcNode.symbol.restParam != null) {
            analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true,
                    funcNode.restParam.pos);
        }
        analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true,
                funcNode.returnTypeNode.pos);
    }
    this.validateMainFunction(funcNode);
    this.validateModuleInitFunction(funcNode);
    try {
        // The function body runs as the "default" worker of a fresh worker-action system.
        this.initNewWorkerActionSystem();
        this.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
                funcNode.pos,
                funcNode);
        this.visitFunction(funcNode);
        this.workerActionSystemStack.peek().endWorkerActionStateMachine();
    } finally {
        // Always unwind the worker-action system, even if analysis throws.
        this.finalizeCurrentWorkerActionSystem();
    }
}
/**
 * Analyzes a function body: tracks return/done validity inside transactions,
 * collects return types, and reports a missing-return error for non-nilable
 * return types whose body does not unconditionally return.
 */
private void visitFunction(BLangFunction funcNode) {
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    this.returnWithintransactionCheckStack.push(true);
    this.doneWithintransactionCheckStack.push(true);
    this.returnTypes.push(new HashSet<>());
    this.resetFunction();
    if (Symbols.isNative(funcNode.symbol)) {
        // Bug fix: the early return previously skipped these pops, leaving the
        // three stacks unbalanced for every native (extern) function.
        this.returnTypes.pop();
        this.returnWithintransactionCheckStack.pop();
        this.doneWithintransactionCheckStack.pop();
        return;
    }
    if (isPublicInvokableNode(funcNode)) {
        analyzeNode(funcNode.returnTypeNode, invokableEnv);
    }
    /* the body can be null in the case of Object type function declarations */
    if (funcNode.body != null) {
        analyzeNode(funcNode.body, invokableEnv);
        boolean isNilableReturn = funcNode.symbol.type.getReturnType().isNullable();
        // A non-nilable return type requires every execution path to return.
        if (!isNilableReturn && !this.statementReturns) {
            this.dlog.error(funcNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN,
                    funcNode.getKind().toString().toLowerCase());
        }
    }
    this.returnTypes.pop();
    this.returnWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
}
// True when the invokable is public AND either module-level or owned by a public construct,
// i.e. actually reachable from outside the module.
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
    return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) ||
            Symbols.isPublic(invNode.symbol.owner));
}
@Override
public void visit(BLangForkJoin forkJoin) {
    /* ignore */
}
@Override
public void visit(BLangWorker worker) {
    /* ignore, remove later */
}
@Override
public void visit(BLangEndpoint endpointNode) {
    // intentionally empty: endpoints need no code analysis here
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Transactions are an experimental language feature.
    checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS, transactionNode.pos);
    this.checkStatementExecutionValidity(transactionNode);
    if (!isValidTransactionBlock()) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER);
        return;
    }
    // Inside a transaction, break/continue/return/done crossing the boundary is illegal;
    // push fresh flags so the nested statements are checked against this transaction.
    this.loopWithintransactionCheckStack.push(false);
    this.returnWithintransactionCheckStack.push(false);
    this.doneWithintransactionCheckStack.push(false);
    this.transactionCount++;
    if (this.transactionCount > 1) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.NESTED_TRANSACTIONS_ARE_INVALID);
    }
    analyzeNode(transactionNode.transactionBody, env);
    this.transactionCount--;
    this.resetLastStatement();
    // Each handler block is analyzed with its own flag so statements inside it
    // know their context; return/termination state is reset between blocks.
    if (transactionNode.onRetryBody != null) {
        this.withinRetryBlock = true;
        analyzeNode(transactionNode.onRetryBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinRetryBlock = false;
    }
    if (transactionNode.abortedBody != null) {
        this.withinAbortedBlock = true;
        analyzeNode(transactionNode.abortedBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinAbortedBlock = false;
    }
    if (transactionNode.committedBody != null) {
        this.withinCommittedBlock = true;
        analyzeNode(transactionNode.committedBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinCommittedBlock = false;
    }
    this.returnWithintransactionCheckStack.pop();
    this.loopWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
    analyzeExpr(transactionNode.retryCount);
}
@Override
public void visit(BLangAbort abortNode) {
    // abort is only meaningful inside a transaction block.
    if (this.transactionCount == 0) {
        this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    // abort terminates the block, so anything after it is unreachable.
    this.lastStatement = true;
}
@Override
public void visit(BLangRetry retryNode) {
    // retry is only meaningful inside a transaction block.
    if (this.transactionCount == 0) {
        this.dlog.error(retryNode.pos, DiagnosticCode.RETRY_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    // retry terminates the block, so anything after it is unreachable.
    this.lastStatement = true;
}
// Reports the statement as unreachable if a preceding statement already returned
// or terminated the block. The flag is reset after reporting so only the first
// unreachable statement in a run is flagged.
private void checkUnreachableCode(BLangStatement stmt) {
    if (this.statementReturns) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetStatementReturns();
    }
    if (lastStatement) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetLastStatement();
    }
}
// Per-statement validity hook; currently only the unreachable-code check.
private void checkStatementExecutionValidity(BLangStatement stmt) {
    this.checkUnreachableCode(stmt);
}
@Override
public void visit(BLangBlockStmt blockNode) {
    // Each statement is analyzed in a dedicated block-level environment.
    final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv));
    this.resetLastStatement();
}
@Override
public void visit(BLangReturn returnStmt) {
    this.checkStatementExecutionValidity(returnStmt);
    // return may not be used to jump out of a transaction block.
    if (checkReturnValidityInTransaction()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.statementReturns = true;
    analyzeExpr(returnStmt.expr);
    // NOTE(review): assumes returnStmt.expr is never null here (bare `return;`
    // presumably carries a nil-literal expr) — confirm against the parser.
    this.returnTypes.peek().add(returnStmt.expr.type);
}
@Override
public void visit(BLangIf ifStmt) {
    this.checkStatementExecutionValidity(ifStmt);
    analyzeNode(ifStmt.body, env);
    boolean ifStmtReturns = this.statementReturns;
    this.resetStatementReturns();
    // The whole if-statement returns only when both branches return;
    // with no else branch it never guarantees a return.
    if (ifStmt.elseStmt != null) {
        analyzeNode(ifStmt.elseStmt, env);
        this.statementReturns = ifStmtReturns && this.statementReturns;
    }
    analyzeExpr(ifStmt.expr);
}
@Override
public void visit(BLangMatch matchStmt) {
    analyzeExpr(matchStmt.expr);
    // Determine whether each pattern family ends with an always-matching pattern.
    boolean staticLastPattern = false;
    if (!matchStmt.getStaticPatternClauses().isEmpty()) {
        staticLastPattern = analyzeStaticMatchPatterns(matchStmt);
    }
    boolean structuredLastPattern = false;
    if (!matchStmt.getStructuredPatternClauses().isEmpty()) {
        structuredLastPattern = analyzeStructuredMatchPatterns(matchStmt);
    }
    if (!matchStmt.getPatternClauses().isEmpty()) {
        analyzeEmptyMatchPatterns(matchStmt);
        analyzeMatchedPatterns(matchStmt, staticLastPattern, structuredLastPattern);
    }
}
// Validates the combination of static/structured last patterns and computes
// whether the whole match statement unconditionally returns (all clause bodies return).
private void analyzeMatchedPatterns(BLangMatch matchStmt, boolean staticLastPattern,
                                    boolean structuredLastPattern) {
    if (staticLastPattern && structuredLastPattern) {
        dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CONTAINS_TWO_DEFAULT_PATTERNS);
    }
    if ((staticLastPattern && !hasErrorType(matchStmt.exprTypes)) || structuredLastPattern) {
        // A single always-matching clause makes the match pointless.
        if (matchStmt.getPatternClauses().size() == 1) {
            dlog.error(matchStmt.getPatternClauses().get(0).pos, DiagnosticCode.MATCH_STMT_PATTERN_ALWAYS_MATCHES);
        }
        this.checkStatementExecutionValidity(matchStmt);
        boolean matchStmtReturns = true;
        for (BLangMatchBindingPatternClause patternClause : matchStmt.getPatternClauses()) {
            analyzeNode(patternClause.body, env);
            matchStmtReturns = matchStmtReturns && this.statementReturns;
            this.resetStatementReturns();
        }
        this.statementReturns = matchStmtReturns;
    }
}
// True if any of the given types is assignable to the built-in error type.
private boolean hasErrorType(List<BType> typeList) {
    for (BType type : typeList) {
        if (types.isAssignable(type, symTable.errorType)) {
            return true;
        }
    }
    return false;
}
// Analyzes structured (binding-pattern) clauses; returns true when the final
// clause is an always-matching (last) pattern.
private boolean analyzeStructuredMatchPatterns(BLangMatch matchStmt) {
    if (matchStmt.exprTypes.isEmpty()) {
        return false;
    }
    return analyseStructuredBindingPatterns(matchStmt.getStructuredPatternClauses(),
            hasErrorType(matchStmt.exprTypes));
}
/**
 * This method is used to check structured {@code var []}, {@code var {}} and static
 * {@code []}, {@code {}} match patterns. Only the first empty-list and first
 * empty-record pattern is reachable; every duplicate is reported as unreachable.
 *
 * @param matchStmt the match statement containing structured and static match patterns.
 */
private void analyzeEmptyMatchPatterns(BLangMatch matchStmt) {
    List<BLangMatchBindingPatternClause> emptyLists = new ArrayList<>();
    List<BLangMatchBindingPatternClause> emptyRecords = new ArrayList<>();
    for (BLangMatchBindingPatternClause pattern : matchStmt.patternClauses) {
        if (pattern.getKind() == NodeKind.MATCH_STATIC_PATTERN_CLAUSE) {
            // static patterns: literal [] or {}
            BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) pattern;
            if (staticPattern.literal.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
                BLangListConstructorExpr listLiteral = (BLangListConstructorExpr) staticPattern.literal;
                if (listLiteral.exprs.isEmpty()) {
                    emptyLists.add(pattern);
                }
            } else if (staticPattern.literal.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
                BLangRecordLiteral recordLiteral = (BLangRecordLiteral) staticPattern.literal;
                if (recordLiteral.keyValuePairs.isEmpty()) {
                    emptyRecords.add(pattern);
                }
            }
        } else if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
            // structured patterns: var [] (no members, no rest) or var {} (no fields, no rest)
            BLangMatchStructuredBindingPatternClause structuredPattern
                    = (BLangMatchStructuredBindingPatternClause) pattern;
            if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.TUPLE_VARIABLE) {
                BLangTupleVariable tupleVariable = (BLangTupleVariable) structuredPattern.bindingPatternVariable;
                if (tupleVariable.memberVariables.isEmpty() && tupleVariable.restVariable == null) {
                    emptyLists.add(pattern);
                }
            } else if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.RECORD_VARIABLE) {
                BLangRecordVariable recordVariable = (BLangRecordVariable) structuredPattern.bindingPatternVariable;
                if (recordVariable.variableList.isEmpty() && recordVariable.restParam == null) {
                    emptyRecords.add(pattern);
                }
            }
        }
    }
    // only the first of each kind is reachable
    if (emptyLists.size() > 1) {
        for (int i = 1; i < emptyLists.size(); i++) {
            dlog.error(emptyLists.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
        }
    }
    if (emptyRecords.size() > 1) {
        for (int i = 1; i < emptyRecords.size(); i++) {
            dlog.error(emptyRecords.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
        }
    }
}
/**
 * This method is used to check the isLike test in a static match pattern.
 *
 * @param matchStmt the match statement containing static match patterns.
 * @return true when the final static pattern is an always-matching (last) pattern.
 */
private boolean analyzeStaticMatchPatterns(BLangMatch matchStmt) {
    if (matchStmt.exprTypes.isEmpty()) {
        return false;
    }
    List<BLangMatchStaticBindingPatternClause> matchedPatterns = new ArrayList<>();
    for (BLangMatchStaticBindingPatternClause pattern : matchStmt.getStaticPatternClauses()) {
        // keep only the patterns that can possibly match one of the expression types
        List<BType> matchedExpTypes = matchStmt.exprTypes
                .stream()
                .filter(exprType -> isValidStaticMatchPattern(exprType, pattern.literal))
                .collect(Collectors.toList());
        if (matchedExpTypes.isEmpty()) {
            dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            continue;
        }
        this.isJSONContext = types.isJSONContext(matchStmt.expr.type);
        analyzeNode(pattern.literal, env);
        matchedPatterns.add(pattern);
    }
    if (matchedPatterns.isEmpty()) {
        return false;
    }
    return analyzeStaticPatterns(matchedPatterns, hasErrorType(matchStmt.exprTypes));
}
// Flags static patterns shadowed by an earlier similar pattern as unreachable
// (removing them from the caller's list) and returns whether the final pattern
// is an always-matching `_` pattern.
private boolean analyzeStaticPatterns(List<BLangMatchStaticBindingPatternClause> matchedPatterns,
                                      boolean errorTypeInMatchExpr) {
    BLangMatchStaticBindingPatternClause finalPattern = matchedPatterns.get(matchedPatterns.size() - 1);
    // `_` as the last pattern matches everything, unless errors may flow through.
    if (finalPattern.literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) finalPattern.literal).variableName.value.equals(Names.IGNORE.value)
            && !errorTypeInMatchExpr) {
        finalPattern.isLastPattern = true;
    }
    for (int i = 0; i < matchedPatterns.size() - 1; i++) {
        BLangExpression precedingPattern = matchedPatterns.get(i).literal;
        for (int j = i + 1; j < matchedPatterns.size(); j++) {
            BLangExpression pattern = matchedPatterns.get(j).literal;
            if (checkLiteralSimilarity(precedingPattern, pattern)) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
                // j-- compensates for the in-place removal from the list
                matchedPatterns.remove(j--);
            }
        }
    }
    return finalPattern.isLastPattern;
}
// Flags structured patterns shadowed by an earlier similar pattern (with a
// compatible type guard) as unreachable, removing them from the caller's list,
// and returns whether the final clause is an always-matching pattern.
private boolean analyseStructuredBindingPatterns(List<BLangMatchStructuredBindingPatternClause> clauses,
                                                 boolean errorTypeInMatchExpr) {
    BLangMatchStructuredBindingPatternClause finalPattern = clauses.get(clauses.size() - 1);
    // A bare variable with no type guard matches everything, unless errors may
    // flow through and the pattern is the `_` wildcard.
    if (finalPattern.bindingPatternVariable.getKind() == NodeKind.VARIABLE
            && finalPattern.typeGuardExpr == null
            && !(errorTypeInMatchExpr && isWildcardMatchPattern(finalPattern))) {
        finalPattern.isLastPattern = true;
    }
    BLangMatchStructuredBindingPatternClause currentPattern;
    BLangMatchStructuredBindingPatternClause precedingPattern;
    for (int i = 0; i < clauses.size(); i++) {
        precedingPattern = clauses.get(i);
        if (precedingPattern.typeGuardExpr != null) {
            analyzeExpr(precedingPattern.typeGuardExpr);
        }
        for (int j = i + 1; j < clauses.size(); j++) {
            currentPattern = clauses.get(j);
            BLangVariable precedingVar = precedingPattern.bindingPatternVariable;
            BLangVariable currentVar = currentPattern.bindingPatternVariable;
            if (checkStructuredPatternSimilarity(precedingVar, currentVar, errorTypeInMatchExpr) &&
                    checkTypeGuardSimilarity(precedingPattern.typeGuardExpr, currentPattern.typeGuardExpr)) {
                dlog.error(currentVar.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
                // j-- compensates for the in-place removal from the list
                clauses.remove(j--);
            }
        }
    }
    return finalPattern.isLastPattern;
}
// True when the clause binds the `_` wildcard (ignored) variable.
// NOTE(review): assumes bindingPatternVariable is a BLangSimpleVariable here — callers
// appear to only use this for simple-variable patterns; confirm.
private boolean isWildcardMatchPattern(BLangMatchStructuredBindingPatternClause finalPattern) {
    return ((BLangSimpleVariable) finalPattern.bindingPatternVariable).name.value.equals(Names.IGNORE.value);
}
/**
 * This method will check if two patterns are similar to each other.
 * Having similar patterns in the match block will result in unreachable pattern.
 *
 * @param precedingPattern pattern taken to compare similarity.
 * @param pattern the pattern that the precedingPattern is checked for similarity.
 * @return true if both patterns are similar.
 */
private boolean checkLiteralSimilarity(BLangExpression precedingPattern, BLangExpression pattern) {
    // Binary (|-combined) patterns are similar when either operand is similar.
    if (precedingPattern.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr precedingBinaryExpr = (BLangBinaryExpr) precedingPattern;
        BLangExpression precedingLhsExpr = precedingBinaryExpr.lhsExpr;
        BLangExpression precedingRhsExpr = precedingBinaryExpr.rhsExpr;
        return checkLiteralSimilarity(precedingLhsExpr, pattern) ||
                checkLiteralSimilarity(precedingRhsExpr, pattern);
    }
    if (pattern.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) pattern;
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        return checkLiteralSimilarity(precedingPattern, lhsExpr) ||
                checkLiteralSimilarity(precedingPattern, rhsExpr);
    }
    switch (precedingPattern.type.tag) {
        case TypeTags.MAP:
            // A record pattern shadows another when every one of its keys exists in
            // the later pattern with a similar value.
            if (pattern.type.tag == TypeTags.MAP) {
                BLangRecordLiteral precedingRecordLiteral = (BLangRecordLiteral) precedingPattern;
                Map<String, BLangExpression> recordLiteral = ((BLangRecordLiteral) pattern).keyValuePairs
                        .stream()
                        .collect(Collectors.toMap(
                                keyValuePair -> ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value,
                                BLangRecordKeyValue::getValue
                        ));
                for (int i = 0; i < precedingRecordLiteral.keyValuePairs.size(); i++) {
                    BLangRecordKeyValue bLangRecordKeyValue = precedingRecordLiteral.keyValuePairs.get(i);
                    String key = ((BLangSimpleVarRef) bLangRecordKeyValue.key.expr).variableName.value;
                    if (!recordLiteral.containsKey(key)) {
                        return false;
                    }
                    if (!checkLiteralSimilarity(bLangRecordKeyValue.valueExpr, recordLiteral.get(key))) {
                        return false;
                    }
                }
                return true;
            }
            return false;
        case TypeTags.TUPLE:
            // Tuple patterns are similar when they have the same arity and every
            // member pair is similar.
            if (pattern.type.tag == TypeTags.TUPLE) {
                BLangListConstructorExpr precedingTupleLiteral = (BLangListConstructorExpr) precedingPattern;
                BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) pattern;
                if (precedingTupleLiteral.exprs.size() != tupleLiteral.exprs.size()) {
                    return false;
                }
                return IntStream.range(0, precedingTupleLiteral.exprs.size())
                        .allMatch(i -> checkLiteralSimilarity(precedingTupleLiteral.exprs.get(i),
                                tupleLiteral.exprs.get(i)));
            }
            return false;
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.BOOLEAN:
            // Simple value patterns: compare constant-reference values and/or
            // literal values, unwrapping group (parenthesized) expressions.
            if (precedingPattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                BConstantSymbol precedingPatternSym =
                        (BConstantSymbol) ((BLangSimpleVarRef) precedingPattern).symbol;
                if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    if (!((BLangSimpleVarRef) pattern).variableName.value.equals(Names.IGNORE.value)) {
                        BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol;
                        return precedingPatternSym.value.equals(patternSym.value);
                    }
                    return false;
                }
                BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) pattern).expression :
                        (BLangLiteral) pattern;
                return (precedingPatternSym.value.equals(literal.value));
            }
            if (types.isValueType(pattern.type)) {
                BLangLiteral precedingLiteral = precedingPattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) precedingPattern).expression :
                        (BLangLiteral) precedingPattern;
                if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    if (pattern.type.tag != TypeTags.NONE) {
                        BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol;
                        return patternSym.value.equals(precedingLiteral.value);
                    }
                    return false;
                }
                BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) pattern).expression :
                        (BLangLiteral) pattern;
                return (precedingLiteral.value.equals(literal.value));
            }
            return false;
        case TypeTags.ANY:
            // `any` shadows everything except error values.
            if (pattern.type.tag == TypeTags.ERROR) {
                return false;
            }
            return true;
        default:
            return false;
    }
}
/**
 * This method will determine if the type guard of the preceding pattern will result in the current pattern
 * being unreachable.
 *
 * @param precedingGuard type guard of the preceding structured pattern
 * @param currentGuard type guard of the current structured pattern
 * @return true if the current pattern is unreachable due to the type guard of the preceding pattern
 */
private boolean checkTypeGuardSimilarity(BLangExpression precedingGuard, BLangExpression currentGuard) {
    if (precedingGuard != null && currentGuard != null) {
        // Only simple `x is T` guards over the same variable and the same type
        // tag are considered similar.
        if (precedingGuard.getKind() == NodeKind.TYPE_TEST_EXPR &&
                currentGuard.getKind() == NodeKind.TYPE_TEST_EXPR &&
                ((BLangTypeTestExpr) precedingGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                ((BLangTypeTestExpr) currentGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BLangTypeTestExpr precedingTypeTest = (BLangTypeTestExpr) precedingGuard;
            BLangTypeTestExpr currentTypeTest = (BLangTypeTestExpr) currentGuard;
            return ((BLangSimpleVarRef) precedingTypeTest.expr).variableName.toString().equals(
                    ((BLangSimpleVarRef) currentTypeTest.expr).variableName.toString()) &&
                    precedingTypeTest.typeNode.type.tag == currentTypeTest.typeNode.type.tag;
        }
        return false;
    }
    // An unguarded preceding pattern shadows anything; a guarded preceding
    // pattern does not shadow an unguarded one.
    return currentGuard != null || precedingGuard == null;
}
/**
* This method will determine if the current structured pattern will be unreachable due to a preceding pattern.
*
* @param precedingVar the structured pattern that appears on top
* @param var the structured pattern that appears after the precedingVar
* @param errorTypeInMatchExpr
* @return true if the the current pattern is unreachable due to the preceding pattern
*/
    private boolean checkStructuredPatternSimilarity(BLangVariable precedingVar,
                                                     BLangVariable var,
                                                     boolean errorTypeInMatchExpr) {
        // Patterns with unresolved (error) types never shadow anything.
        if (precedingVar.type.tag == TypeTags.SEMANTIC_ERROR || var.type.tag == TypeTags.SEMANTIC_ERROR) {
            return false;
        }
        // Record pattern vs record pattern: the preceding pattern shadows the current one when
        // every key it binds is also bound (similarly) by the current pattern, and its rest-param
        // usage is at least as permissive.
        if (precedingVar.getKind() == NodeKind.RECORD_VARIABLE && var.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangRecordVariable precedingRecVar = (BLangRecordVariable) precedingVar;
            BLangRecordVariable recVar = (BLangRecordVariable) var;
            // Index the current pattern's fields by key for lookup below.
            Map<String, BLangVariable> recVarAsMap = recVar.variableList.stream()
                    .collect(Collectors.toMap(
                            keyValue -> keyValue.key.value,
                            keyValue -> keyValue.valueBindingPattern
                    ));
            // A preceding pattern binding more keys is narrower, so it cannot shadow.
            if (precedingRecVar.variableList.size() > recVar.variableList.size()) {
                return false;
            }
            for (int i = 0; i < precedingRecVar.variableList.size(); i++) {
                BLangRecordVariableKeyValue precedingKeyValue = precedingRecVar.variableList.get(i);
                if (!recVarAsMap.containsKey(precedingKeyValue.key.value)) {
                    return false;
                }
                // Recurse into the value binding patterns of the shared key.
                if (!checkStructuredPatternSimilarity(
                        precedingKeyValue.valueBindingPattern,
                        recVarAsMap.get(precedingKeyValue.key.value),
                        errorTypeInMatchExpr)) {
                    return false;
                }
            }
            if (precedingRecVar.hasRestParam() && recVar.hasRestParam()) {
                return true;
            }
            // Shadowing holds if the preceding pattern has a rest param, or neither does.
            return precedingRecVar.hasRestParam() || !recVar.hasRestParam();
        }
        // Tuple pattern vs tuple pattern: compare member-wise, accounting for rest variables.
        if (precedingVar.getKind() == NodeKind.TUPLE_VARIABLE && var.getKind() == NodeKind.TUPLE_VARIABLE) {
            List<BLangVariable> precedingMemberVars = ((BLangTupleVariable) precedingVar).memberVariables;
            BLangVariable precedingRestVar = ((BLangTupleVariable) precedingVar).restVariable;
            List<BLangVariable> memberVars = ((BLangTupleVariable) var).memberVariables;
            BLangVariable memberRestVar = ((BLangTupleVariable) var).restVariable;
            if (precedingRestVar != null && memberRestVar != null) {
                return true;
            }
            // Without rest vars, arities must match exactly.
            if (precedingRestVar == null && memberRestVar == null
                    && precedingMemberVars.size() != memberVars.size()) {
                return false;
            }
            if (precedingRestVar != null && precedingMemberVars.size() > memberVars.size()) {
                return false;
            }
            // Current pattern having a rest var (and preceding not) means it matches more values.
            if (memberRestVar != null) {
                return false;
            }
            for (int i = 0; i < memberVars.size(); i++) {
                if (!checkStructuredPatternSimilarity(precedingMemberVars.get(i), memberVars.get(i),
                        errorTypeInMatchExpr)) {
                    return false;
                }
            }
            return true;
        }
        // Error pattern vs error pattern: a preceding rest-detail binding shadows everything;
        // otherwise compare the detail entries the current pattern binds.
        if (precedingVar.getKind() == NodeKind.ERROR_VARIABLE && var.getKind() == NodeKind.ERROR_VARIABLE) {
            BLangErrorVariable precedingErrVar = (BLangErrorVariable) precedingVar;
            BLangErrorVariable errVar = (BLangErrorVariable) var;
            if (precedingErrVar.restDetail != null) {
                return true;
            }
            if (errVar.restDetail != null) {
                return false;
            }
            if (precedingErrVar.detail != null && errVar.detail != null) {
                // Index the preceding pattern's detail entries by key.
                Map<String, BLangVariable> preDetails = precedingErrVar.detail.stream()
                        .collect(Collectors.toMap(entry -> entry.key.value, entry -> entry.valueBindingPattern));
                for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errVar.detail) {
                    BLangVariable correspondingCurDetail = preDetails.get(detailEntry.key.value);
                    if (correspondingCurDetail == null) {
                        return false;
                    }
                    boolean similar =
                            checkStructuredPatternSimilarity(detailEntry.valueBindingPattern, correspondingCurDetail,
                                    errorTypeInMatchExpr);
                    if (!similar) {
                        return false;
                    }
                }
            }
            return true;
        }
        // A `_` wildcard does not shadow a subsequent error pattern.
        if (precedingVar.getKind() == NodeKind.VARIABLE
                && ((BLangSimpleVariable) precedingVar).name.value.equals(Names.IGNORE.value)
                && var.getKind() == NodeKind.ERROR_VARIABLE) {
            return false;
        }
        // Any other simple-variable preceding pattern captures everything, shadowing `var`.
        return precedingVar.getKind() == NodeKind.VARIABLE;
    }
/**
* This method will check if the static match pattern is valid based on the matching type.
*
* @param matchType type of the expression being matched.
* @param literal the static match pattern.
* @return true if the pattern is valid, else false.
*/
    private boolean isValidStaticMatchPattern(BType matchType, BLangExpression literal) {
        // NONE-typed literals (e.g. unresolved) are conservatively accepted.
        if (literal.type.tag == TypeTags.NONE) {
            return true;
        }
        if (types.isSameType(literal.type, matchType)) {
            return true;
        }
        if (TypeTags.ANY == literal.type.tag) {
            return true;
        }
        switch (matchType.tag) {
            case TypeTags.ANY:
            case TypeTags.ANYDATA:
            case TypeTags.JSON:
                // These types can hold any static-pattern value.
                return true;
            case TypeTags.UNION:
                // Valid if the pattern fits at least one member type.
                BUnionType unionMatchType = (BUnionType) matchType;
                return unionMatchType.getMemberTypes()
                        .stream()
                        .anyMatch(memberMatchType -> isValidStaticMatchPattern(memberMatchType, literal));
            case TypeTags.TUPLE:
                // Tuple patterns must have the same arity and member-wise validity.
                if (literal.type.tag == TypeTags.TUPLE) {
                    BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) literal;
                    BTupleType literalTupleType = (BTupleType) literal.type;
                    BTupleType matchTupleType = (BTupleType) matchType;
                    if (literalTupleType.tupleTypes.size() != matchTupleType.tupleTypes.size()) {
                        return false;
                    }
                    return IntStream.range(0, literalTupleType.tupleTypes.size())
                            .allMatch(i ->
                                    isValidStaticMatchPattern(matchTupleType.tupleTypes.get(i),
                                            tupleLiteral.exprs.get(i)));
                }
                break;
            case TypeTags.MAP:
                // Every value in the mapping pattern must fit the map's constraint type.
                if (literal.type.tag == TypeTags.MAP) {
                    BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal;
                    return IntStream.range(0, mapLiteral.keyValuePairs.size())
                            .allMatch(i -> isValidStaticMatchPattern(((BMapType) matchType).constraint,
                                    mapLiteral.keyValuePairs.get(i).valueExpr));
                }
                break;
            case TypeTags.RECORD:
                // Each key of the mapping pattern must either name a declared field (value must
                // fit that field's type) or, for open records, fit the rest-field type.
                if (literal.type.tag == TypeTags.MAP) {
                    BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal;
                    BRecordType recordMatchType = (BRecordType) matchType;
                    Map<String, BType> recordFields = recordMatchType.fields
                            .stream()
                            .collect(Collectors.toMap(
                                    field -> field.getName().getValue(),
                                    BField::getType
                            ));
                    for (BLangRecordKeyValue literalKeyValue : mapLiteral.keyValuePairs) {
                        String literalKeyName;
                        NodeKind nodeKind = literalKeyValue.key.expr.getKind();
                        if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
                            literalKeyName = ((BLangSimpleVarRef) literalKeyValue.key.expr).variableName.value;
                        } else if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
                            literalKeyName = ((BLangLiteral) literalKeyValue.key.expr).value.toString();
                        } else {
                            // Computed or other key forms are not valid static patterns.
                            return false;
                        }
                        if (recordFields.containsKey(literalKeyName)) {
                            if (!isValidStaticMatchPattern(
                                    recordFields.get(literalKeyName), literalKeyValue.valueExpr)) {
                                return false;
                            }
                        } else if (recordMatchType.sealed ||
                                !isValidStaticMatchPattern(recordMatchType.restFieldType, literalKeyValue.valueExpr)) {
                            return false;
                        }
                    }
                    return true;
                }
                break;
            case TypeTags.BYTE:
                // Int literals may match byte-typed expressions.
                if (literal.type.tag == TypeTags.INT) {
                    return true;
                }
                break;
            case TypeTags.FINITE:
                // Literals (or constant refs resolving to a finite type's single value)
                // must be assignable to the finite type's value space.
                if (literal.getKind() == NodeKind.LITERAL || literal.getKind() == NodeKind.NUMERIC_LITERAL) {
                    return types.isAssignableToFiniteType(matchType, (BLangLiteral) literal);
                }
                if (literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                        ((BLangSimpleVarRef) literal).symbol.getKind() == SymbolKind.CONSTANT) {
                    BConstantSymbol constSymbol = (BConstantSymbol) ((BLangSimpleVarRef) literal).symbol;
                    return types.isAssignableToFiniteType(matchType,
                            (BLangLiteral) ((BFiniteType) constSymbol.type).valueSpace.iterator().next());
                }
                break;
        }
        return false;
    }
    @Override
    public void visit(BLangForeach foreach) {
        // Track that we are inside a loop (for break/continue-in-transaction checks),
        // bump the loop counter for the body, and clear last-statement state afterwards.
        this.loopWithintransactionCheckStack.push(true);
        this.checkStatementExecutionValidity(foreach);
        this.loopCount++;
        analyzeNode(foreach.body, env);
        this.loopCount--;
        this.resetLastStatement();
        this.loopWithintransactionCheckStack.pop();
        analyzeExpr(foreach.collection);
    }
    @Override
    public void visit(BLangWhile whileNode) {
        // Same loop bookkeeping as foreach; the condition is analyzed after the body.
        this.loopWithintransactionCheckStack.push(true);
        this.checkStatementExecutionValidity(whileNode);
        this.loopCount++;
        analyzeNode(whileNode.body, env);
        this.loopCount--;
        this.resetLastStatement();
        this.loopWithintransactionCheckStack.pop();
        analyzeExpr(whileNode.expr);
    }
    @Override
    public void visit(BLangLock lockNode) {
        // `lock` is an experimental feature; flag its use, then analyze each body statement.
        checkExperimentalFeatureValidity(ExperimentalFeatures.LOCK, lockNode.pos);
        this.checkStatementExecutionValidity(lockNode);
        lockNode.body.stmts.forEach(e -> analyzeNode(e, env));
    }
    @Override
    public void visit(BLangContinue continueNode) {
        this.checkStatementExecutionValidity(continueNode);
        // `continue` is only valid inside a loop ...
        if (this.loopCount == 0) {
            this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
            return;
        }
        // ... and must not be used to jump out of an enclosing transaction.
        if (checkNextBreakValidityInTransaction()) {
            this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
            return;
        }
        // Statements after a continue in this block are unreachable.
        this.lastStatement = true;
    }
    public void visit(BLangImportPackage importPkgNode) {
        // Analyze the imported package's AST in its own environment, if available.
        BPackageSymbol pkgSymbol = importPkgNode.symbol;
        SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
        if (pkgEnv == null) {
            return;
        }
        analyzeNode(pkgEnv.node, env);
    }
    public void visit(BLangXMLNS xmlnsNode) {
        /* ignore */
    }
    public void visit(BLangService serviceNode) {
        // Nothing to analyze at the service node itself.
    }
    public void visit(BLangResource resourceNode) {
        // Resources are no longer a supported language construct.
        throw new RuntimeException("Deprecated lang feature");
    }
    public void visit(BLangForever foreverStatement) {
        // `forever` (streaming queries) is experimental and terminates the block.
        checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMING_QUERIES, foreverStatement.pos);
        this.checkStatementExecutionValidity(foreverStatement);
        this.lastStatement = true;
    }
    /**
     * Checks that a type referenced from a public construct is itself exportable.
     * Anonymous owners outside function signatures are exempt.
     */
    private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature,
                                          DiagnosticPos pos) {
        if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) {
            // Anonymous constructs are not exported; no check needed.
            return;
        }
        if (Symbols.isPublic(owner)) {
            checkForExportableType(symbol, pos);
        }
    }
    public void visit(BLangSimpleVariable varNode) {
        analyzeTypeNode(varNode.typeNode, this.env);
        analyzeExpr(varNode.expr);
        if (Objects.isNull(varNode.symbol)) {
            return;
        }
        // Exportability checks apply only to public variables.
        if (!Symbols.isPublic(varNode.symbol)) {
            return;
        }
        int ownerSymTag = this.env.scope.owner.tag;
        if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
            // Field of a record/object: check against the owning type's symbol.
            analyzeExportableTypeRef(this.env.scope.owner, varNode.type.tsymbol, false, varNode.pos);
        } else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
            // Module-level variable (locals inside invokables are skipped).
            analyzeExportableTypeRef(varNode.symbol, varNode.type.tsymbol, false, varNode.pos);
        }
    }
    @Override
    public void visit(BLangTupleVariable bLangTupleVariable) {
        // Analyze the declared type (if explicit) and the initializer expression.
        if (bLangTupleVariable.typeNode != null) {
            analyzeNode(bLangTupleVariable.typeNode, this.env);
        }
        analyzeExpr(bLangTupleVariable.expr);
    }
    @Override
    public void visit(BLangRecordVariable bLangRecordVariable) {
        // Analyze the declared type (if explicit) and the initializer expression.
        if (bLangRecordVariable.typeNode != null) {
            analyzeNode(bLangRecordVariable.typeNode, this.env);
        }
        analyzeExpr(bLangRecordVariable.expr);
    }
    @Override
    public void visit(BLangErrorVariable bLangErrorVariable) {
        // Analyze the declared type (if explicit) and the initializer expression.
        if (bLangErrorVariable.typeNode != null) {
            analyzeNode(bLangErrorVariable.typeNode, this.env);
        }
        analyzeExpr(bLangErrorVariable.expr);
    }
private BType getNilableType(BType type) {
if (type.isNullable()) {
return type;
}
BUnionType unionType = BUnionType.create(null);
if (type.tag == TypeTags.UNION) {
LinkedHashSet<BType> memTypes = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
unionType.addAll(memTypes);
}
unionType.add(type);
unionType.add(symTable.nilType);
return unionType;
}
    public void visit(BLangIdentifier identifierNode) {
        /* ignore */
    }
    public void visit(BLangAnnotation annotationNode) {
        /* ignore */
    }
    public void visit(BLangAnnotationAttachment annAttachmentNode) {
        /* ignore */
    }
    public void visit(BLangSimpleVariableDef varDefNode) {
        // Check reachability, then analyze the declared variable.
        this.checkStatementExecutionValidity(varDefNode);
        analyzeNode(varDefNode.var, env);
    }
    public void visit(BLangCompoundAssignment compoundAssignment) {
        // Check reachability, then analyze both sides of the compound assignment.
        this.checkStatementExecutionValidity(compoundAssignment);
        analyzeExpr(compoundAssignment.varRef);
        analyzeExpr(compoundAssignment.expr);
    }
    public void visit(BLangAssignment assignNode) {
        // Check reachability, then analyze both sides of the assignment.
        this.checkStatementExecutionValidity(assignNode);
        analyzeExpr(assignNode.varRef);
        analyzeExpr(assignNode.expr);
    }
    public void visit(BLangRecordDestructure stmt) {
        // Reject duplicate variable references in the binding pattern, then analyze both sides.
        this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
        this.checkStatementExecutionValidity(stmt);
        analyzeExpr(stmt.varRef);
        analyzeExpr(stmt.expr);
    }
    public void visit(BLangErrorDestructure stmt) {
        // Reject duplicate variable references in the binding pattern, then analyze both sides.
        this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
        this.checkStatementExecutionValidity(stmt);
        analyzeExpr(stmt.varRef);
        analyzeExpr(stmt.expr);
    }
    @Override
    public void visit(BLangTupleDestructure stmt) {
        // Reject duplicate variable references in the binding pattern, then analyze both sides.
        this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
        this.checkStatementExecutionValidity(stmt);
        analyzeExpr(stmt.varRef);
        analyzeExpr(stmt.expr);
    }
    // Entry point: starts duplicate detection with an empty symbol set.
    private void checkDuplicateVarRefs(List<BLangExpression> varRefs) {
        checkDuplicateVarRefs(varRefs, new HashSet<>());
    }
private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) {
for (BLangExpression varRef : varRefs) {
if (varRef == null || (varRef.getKind() != NodeKind.SIMPLE_VARIABLE_REF
&& varRef.getKind() != NodeKind.RECORD_VARIABLE_REF
&& varRef.getKind() != NodeKind.ERROR_VARIABLE_REF
&& varRef.getKind() != NodeKind.TUPLE_VARIABLE_REF)) {
continue;
}
if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& names.fromIdNode(((BLangSimpleVarRef) varRef).variableName) == Names.IGNORE) {
continue;
}
if (varRef.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols);
}
if (varRef.getKind() == NodeKind.RECORD_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols);
}
if (varRef.getKind() == NodeKind.ERROR_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols);
}
BLangVariableReference varRefExpr = (BLangVariableReference) varRef;
if (varRefExpr.symbol != null && !symbols.add(varRefExpr.symbol)) {
this.dlog.error(varRef.pos, DiagnosticCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN,
varRefExpr.symbol);
}
}
}
    // Collects the variable references bound by a record binding pattern,
    // including the rest param (which may be null; callers filter nulls).
    private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) {
        List<BLangExpression> varRefs = varRef.recordRefFields.stream()
                .map(e -> e.variableReference).collect(Collectors.toList());
        varRefs.add((BLangExpression) varRef.restParam);
        return varRefs;
    }
    // Collects the variable references bound by an error binding pattern:
    // reason, detail entries, and the rest var (entries may be null).
    private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
        List<BLangExpression> varRefs = new ArrayList<>();
        varRefs.add(varRef.reason);
        varRefs.addAll(varRef.detail.stream().map(e -> e.expr).collect(Collectors.toList()));
        varRefs.add(varRef.restVar);
        return varRefs;
    }
    // Collects the variable references bound by a tuple binding pattern,
    // including the rest param (which may be null).
    private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
        List<BLangExpression> varRefs = new ArrayList<>(varRef.expressions);
        varRefs.add((BLangExpression) varRef.restParam);
        return varRefs;
    }
    public void visit(BLangBreak breakNode) {
        this.checkStatementExecutionValidity(breakNode);
        // `break` is only valid inside a loop ...
        if (this.loopCount == 0) {
            this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
            return;
        }
        // ... and must not be used to jump out of an enclosing transaction.
        if (checkNextBreakValidityInTransaction()) {
            this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
            return;
        }
        // Statements after a break in this block are unreachable.
        this.lastStatement = true;
    }
    public void visit(BLangThrow throwNode) {
        /* ignore */
    }
    public void visit(BLangPanic panicNode) {
        // A panic terminates the enclosing block, so mark the function as returning.
        this.checkStatementExecutionValidity(panicNode);
        this.statementReturns = true;
        analyzeExpr(panicNode.expr);
    }
    public void visit(BLangXMLNSStatement xmlnsStmtNode) {
        this.checkStatementExecutionValidity(xmlnsStmtNode);
    }
    public void visit(BLangExpressionStmt exprStmtNode) {
        // Expression statements must be reachable and of an allowed expression kind.
        this.checkStatementExecutionValidity(exprStmtNode);
        analyzeExpr(exprStmtNode.expr);
        validateExprStatementExpression(exprStmtNode);
    }
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
BLangExpression expr = exprStmtNode.expr;
while (expr.getKind() == NodeKind.MATCH_EXPRESSION ||
expr.getKind() == NodeKind.CHECK_EXPR ||
expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
if (expr.getKind() == NodeKind.MATCH_EXPRESSION) {
expr = ((BLangMatchExpression) expr).expr;
} else if (expr.getKind() == NodeKind.CHECK_EXPR) {
expr = ((BLangCheckedExpr) expr).expr;
} else if (expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
expr = ((BLangCheckPanickedExpr) expr).expr;
}
}
if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.WAIT_EXPR) {
return;
}
if (expr.type == symTable.nilType) {
dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT);
}
}
    public void visit(BLangTryCatchFinally tryNode) {
        /* ignore */
    }
    public void visit(BLangCatch catchNode) {
        /* ignore */
    }
    // True when the current node is the body of the enclosing invokable
    // (i.e. we are at the function's top level, not nested in a block).
    private boolean isTopLevel() {
        SymbolEnv env = this.env;
        return env.enclInvokable.body == env.node;
    }
    // True when the enclosing invokable is a worker lambda.
    private boolean isInWorker() {
        return env.enclInvokable.flagSet.contains(Flag.WORKER);
    }
    // Worker send/receive is allowed at the function top level, or anywhere
    // inside a worker when talking to the default worker.
    private boolean isCommunicationAllowedLocation(String workerIdentifier) {
        return (isDefaultWorkerCommunication(workerIdentifier) && isInWorker()) || isTopLevel();
    }
    private boolean isDefaultWorkerCommunication(String workerIdentifier) {
        return workerIdentifier.equals(DEFAULT_WORKER_NAME);
    }
private boolean workerExists(BType type, String workerName) {
if (isDefaultWorkerCommunication(workerName) && isInWorker()) {
return true;
}
if (type == symTable.semanticError) {
return false;
}
return type.tag == TypeTags.FUTURE && ((BFutureType) type).workerDerivative;
}
    public void visit(BLangWorkerSend workerSendNode) {
        this.checkStatementExecutionValidity(workerSendNode);
        // Channel sends bypass worker-interaction analysis entirely.
        if (workerSendNode.isChannel) {
            analyzeExpr(workerSendNode.expr);
            if (workerSendNode.keyExpr != null) {
                analyzeExpr(workerSendNode.keyExpr);
            }
            return;
        }
        WorkerActionSystem was = this.workerActionSystemStack.peek();
        // Only anydata values may be sent between workers.
        BType type = workerSendNode.expr.type;
        if (type == symTable.semanticError) {
            // Error of a previous error. Bail out silently but flag the system as erroneous.
            was.hasErrors = true;
        } else if (!types.isAnydata(type)) {
            this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, type);
        }
        String workerName = workerSendNode.workerIdentifier.getValue();
        // The send must occur at an allowed location and target an existing worker.
        boolean allowedLocation = isCommunicationAllowedLocation(workerName);
        if (!allowedLocation) {
            this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
            was.hasErrors = true;
        }
        if (!this.workerExists(workerSendNode.type, workerName)) {
            this.dlog.error(workerSendNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
            was.hasErrors = true;
        }
        // Record the error types the matching receive may observe, then register the action.
        workerSendNode.type = createAccumulatedErrorTypeForMatchingRecive(workerSendNode);
        was.addWorkerAction(workerSendNode);
        analyzeExpr(workerSendNode.expr);
        validateActionParentNode(workerSendNode.pos, workerSendNode.expr);
    }
private BType createAccumulatedErrorTypeForMatchingRecive(BLangWorkerSend workerSendNode) {
Set<BType> returnTypesUpToNow = this.returnTypes.peek();
LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<BType>() {
{
Comparator.comparing(BType::toString);
}
};
for (BType returnType : returnTypesUpToNow) {
if (returnType.tag == TypeTags.ERROR) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(workerSendNode.pos, DiagnosticCode.WORKER_SEND_AFTER_RETURN);
}
}
returnTypeAndSendType.add(workerSendNode.expr.type);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return workerSendNode.expr.type;
}
}
    @Override
    public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
        // A sync send is an action: it must appear in a statement-like position.
        validateActionParentNode(syncSendExpr.pos, syncSendExpr);
        String workerName = syncSendExpr.workerIdentifier.getValue();
        WorkerActionSystem was = this.workerActionSystemStack.peek();
        // The send must occur at an allowed location and target an existing worker.
        boolean allowedLocation = isCommunicationAllowedLocation(workerName);
        if (!allowedLocation) {
            this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
            was.hasErrors = true;
        }
        if (!this.workerExists(syncSendExpr.workerType, workerName)) {
            this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
            was.hasErrors = true;
        }
        was.addWorkerAction(syncSendExpr);
        analyzeExpr(syncSendExpr.expr);
    }
    @Override
    public void visit(BLangWorkerReceive workerReceiveNode) {
        // A receive is an action: it must appear in a statement-like position.
        validateActionParentNode(workerReceiveNode.pos, workerReceiveNode);
        // Channel receives bypass worker-interaction analysis entirely.
        if (workerReceiveNode.isChannel) {
            if (workerReceiveNode.keyExpr != null) {
                analyzeExpr(workerReceiveNode.keyExpr);
            }
            return;
        }
        WorkerActionSystem was = this.workerActionSystemStack.peek();
        String workerName = workerReceiveNode.workerIdentifier.getValue();
        // The receive must occur at an allowed location and target an existing worker.
        boolean allowedLocation = isCommunicationAllowedLocation(workerName);
        if (!allowedLocation) {
            this.dlog.error(workerReceiveNode.pos, DiagnosticCode.INVALID_WORKER_RECEIVE_POSITION);
            was.hasErrors = true;
        }
        if (!this.workerExists(workerReceiveNode.workerType, workerName)) {
            this.dlog.error(workerReceiveNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
            was.hasErrors = true;
        }
        // Record the error types a matching sync send may propagate, then register the action.
        workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode);
        was.addWorkerAction(workerReceiveNode);
    }
public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode) {
Set<BType> returnTypesUpToNow = this.returnTypes.peek();
LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
for (BType returnType : returnTypesUpToNow) {
if (returnType.tag == TypeTags.ERROR) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(workerReceiveNode.pos, DiagnosticCode.WORKER_RECEIVE_AFTER_RETURN);
}
}
returnTypeAndSendType.add(symTable.nilType);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return symTable.nilType;
}
}
    public void visit(BLangLiteral literalExpr) {
        // `null` (as opposed to `()`) is only permitted in JSON contexts.
        if (literalExpr.type.tag == TypeTags.NIL &&
                NULL_LITERAL.equals(literalExpr.originalValue) &&
                !literalExpr.isJSONContext && !this.isJSONContext) {
            dlog.error(literalExpr.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL);
        }
    }
    public void visit(BLangListConstructorExpr listConstructorExpr) {
        analyzeExprs(listConstructorExpr.exprs);
    }
    public void visit(BLangRecordLiteral recordLiteral) {
        List<BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs;
        keyValuePairs.forEach(kv -> analyzeExpr(kv.valueExpr));
        // Track field names seen so far to detect duplicate keys.
        Set<Object> names = new HashSet<>();
        BType type = recordLiteral.type;
        boolean isOpenRecord = type != null && type.tag == TypeTags.RECORD && !((BRecordType) type).sealed;
        for (BLangRecordKeyValue recFieldDecl : keyValuePairs) {
            BLangExpression key = recFieldDecl.getKey();
            if (recFieldDecl.key.computedKey) {
                // Computed keys cannot be statically checked for duplicates.
                analyzeExpr(key);
                continue;
            }
            if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                BLangSimpleVarRef keyRef = (BLangSimpleVarRef) key;
                String fieldName = keyRef.variableName.value;
                if (names.contains(fieldName)) {
                    String assigneeType = recordLiteral.parent.type.getKind().typeName();
                    this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyRef);
                }
                // For open records, identifier keys must name a declared field.
                if (isOpenRecord && ((BRecordType) type).fields.stream()
                        .noneMatch(field -> fieldName.equals(field.name.value))) {
                    dlog.error(key.pos, DiagnosticCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY, fieldName);
                }
                names.add(fieldName);
            } else if (key.getKind() == NodeKind.LITERAL || key.getKind() == NodeKind.NUMERIC_LITERAL) {
                BLangLiteral keyLiteral = (BLangLiteral) key;
                if (names.contains(keyLiteral.value)) {
                    String assigneeType = recordLiteral.parent.type.getKind().typeName();
                    this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyLiteral);
                }
                names.add(keyLiteral.value);
            }
        }
    }
    public void visit(BLangTableLiteral tableLiteral) {
        /* ignore */
    }
    public void visit(BLangSimpleVarRef varRefExpr) {
        /* ignore */
    }
    public void visit(BLangRecordVarRef varRefExpr) {
        /* ignore */
    }
    public void visit(BLangErrorVarRef varRefExpr) {
        /* ignore */
    }
    public void visit(BLangTupleVarRef varRefExpr) {
        /* ignore */
    }
    public void visit(BLangFieldBasedAccess fieldAccessExpr) {
        analyzeExpr(fieldAccessExpr.expr);
        // Field access over XML values is an experimental feature.
        if (fieldAccessExpr.expr.type.tag == TypeTags.XML) {
            checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ACCESS, fieldAccessExpr.pos);
        }
    }
    public void visit(BLangIndexBasedAccess indexAccessExpr) {
        analyzeExpr(indexAccessExpr.indexExpr);
        analyzeExpr(indexAccessExpr.expr);
        // Index access over XML values is an experimental feature.
        if (indexAccessExpr.expr.type.tag == TypeTags.XML) {
            checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ACCESS, indexAccessExpr.pos);
        }
    }
    public void visit(BLangInvocation invocationExpr) {
        analyzeExpr(invocationExpr.expr);
        analyzeExprs(invocationExpr.requiredArgs);
        analyzeExprs(invocationExpr.restArgs);
        // Warn on calls to functions marked @deprecated.
        if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) {
            BSymbol funcSymbol = invocationExpr.symbol;
            if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) {
                dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION,
                        names.fromIdNode(invocationExpr.name));
            }
        }
        // Remote action invocations and `start` calls have positional restrictions.
        if (invocationExpr.actionInvocation || invocationExpr.async) {
            validateActionInvocation(invocationExpr.pos, invocationExpr);
        }
    }
    /**
     * Validates that an action invocation's receiver is either a simple variable
     * reference or a `self.field` access, and that the invocation itself occurs
     * in a statement-like position.
     */
    private void validateActionInvocation(DiagnosticPos pos, BLangInvocation iExpr) {
        if (iExpr.expr != null) {
            final NodeKind clientNodeKind = iExpr.expr.getKind();
            // Receiver must be a simple var ref or a field access on `self`.
            if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF && clientNodeKind != NodeKind.FIELD_BASED_ACCESS_EXPR) {
                dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
            } else if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
                final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr;
                if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                    dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
                } else {
                    final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr;
                    // Only `self.<client>.action()` is permitted for field-based receivers.
                    if (!Names.SELF.equals(selfName.symbol.name)) {
                        dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
                    }
                }
            }
        }
        validateActionParentNode(pos, iExpr);
    }
/**
* Actions can only occur as part of a statement or nested inside other actions.
*/
    private void validateActionParentNode(DiagnosticPos pos, BLangNode node) {
        // Walk up the parent chain: actions are valid directly in a block, as part of a
        // statement-like parent, or nested inside specific wrapper expressions.
        BLangNode parent = node.parent;
        if (parent.getKind() == NodeKind.BLOCK) {
            return;
        }
        while (parent != null) {
            final NodeKind kind = parent.getKind();
            // Statement-like parents terminate the walk successfully.
            if (kind == NodeKind.ASSIGNMENT
                    || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.RETURN
                    || kind == NodeKind.RECORD_DESTRUCTURE || kind == NodeKind.ERROR_DESTRUCTURE
                    || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE
                    || kind == NodeKind.MATCH || kind == NodeKind.FOREACH) {
                return;
            } else if (kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.CHECK_EXPR
                    || kind == NodeKind.WORKER_RECEIVE || kind == NodeKind.WORKER_FLUSH
                    || kind == NodeKind.WORKER_SEND || kind == NodeKind.WAIT_EXPR
                    || kind == NodeKind.GROUP_EXPR || kind == NodeKind.TRAP_EXPR) {
                // Transparent wrappers: keep climbing.
                parent = parent.parent;
                if (parent.getKind() == NodeKind.BLOCK) {
                    return;
                }
                continue;
            } else if (kind == NodeKind.ELVIS_EXPR
                    && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION
                    && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) {
                // An elvis whose LHS is the action invocation is also transparent.
                parent = parent.parent;
                continue;
            }
            // Any other parent kind makes the action position invalid.
            break;
        }
        dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
    }
    public void visit(BLangTypeInit cIExpr) {
        analyzeExprs(cIExpr.argsExpr);
        analyzeExpr(cIExpr.initInvocation);
    }
    public void visit(BLangTernaryExpr ternaryExpr) {
        analyzeExpr(ternaryExpr.expr);
        // Both branches inherit the JSON-context flag derived from the result type;
        // the flag is re-set before each branch because analyzeExpr may clear it.
        boolean isJSONCtx = getIsJSONContext(ternaryExpr.type);
        this.isJSONContext = isJSONCtx;
        analyzeExpr(ternaryExpr.thenExpr);
        this.isJSONContext = isJSONCtx;
        analyzeExpr(ternaryExpr.elseExpr);
    }
    public void visit(BLangWaitExpr awaitExpr) {
        // `wait` is an action: validate its position in addition to its operand.
        analyzeExpr(awaitExpr.getExpression());
        validateActionParentNode(awaitExpr.pos, awaitExpr);
    }
    public void visit(BLangWaitForAllExpr waitForAllExpr) {
        // Each entry analyzes its value expression, or the key when no value is given.
        waitForAllExpr.keyValuePairs.forEach(keyValue -> {
            BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr;
            analyzeExpr(expr);
        });
    }
    @Override
    public void visit(BLangWorkerFlushExpr workerFlushExpr) {
        BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier;
        Stack<WorkerActionSystem> workerActionSystems = this.workerActionSystemStack;
        WorkerActionSystem currentWrkerAction = workerActionSystems.peek();
        // Collect the async sends issued so far by the current worker.
        List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWrkerAction);
        if (flushWrkIdentifier != null) {
            // `flush w` requires at least one prior send to worker `w`.
            List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream()
                    .filter(bLangNode -> bLangNode.workerIdentifier.equals
                            (flushWrkIdentifier))
                    .collect(Collectors.toList());
            if (sendsToGivenWrkr.size() == 0) {
                this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH_FOR_WORKER, flushWrkIdentifier,
                        currentWrkerAction.currentWorkerId());
                return;
            } else {
                sendStmts = sendsToGivenWrkr;
            }
        } else {
            // A bare `flush` requires at least one prior send of any kind.
            if (sendStmts.size() == 0) {
                this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH,
                        currentWrkerAction.currentWorkerId());
                return;
            }
        }
        // Cache the relevant sends for later phases, and validate the flush's position.
        workerFlushExpr.cachedWorkerSendStmts = sendStmts;
        validateActionParentNode(workerFlushExpr.pos, workerFlushExpr);
    }
private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) {
List<BLangNode> actions = currentWorkerAction.workerActionStateMachines.peek().actions;
return actions.stream()
.filter(CodeAnalyzer::isWorkerSend)
.map(bLangNode -> (BLangWorkerSend) bLangNode)
.collect(Collectors.toList());
}
    @Override
    public void visit(BLangTrapExpr trapExpr) {
        analyzeExpr(trapExpr.expr);
    }
    public void visit(BLangBinaryExpr binaryExpr) {
        // Only analyze operands when future-typed operand restrictions pass.
        if (validateBinaryExpr(binaryExpr)) {
            // Re-set the JSON-context flag before each operand because analyzeExpr may clear it.
            boolean isJSONCtx = getIsJSONContext(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
            this.isJSONContext = isJSONCtx;
            analyzeExpr(binaryExpr.lhsExpr);
            this.isJSONContext = isJSONCtx;
            analyzeExpr(binaryExpr.rhsExpr);
        }
    }
    private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) {
        // Binary expressions without future-typed operands are always valid.
        if (binaryExpr.lhsExpr.type.tag != TypeTags.FUTURE && binaryExpr.rhsExpr.type.tag != TypeTags.FUTURE) {
            return true;
        }
        BLangNode parentNode = binaryExpr.parent;
        // At least one operand is a future from here on.
        if (binaryExpr.lhsExpr.type.tag == TypeTags.FUTURE || binaryExpr.rhsExpr.type.tag == TypeTags.FUTURE) {
            if (parentNode == null) {
                return false;
            }
            // `f1 | f2` directly under `wait` is the alternate-wait form — valid.
            if (parentNode.getKind() == NodeKind.WAIT_EXPR) {
                return true;
            }
        }
        // A future `|` outside a binary-expression chain is not a supported operator use.
        if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
            dlog.error(binaryExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR,
                    symTable.futureType);
            return false;
        }
        // Nested in another binary expression: validity is decided at the outermost one.
        if (parentNode.getKind() == NodeKind.BINARY_EXPR) {
            return validateBinaryExpr((BLangBinaryExpr) parentNode);
        }
        return true;
    }
    public void visit(BLangElvisExpr elvisExpr) {
        analyzeExpr(elvisExpr.lhsExpr);
        analyzeExpr(elvisExpr.rhsExpr);
    }
    @Override
    public void visit(BLangGroupExpr groupExpr) {
        analyzeExpr(groupExpr.expression);
    }
    public void visit(BLangUnaryExpr unaryExpr) {
        analyzeExpr(unaryExpr.expr);
    }
    public void visit(BLangTypedescExpr accessExpr) {
        /* ignore */
    }
    public void visit(BLangTypeConversionExpr conversionExpr) {
        analyzeExpr(conversionExpr.expr);
    }
    public void visit(BLangXMLQName xmlQName) {
        /* ignore */
    }
    public void visit(BLangXMLAttribute xmlAttribute) {
        analyzeExpr(xmlAttribute.name);
        analyzeExpr(xmlAttribute.value);
    }
    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        // Analyze tag names, attributes, and all child nodes.
        analyzeExpr(xmlElementLiteral.startTagName);
        analyzeExpr(xmlElementLiteral.endTagName);
        analyzeExprs(xmlElementLiteral.attributes);
        analyzeExprs(xmlElementLiteral.children);
    }
    public void visit(BLangXMLTextLiteral xmlTextLiteral) {
        analyzeExprs(xmlTextLiteral.textFragments);
    }
    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        analyzeExprs(xmlCommentLiteral.textFragments);
    }
    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        analyzeExprs(xmlProcInsLiteral.dataFragments);
        analyzeExpr(xmlProcInsLiteral.target);
    }
    public void visit(BLangXMLQuotedString xmlQuotedString) {
        analyzeExprs(xmlQuotedString.textFragments);
    }
    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        analyzeExprs(stringTemplateLiteral.exprs);
    }
    public void visit(BLangLambdaFunction bLangLambdaFunction) {
        // Worker bodies are desugared into lambdas bound to specially-prefixed
        // variables; detect that case and run the worker action state machine.
        boolean isWorker = false;
        if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) {
            String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value;
            if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) {
                // Strip the prefix character to recover the worker's declared name.
                String workerName = workerVarName.substring(1);
                isWorker = true;
                this.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName,
                                                                                  bLangLambdaFunction.function.pos,
                                                                                  bLangLambdaFunction.function);
            }
        }
        // The lambda body must not leak its return state into the enclosing function.
        boolean statementReturn = this.statementReturns;
        this.visitFunction(bLangLambdaFunction.function);
        this.statementReturns = statementReturn;
        if (isWorker) {
            this.workerActionSystemStack.peek().endWorkerActionStateMachine();
        }
    }
    public void visit(BLangArrowFunction bLangArrowFunction) {
        analyzeExpr(bLangArrowFunction.expression);
    }
    public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
        // XML attribute access (`x@[...]`) is an experimental feature.
        checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ATTRIBUTES_ACCESS, xmlAttributeAccessExpr.pos);
        analyzeExpr(xmlAttributeAccessExpr.expr);
        analyzeExpr(xmlAttributeAccessExpr.indexExpr);
    }
    public void visit(BLangIntRangeExpression intRangeExpression) {
        analyzeExpr(intRangeExpression.startExpr);
        analyzeExpr(intRangeExpression.endExpr);
    }
/* Type Nodes */
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Analyze record fields in a type environment scoped to the record's own symbol scope.
    SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);
    if (recordTypeNode.isFieldAnalyseRequired) {
        recordTypeNode.fields.forEach(field -> analyzeNode(field, recordEnv));
    }
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);
    if (objectTypeNode.isFieldAnalyseRequired) {
        objectTypeNode.fields.forEach(field -> analyzeNode(field, objectEnv));
    }
    // Analyze member functions together with the (optional) init function, ordered by
    // source line so diagnostics are reported in declaration order.
    Stream.concat(objectTypeNode.functions.stream(),
                  Optional.ofNullable(objectTypeNode.initFunction).map(Stream::of).orElseGet(Stream::empty))
            .sorted(Comparator.comparingInt(fn -> fn.pos.sLine))
            .forEachOrdered(fn -> this.analyzeNode(fn, objectEnv));
}
@Override
public void visit(BLangValueType valueType) {
    /* ignore */
}
@Override
public void visit(BLangArrayType arrayType) {
    // Analyze the element type of an array type.
    analyzeTypeNode(arrayType.elemtype, env);
}
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    /* ignore */
}
public void visit(BLangConstrainedType constrainedType) {
    // stream<T> is an experimental feature; validate its use before analyzing the constraint.
    if (constrainedType.type.type.tag == TypeTags.STREAM) {
        checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMS, constrainedType.pos);
    }
    analyzeTypeNode(constrainedType.constraint, env);
}
public void visit(BLangErrorType errorType) {
    // Analyze both the reason and detail type components of an error type.
    analyzeTypeNode(errorType.reasonType, env);
    analyzeTypeNode(errorType.detailType, env);
}
public void visit(BLangUserDefinedType userDefinedType) {
    /* Ignore */
}
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Analyze each member type and the rest-parameter type of a tuple type.
    tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
    analyzeTypeNode(tupleTypeNode.restParamType, env);
}
public void visit(BLangUnionTypeNode unionTypeNode) {
    unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
}
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // Analyze parameter nodes and the return type of a function type.
    functionTypeNode.params.forEach(node -> analyzeNode(node, env));
    analyzeTypeNode(functionTypeNode.returnTypeNode, env);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    /* Ignore */
}
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    // Table queries are experimental; only the feature gate is validated here.
    checkExperimentalFeatureValidity(ExperimentalFeatures.TABLE_QUERIES, tableQueryExpression.pos);
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    analyzeExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    analyzeExpr(bLangNamedArgsExpression.expr);
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Intentionally empty: match expressions need no additional code analysis here.
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // A `check` expression is only valid when the enclosing invokable can return an error;
    // otherwise the implicit error return of `check` has nowhere to propagate.
    analyzeExpr(checkedExpr.expr);
    boolean enclInvokableHasErrorReturn = false;
    if (this.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
        // `check` at module level (e.g. in a constant/variable initializer) is not validated here.
        return;
    }
    BType exprType = env.enclInvokable.getReturnTypeNode().type;
    if (exprType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) env.enclInvokable.getReturnTypeNode().type;
        enclInvokableHasErrorReturn = unionType.getMemberTypes().stream()
                .anyMatch(memberType -> types.isAssignable(memberType, symTable.errorType));
    } else if (types.isAssignable(exprType, symTable.errorType)) {
        enclInvokableHasErrorReturn = true;
    }
    if (!enclInvokableHasErrorReturn) {
        dlog.error(checkedExpr.pos, DiagnosticCode.CHECKED_EXPR_NO_ERROR_RETURN_IN_ENCL_INVOKABLE);
    }
    // Record the return type contributed by the implicit error propagation.
    returnTypes.peek().add(exprType);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanicExpr) {
    analyzeExpr(checkPanicExpr.expr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // Intentionally empty: service constructors need no additional code analysis here.
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Flag type tests that are either always true (UNNECESSARY_CONDITION) or can never be
    // true because the tested type and the expression type have no intersection.
    analyzeNode(typeTestExpr.expr, env);
    if (typeTestExpr.typeNode.type == symTable.semanticError || typeTestExpr.expr.type == symTable.semanticError) {
        return;
    }
    if (types.isAssignable(typeTestExpr.expr.type, typeTestExpr.typeNode.type)) {
        dlog.error(typeTestExpr.pos, DiagnosticCode.UNNECESSARY_CONDITION);
        return;
    }
    if (!types.isAssignable(typeTestExpr.typeNode.type, typeTestExpr.expr.type) &&
            !indirectIntersectionExists(typeTestExpr.expr, typeTestExpr.typeNode.type)) {
        dlog.error(typeTestExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPE_CHECK, typeTestExpr.expr.type,
                   typeTestExpr.typeNode.type);
    }
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    analyzeExpr(annotAccessExpr.expr);
}
/**
 * Returns true if an indirect intersection exists between the expression's type and the
 * tested type — i.e. some member/value of a union or finite type on either side is
 * assignable to the other side. Used to decide whether an `is` check is satisfiable.
 *
 * @param expression expression whose static type is inspected
 * @param testType   type being tested against
 * @return true if some assignable member/value exists on either side
 */
private boolean indirectIntersectionExists(BLangExpression expression, BType testType) {
    BType expressionType = expression.type;
    switch (expressionType.tag) {
        case TypeTags.UNION:
            if (types.getTypeForUnionTypeMembersAssignableToType((BUnionType) expressionType, testType) !=
                    symTable.semanticError) {
                return true;
            }
            break;
        case TypeTags.FINITE:
            // Last case of this switch; no break needed.
            if (types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) expressionType, testType) !=
                    symTable.semanticError) {
                return true;
            }
    }
    // Symmetric check in the other direction: members/values of the tested type
    // assignable to the expression type.
    switch (testType.tag) {
        case TypeTags.UNION:
            return types.getTypeForUnionTypeMembersAssignableToType((BUnionType) testType, expressionType) !=
                    symTable.semanticError;
        case TypeTags.FINITE:
            return types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) testType, expressionType) !=
                    symTable.semanticError;
    }
    return false;
}
/**
 * Analyzes a single expression node, maintaining the parent pointer for the traversal
 * and running access checks afterwards. The JSON-context flag is consumed (reset) once
 * the node has been visited so it does not leak into sibling expressions.
 */
private <E extends BLangExpression> void analyzeExpr(E node) {
    if (node == null) {
        return;
    }
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    // isJSONContext applies only to the node just visited.
    this.isJSONContext = false;
    parent = myParent;
    checkAccess(node);
}
@Override
public void visit(BLangConstant constant) {
    // Analyze the declared type and initializer, then verify the referenced type is exportable.
    analyzeTypeNode(constant.typeNode, env);
    analyzeNode(constant.expr, env);
    analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos);
}
/**
 * This method checks for private symbols being accessed or used outside of package and|or private symbols being
 * used in public fields of objects/records and will fail those occurrences.
 *
 * @param node expression node to analyze
 */
private <E extends BLangExpression> void checkAccess(E node) {
    // Validate the symbol of the expression's type, if any.
    if (node.type != null) {
        checkAccessSymbol(node.type.tsymbol, node.pos);
    }
    // For invocations, also validate the invoked function's symbol.
    if (node.getKind() == NodeKind.INVOCATION) {
        BLangInvocation bLangInvocation = (BLangInvocation) node;
        checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos);
    }
}
/**
 * Reports an error when a non-public symbol from another package is referenced.
 *
 * @param symbol   symbol to validate; ignored when null
 * @param position source position used for the diagnostic
 */
private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) {
    if (symbol == null) {
        return;
    }
    if (env.enclPkg.symbol.pkgID != symbol.pkgID && !Symbols.isPublic(symbol)) {
        dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
    }
}
// Analyze each expression in the list. NOTE(review): the indexed loop (rather than an
// enhanced for) presumably tolerates the list growing while being visited — confirm
// before converting to an iterator-based loop.
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        analyzeExpr(nodeList.get(i));
    }
}
// Push a fresh worker action system for the function about to be analyzed.
private void initNewWorkerActionSystem() {
    this.workerActionSystemStack.push(new WorkerActionSystem());
}
// Pop the current worker action system and, if no errors were recorded while building it,
// validate the send/receive interactions between its workers.
private void finalizeCurrentWorkerActionSystem() {
    WorkerActionSystem was = this.workerActionSystemStack.pop();
    if (!was.hasErrors) {
        this.validateWorkerInteractions(was);
    }
}
// True if the node is an asynchronous worker send action.
private static boolean isWorkerSend(BLangNode action) {
    return action.getKind() == NodeKind.WORKER_SEND;
}
// True if the node is a synchronous worker send action.
private static boolean isWorkerSyncSend(BLangNode action) {
    return action.getKind() == NodeKind.WORKER_SYNC_SEND;
}
/**
 * Returns the target worker name referenced by a worker send, sync-send, or receive action.
 */
private String extractWorkerId(BLangNode action) {
    switch (action.getKind()) {
        case WORKER_SEND:
            return ((BLangWorkerSend) action).workerIdentifier.value;
        case WORKER_SYNC_SEND:
            return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
        default:
            // Any other node reaching here is a worker receive.
            return ((BLangWorkerReceive) action).workerIdentifier.value;
    }
}
/**
 * Simulates the worker action system: repeatedly matches each worker's current send or
 * sync-send with the corresponding receive in the target worker, advancing both state
 * machines until no further progress can be made. If any worker is left with unmatched
 * actions afterwards, an invalid-worker-interaction diagnostic is reported.
 */
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
    BLangNode currentAction;
    boolean systemRunning;
    do {
        systemRunning = false;
        for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
            if (worker.done()) {
                continue;
            }
            currentAction = worker.currentAction();
            // Only send-type actions drive the matching; receives wait to be matched.
            if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
                continue;
            }
            WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
            if (otherSM == null || !otherSM.currentIsReceive(worker.workerId)) {
                continue;
            }
            BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
            if (isWorkerSyncSend(currentAction)) {
                this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
            } else {
                this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
            }
            // Advance both sides of the matched interaction and keep the simulation alive.
            otherSM.next();
            worker.next();
            systemRunning = true;
            String channelName = WorkerDataChannelInfo.generateChannelName(worker.workerId, otherSM.workerId);
            otherSM.node.sendsToThis.add(channelName);
            worker.node.sendsToThis.add(channelName);
        }
    } while (systemRunning);
    if (!workerActionSystem.everyoneDone()) {
        this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
    }
}
// Report an invalid worker interaction, positioned at the first worker's state machine.
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
    this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
                    workerActionSystem.toString());
}
// Validate an async send against its matching receive: the sent type must be assignable to
// the receive type, an implicit cast is added if needed, and the enclosing trap/check
// expression (if any) is re-type-checked with the now-known receive expression.
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
    types.checkType(receive, send.type, receive.type);
    addImplicitCast(send.type, receive);
    NodeKind kind = receive.parent.getKind();
    if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR) {
        typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
    }
    receive.sendExpression = send.expr;
}
// Validate a sync send against its matching receive, including the error type a sync send
// may evaluate to (matchingSendsError).
private void validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) {
    this.typeChecker.checkExpr(send.expr, send.env, receive.type);
    types.checkType(send, send.type, receive.matchingSendsError);
    addImplicitCast(send.expr.type, receive);
    receive.sendExpression = send;
}
// If the receive already has a valid type, record an implicit cast from the actual sent
// type and narrow the receive's type to it.
private void addImplicitCast(BType actualType, BLangWorkerReceive receive) {
    if (receive.type != null && receive.type != symTable.semanticError) {
        types.setImplicitCastExpr(receive, actualType, receive.type);
        receive.type = actualType;
    }
}
// True when a break/continue would illegally exit a transaction (not inside a loop that is
// itself inside the transaction). NOTE(review): unlike checkReturnValidityInTransaction,
// this peeks without an empty-stack guard — confirm callers guarantee a non-empty stack.
private boolean checkNextBreakValidityInTransaction() {
    return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0;
}
// True when a return statement would illegally exit a transaction block.
private boolean checkReturnValidityInTransaction() {
    return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek())
            && transactionCount > 0;
}
// Transactions may not appear inside retry/aborted/committed handler blocks.
private boolean isValidTransactionBlock() {
    return !(this.withinRetryBlock || this.withinAbortedBlock || this.withinCommittedBlock);
}
/**
 * Validates the program entry point: `main` must be public, take only anydata parameters
 * (including the rest parameter), and return error? (a subtype of error|()).
 * Functions not named `main` are ignored.
 */
private void validateMainFunction(BLangFunction funcNode) {
    if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
        return;
    }
    if (!Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
    }
    funcNode.requiredParams.forEach(param -> {
        if (!types.isAnydata(param.type)) {
            this.dlog.error(param.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA, param.type);
        }
    });
    if (funcNode.restParam != null && !types.isAnydata(funcNode.restParam.type)) {
        this.dlog.error(funcNode.restParam.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA,
                        funcNode.restParam.type);
    }
    if (!types.isAssignable(funcNode.returnTypeNode.type,
                            BUnionType.create(null, symTable.nilType, symTable.errorType))) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.MAIN_RETURN_SHOULD_BE_ERROR_OR_NIL,
                        funcNode.returnTypeNode.type);
    }
}
/**
 * Validates a user-defined module `init` function: it must not be public, must take no
 * parameters, and must return a nullable subtype of error|(). Attached functions and
 * functions with other names are ignored.
 */
private void validateModuleInitFunction(BLangFunction funcNode) {
    if (funcNode.attachedFunction || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value)) {
        return;
    }
    if (Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_BE_PUBLIC);
    }
    if (!funcNode.requiredParams.isEmpty() || funcNode.restParam != null) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
    }
    if (!funcNode.returnTypeNode.type.isNullable() ||
            !types.isAssignable(funcNode.returnTypeNode.type,
                                BUnionType.create(null, symTable.nilType, symTable.errorType))) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL,
                        funcNode.returnTypeNode.type);
    }
}
/**
 * Reports an error for each named argument whose name repeats an earlier one.
 * NOTE(review): List.contains makes this O(n^2) and relies on BLangIdentifier.equals —
 * argument lists are small in practice, but confirm equals semantics before changing
 * the collection type.
 */
private void checkDuplicateNamedArgs(List<BLangExpression> args) {
    List<BLangIdentifier> existingArgs = new ArrayList<>();
    args.forEach(arg -> {
        BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
        if (existingArgs.contains(namedArg.name)) {
            dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name);
        }
        existingArgs.add(namedArg.name);
    });
}
/**
 * Returns true when the analyzer is already in a JSON context, or when any of the given
 * types places the expression in a JSON context.
 */
private boolean getIsJSONContext(BType... arg) {
    if (this.isJSONContext) {
        return true;
    }
    for (int i = 0; i < arg.length; i++) {
        if (types.isJSONContext(arg[i])) {
            return true;
        }
    }
    return false;
}
/**
 * This class contains the state machines for a set of workers.
 */
private static class WorkerActionSystem {
    // State machines whose workers have finished collecting actions, in completion order.
    public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>();
    // State machines still collecting actions; the top of the stack is the current worker.
    private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>();
    // Set when an error was recorded while building the system; suppresses validation.
    private boolean hasErrors = false;
    // Begin collecting actions for a new worker.
    public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos, BLangFunction node) {
        workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node));
    }
    // Finish the current worker and move its state machine to the finished list.
    public void endWorkerActionStateMachine() {
        finshedWorkers.add(workerActionStateMachines.pop());
    }
    // Record a send/receive action for the worker currently being collected.
    public void addWorkerAction(BLangNode action) {
        this.workerActionStateMachines.peek().actions.add(action);
    }
    // Locate the finished state machine for the given worker name; a miss indicates a
    // compiler-internal inconsistency, hence AssertionError rather than a diagnostic.
    public WorkerActionStateMachine find(String workerId) {
        for (WorkerActionStateMachine worker : this.finshedWorkers) {
            if (worker.workerId.equals(workerId)) {
                return worker;
            }
        }
        throw new AssertionError("Reference to non existing worker " + workerId);
    }
    // True when every worker's actions have been fully matched.
    public boolean everyoneDone() {
        return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done);
    }
    public DiagnosticPos getRootPosition() {
        return this.finshedWorkers.iterator().next().pos;
    }
    @Override
    public String toString() {
        return this.finshedWorkers.toString();
    }
    public String currentWorkerId() {
        return workerActionStateMachines.peek().workerId;
    }
}
/**
 * This class represents a state machine to maintain the state of the send/receive
 * actions of a worker.
 */
private static class WorkerActionStateMachine {
    private static final String WORKER_SM_FINISHED = "FINISHED";
    // Index of the next unmatched action; equal to actions.size() when the worker is done.
    public int currentState;
    public List<BLangNode> actions = new ArrayList<>();
    public DiagnosticPos pos;
    public String workerId;
    public BLangFunction node;
    public WorkerActionStateMachine(DiagnosticPos pos, String workerId, BLangFunction node) {
        this.pos = pos;
        this.workerId = workerId;
        this.node = node;
    }
    public boolean done() {
        return this.actions.size() == this.currentState;
    }
    public BLangNode currentAction() {
        return this.actions.get(this.currentState);
    }
    // True when this worker's next action is a receive from the given source worker.
    public boolean currentIsReceive(String sourceWorkerId) {
        if (this.done()) {
            return false;
        }
        BLangNode action = this.currentAction();
        return !isWorkerSend(action) && !isWorkerSyncSend(action) &&
                ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
    }
    // Advance past the current (matched) action.
    public void next() {
        this.currentState++;
    }
    @Override
    public String toString() {
        // Used in INVALID_WORKER_INTERACTION diagnostics: shows the first unmatched action.
        if (this.done()) {
            return WORKER_SM_FINISHED;
        } else {
            BLangNode action = this.currentAction();
            if (isWorkerSend(action)) {
                return ((BLangWorkerSend) action).toActionString();
            } else if (isWorkerSyncSend(action)) {
                return ((BLangWorkerSyncSendExpr) action).toActionString();
            } else {
                return ((BLangWorkerReceive) action).toActionString();
            }
        }
    }
}
// Report an error when an experimental language construct is used without the
// experimental-features compiler flag enabled.
private void checkExperimentalFeatureValidity(ExperimentalFeatures constructName, DiagnosticPos pos) {
    if (enableExperimentalFeatures) {
        return;
    }
    dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName.value);
}
/**
 * Experimental feature list for JBallerina 1.0.0.
 *
 * @since JBallerina 1.0.0
 */
private enum ExperimentalFeatures {
    STREAMS("stream"),
    TABLE_QUERIES("table queries"),
    STREAMING_QUERIES("streaming queries"),
    TRANSACTIONS("transaction"),
    LOCK("lock"),
    XML_ACCESS("xml access expression"),
    XML_ATTRIBUTES_ACCESS("xml attribute expression"),
    ;
    // User-facing feature name, embedded in INVALID_USE_OF_EXPERIMENTAL_FEATURE diagnostics.
    // Made final: enum constant state must never change after construction.
    private final String value;

    // Enum constructors are implicitly private; the redundant modifier is dropped.
    ExperimentalFeatures(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }
}
}
|
class CodeAnalyzer extends BLangNodeVisitor {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private static final String NULL_LITERAL = "null";
private final SymbolResolver symResolver;
private int loopCount;
private int transactionCount;
private boolean statementReturns;
private boolean lastStatement;
private boolean withinRetryBlock;
private int workerCount;
private SymbolTable symTable;
private Types types;
private BLangDiagnosticLog dlog;
private TypeChecker typeChecker;
private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>();
private BLangNode parent;
private Names names;
private SymbolEnv env;
private final Stack<HashSet<BType>> returnTypes = new Stack<>();
private boolean withinAbortedBlock;
private boolean withinCommittedBlock;
private boolean isJSONContext;
private boolean enableExperimentalFeatures;
/**
 * Returns the per-CompilerContext singleton, creating it on first use.
 * Synchronized to close the check-then-create race: two threads entering concurrently
 * could each observe null and construct a second instance (the constructor registers
 * itself in the context, so the loser would silently overwrite the winner).
 *
 * @param context compiler context holding the shared analyzer instance
 * @return the CodeAnalyzer bound to the given context
 */
public static synchronized CodeAnalyzer getInstance(CompilerContext context) {
    CodeAnalyzer codeAnalyzer = context.get(CODE_ANALYZER_KEY);
    if (codeAnalyzer == null) {
        codeAnalyzer = new CodeAnalyzer(context);
    }
    return codeAnalyzer;
}
/**
 * Creates the analyzer, registers it in the compiler context, and resolves the shared
 * compiler services (symbol table, types, diagnostics, type checker, names, resolver).
 * Also caches whether experimental language features are enabled for this compilation.
 */
public CodeAnalyzer(CompilerContext context) {
    context.put(CODE_ANALYZER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeChecker = TypeChecker.getInstance(context);
    this.names = Names.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.enableExperimentalFeatures = Boolean.parseBoolean(
            CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED));
}
// Reset per-function analysis state before visiting a new function body.
private void resetFunction() {
    this.resetStatementReturns();
}
// Clear the "all paths return" flag.
private void resetStatementReturns() {
    this.statementReturns = false;
}
// Clear the "previous statement terminates the block" flag (abort/retry).
private void resetLastStatement() {
    this.lastStatement = false;
}
// Entry point: run code analysis over the package and return it for phase chaining.
public BLangPackage analyze(BLangPackage pkgNode) {
    pkgNode.accept(this);
    return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Skip packages already analyzed in a previous phase run (idempotency guard).
    if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
        return;
    }
    parent = pkgNode;
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    analyzeTopLevelNodes(pkgNode, pkgEnv);
    // Testable packages are analyzed with the same visitor.
    pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
}
// Analyze all top-level nodes, then mark the phase complete and clear the parent pointer.
private void analyzeTopLevelNodes(BLangPackage pkgNode, SymbolEnv pkgEnv) {
    pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv));
    pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
    parent = null;
}
// Visit a node in the given environment, saving/restoring both the current environment
// and the traversal parent pointer around the visit.
private void analyzeNode(BLangNode node, SymbolEnv env) {
    SymbolEnv prevEnv = this.env;
    this.env = env;
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    parent = myParent;
    this.env = prevEnv;
}
// Null-tolerant wrapper for analyzing optional type nodes.
private void analyzeTypeNode(BLangType node, SymbolEnv env) {
    if (node == null) {
        return;
    }
    analyzeNode(node, env);
}
@Override
public void visit(BLangCompilationUnit compUnitNode) {
    // Analyze each top-level node of the compilation unit in the current environment.
    compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env));
}
public void visit(BLangTypeDefinition typeDefinition) {
    analyzeTypeNode(typeDefinition.typeNode, this.env);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
    analyzeNode(bLangTupleVariableDef.var, this.env);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
    analyzeNode(bLangRecordVariableDef.var, this.env);
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
    analyzeNode(bLangErrorVariableDef.errorVariable, this.env);
}
@Override
public void visit(BLangFunction funcNode) {
    // Lambdas are analyzed through visit(BLangLambdaFunction), not here.
    boolean isLambda = funcNode.flagSet.contains(Flag.LAMBDA);
    if (isLambda) {
        return;
    }
    // Public functions must not expose non-exportable types in their signature.
    if (Symbols.isPublic(funcNode.symbol)) {
        funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol,
                                                                          true,
                                                                          funcNode.pos));
        if (funcNode.symbol.restParam != null) {
            analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true,
                                     funcNode.restParam.pos);
        }
        analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true,
                                 funcNode.returnTypeNode.pos);
    }
    this.validateMainFunction(funcNode);
    this.validateModuleInitFunction(funcNode);
    try {
        // The function body itself runs as the default worker of a fresh action system.
        this.initNewWorkerActionSystem();
        this.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
                                                                          funcNode.pos,
                                                                          funcNode);
        this.visitFunction(funcNode);
        this.workerActionSystemStack.peek().endWorkerActionStateMachine();
    } finally {
        // Always pop and validate the action system, even if analysis threw.
        this.finalizeCurrentWorkerActionSystem();
    }
}
/**
 * Analyzes a function body: tracks transaction-exit validity and collected return types,
 * and reports INVOKABLE_MUST_RETURN when a non-nilable-return function's body does not
 * return on all paths.
 *
 * Fix: the original returned early for native functions AFTER pushing onto
 * returnWithintransactionCheckStack, doneWithintransactionCheckStack and returnTypes,
 * leaving all three stacks unbalanced. The pops now run in a finally block so every
 * push is matched on every exit path.
 */
private void visitFunction(BLangFunction funcNode) {
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    this.returnWithintransactionCheckStack.push(true);
    this.doneWithintransactionCheckStack.push(true);
    this.returnTypes.push(new HashSet<>());
    this.resetFunction();
    try {
        // Native functions have no body to analyze.
        if (Symbols.isNative(funcNode.symbol)) {
            return;
        }
        if (isPublicInvokableNode(funcNode)) {
            analyzeNode(funcNode.returnTypeNode, invokableEnv);
        }
        /* the body can be null in the case of Object type function declarations */
        if (funcNode.body != null) {
            analyzeNode(funcNode.body, invokableEnv);
            boolean isNilableReturn = funcNode.symbol.type.getReturnType().isNullable();
            if (!isNilableReturn && !this.statementReturns) {
                this.dlog.error(funcNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN,
                        funcNode.getKind().toString().toLowerCase());
            }
        }
    } finally {
        this.returnTypes.pop();
        this.returnWithintransactionCheckStack.pop();
        this.doneWithintransactionCheckStack.pop();
    }
}
/**
 * A node is a public invokable when it is itself public and either sits directly in a
 * package scope or belongs to a public owner (e.g. a public object).
 */
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
    if (!Symbols.isPublic(invNode.symbol)) {
        return false;
    }
    return SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind())
            || Symbols.isPublic(invNode.symbol.owner);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    /* ignore */
}
@Override
public void visit(BLangWorker worker) {
    /* ignore, remove later */
}
@Override
public void visit(BLangEndpoint endpointNode) {
    // Intentionally empty: endpoints need no additional code analysis here.
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Transactions are experimental, may not nest, and may not appear inside
    // retry/aborted/committed handlers. While inside the transaction body, break/continue,
    // return and done are flagged via the three per-transaction stacks.
    checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS, transactionNode.pos);
    this.checkStatementExecutionValidity(transactionNode);
    if (!isValidTransactionBlock()) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER);
        return;
    }
    this.loopWithintransactionCheckStack.push(false);
    this.returnWithintransactionCheckStack.push(false);
    this.doneWithintransactionCheckStack.push(false);
    this.transactionCount++;
    if (this.transactionCount > 1) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.NESTED_TRANSACTIONS_ARE_INVALID);
    }
    analyzeNode(transactionNode.transactionBody, env);
    this.transactionCount--;
    this.resetLastStatement();
    // Each optional handler block is analyzed with its "within handler" flag set, and the
    // return/last-statement state is reset afterwards so it does not leak out.
    if (transactionNode.onRetryBody != null) {
        this.withinRetryBlock = true;
        analyzeNode(transactionNode.onRetryBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinRetryBlock = false;
    }
    if (transactionNode.abortedBody != null) {
        this.withinAbortedBlock = true;
        analyzeNode(transactionNode.abortedBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinAbortedBlock = false;
    }
    if (transactionNode.committedBody != null) {
        this.withinCommittedBlock = true;
        analyzeNode(transactionNode.committedBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinCommittedBlock = false;
    }
    this.returnWithintransactionCheckStack.pop();
    this.loopWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
    analyzeExpr(transactionNode.retryCount);
}
@Override
public void visit(BLangAbort abortNode) {
    // abort is only valid inside a transaction; it terminates the enclosing block.
    if (this.transactionCount == 0) {
        this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    this.lastStatement = true;
}
@Override
public void visit(BLangRetry retryNode) {
    // retry is only valid inside a transaction; it terminates the enclosing block.
    if (this.transactionCount == 0) {
        this.dlog.error(retryNode.pos, DiagnosticCode.RETRY_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    this.lastStatement = true;
}
// Report UNREACHABLE_CODE when a statement follows one that returns on all paths or one
// that terminates the block (abort/retry); each flag is reset after reporting so only the
// first unreachable statement is flagged.
private void checkUnreachableCode(BLangStatement stmt) {
    if (this.statementReturns) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetStatementReturns();
    }
    if (lastStatement) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetLastStatement();
    }
}
// Pre-statement validity hook; currently only unreachable-code detection.
private void checkStatementExecutionValidity(BLangStatement stmt) {
    this.checkUnreachableCode(stmt);
}
@Override
public void visit(BLangBlockStmt blockNode) {
    // Analyze each statement in a fresh block scope; clear the block-terminator flag on exit.
    final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv));
    this.resetLastStatement();
}
@Override
public void visit(BLangReturn returnStmt) {
    this.checkStatementExecutionValidity(returnStmt);
    // Returning out of a transaction block is not allowed.
    if (checkReturnValidityInTransaction()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.statementReturns = true;
    analyzeExpr(returnStmt.expr);
    // Collect the returned type for the enclosing invokable's return-type set.
    this.returnTypes.peek().add(returnStmt.expr.type);
}
@Override
public void visit(BLangIf ifStmt) {
    this.checkStatementExecutionValidity(ifStmt);
    analyzeNode(ifStmt.body, env);
    // The if statement returns on all paths only when BOTH branches do; with no else
    // branch, statementReturns stays false (reset above).
    boolean ifStmtReturns = this.statementReturns;
    this.resetStatementReturns();
    if (ifStmt.elseStmt != null) {
        analyzeNode(ifStmt.elseStmt, env);
        this.statementReturns = ifStmtReturns && this.statementReturns;
    }
    analyzeExpr(ifStmt.expr);
}
@Override
public void visit(BLangMatch matchStmt) {
    // Analyze static and structured pattern clauses separately, then validate empty
    // [] / {} patterns and overall reachability/exhaustiveness.
    analyzeExpr(matchStmt.expr);
    boolean staticLastPattern = false;
    if (!matchStmt.getStaticPatternClauses().isEmpty()) {
        staticLastPattern = analyzeStaticMatchPatterns(matchStmt);
    }
    boolean structuredLastPattern = false;
    if (!matchStmt.getStructuredPatternClauses().isEmpty()) {
        structuredLastPattern = analyzeStructuredMatchPatterns(matchStmt);
    }
    if (!matchStmt.getPatternClauses().isEmpty()) {
        analyzeEmptyMatchPatterns(matchStmt);
        analyzeMatchedPatterns(matchStmt, staticLastPattern, structuredLastPattern);
    }
}
/**
 * Final validation of a match statement's clauses: at most one catch-all pattern is
 * allowed, a lone always-matching pattern is flagged, and the match "returns on all
 * paths" only if every clause body does.
 */
private void analyzeMatchedPatterns(BLangMatch matchStmt, boolean staticLastPattern,
                                    boolean structuredLastPattern) {
    if (staticLastPattern && structuredLastPattern) {
        dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CONTAINS_TWO_DEFAULT_PATTERNS);
    }
    if ((staticLastPattern && !hasErrorType(matchStmt.exprTypes)) || structuredLastPattern) {
        if (matchStmt.getPatternClauses().size() == 1) {
            dlog.error(matchStmt.getPatternClauses().get(0).pos, DiagnosticCode.MATCH_STMT_PATTERN_ALWAYS_MATCHES);
        }
        this.checkStatementExecutionValidity(matchStmt);
        boolean matchStmtReturns = true;
        for (BLangMatchBindingPatternClause patternClause : matchStmt.getPatternClauses()) {
            analyzeNode(patternClause.body, env);
            matchStmtReturns = matchStmtReturns && this.statementReturns;
            this.resetStatementReturns();
        }
        this.statementReturns = matchStmtReturns;
    }
}
// True when any type in the list is assignable to the built-in error type.
private boolean hasErrorType(List<BType> typeList) {
    for (BType t : typeList) {
        if (types.isAssignable(t, symTable.errorType)) {
            return true;
        }
    }
    return false;
}
// Analyze structured (binding-pattern) match clauses; returns true when the final clause
// is a catch-all. Skipped entirely when the match expression produced no types.
private boolean analyzeStructuredMatchPatterns(BLangMatch matchStmt) {
    if (matchStmt.exprTypes.isEmpty()) {
        return false;
    }
    return analyseStructuredBindingPatterns(matchStmt.getStructuredPatternClauses(),
                                            hasErrorType(matchStmt.exprTypes));
}
/**
 * This method is used to check structured `var []`, `var {}` & static `[]`, `{}` match pattern.
 *
 * @param matchStmt the match statement containing structured & static match patterns.
 */
private void analyzeEmptyMatchPatterns(BLangMatch matchStmt) {
    // Collect every empty-list and empty-record pattern, whether written as a static
    // literal ([] / {}) or as a structured binding pattern with no members.
    List<BLangMatchBindingPatternClause> emptyLists = new ArrayList<>();
    List<BLangMatchBindingPatternClause> emptyRecords = new ArrayList<>();
    for (BLangMatchBindingPatternClause pattern : matchStmt.patternClauses) {
        if (pattern.getKind() == NodeKind.MATCH_STATIC_PATTERN_CLAUSE) {
            BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) pattern;
            if (staticPattern.literal.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
                BLangListConstructorExpr listLiteral = (BLangListConstructorExpr) staticPattern.literal;
                if (listLiteral.exprs.isEmpty()) {
                    emptyLists.add(pattern);
                }
            } else if (staticPattern.literal.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
                BLangRecordLiteral recordLiteral = (BLangRecordLiteral) staticPattern.literal;
                if (recordLiteral.keyValuePairs.isEmpty()) {
                    emptyRecords.add(pattern);
                }
            }
        } else if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
            BLangMatchStructuredBindingPatternClause structuredPattern
                    = (BLangMatchStructuredBindingPatternClause) pattern;
            if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.TUPLE_VARIABLE) {
                BLangTupleVariable tupleVariable = (BLangTupleVariable) structuredPattern.bindingPatternVariable;
                if (tupleVariable.memberVariables.isEmpty() && tupleVariable.restVariable == null) {
                    emptyLists.add(pattern);
                }
            } else if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.RECORD_VARIABLE) {
                BLangRecordVariable recordVariable = (BLangRecordVariable) structuredPattern.bindingPatternVariable;
                if (recordVariable.variableList.isEmpty() && recordVariable.restParam == null) {
                    emptyRecords.add(pattern);
                }
            }
        }
    }
    // Only the first empty pattern of each kind can ever match; the rest are unreachable.
    if (emptyLists.size() > 1) {
        for (int i = 1; i < emptyLists.size(); i++) {
            dlog.error(emptyLists.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
        }
    }
    if (emptyRecords.size() > 1) {
        for (int i = 1; i < emptyRecords.size(); i++) {
            dlog.error(emptyRecords.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
        }
    }
}
/**
 * This method is used to check the isLike test in a static match pattern.
 *
 * @param matchStmt the match statement containing static match patterns.
 * @return true when the final matched static pattern is a catch-all pattern
 */
private boolean analyzeStaticMatchPatterns(BLangMatch matchStmt) {
    if (matchStmt.exprTypes.isEmpty()) {
        return false;
    }
    // Keep only patterns that can match at least one of the match expression's types;
    // the rest are reported as unmatched.
    List<BLangMatchStaticBindingPatternClause> matchedPatterns = new ArrayList<>();
    for (BLangMatchStaticBindingPatternClause pattern : matchStmt.getStaticPatternClauses()) {
        List<BType> matchedExpTypes = matchStmt.exprTypes
                .stream()
                .filter(exprType -> isValidStaticMatchPattern(exprType, pattern.literal))
                .collect(Collectors.toList());
        if (matchedExpTypes.isEmpty()) {
            dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            continue;
        }
        // Propagate JSON context from the matched expression into the pattern literal.
        this.isJSONContext = types.isJSONContext(matchStmt.expr.type);
        analyzeNode(pattern.literal, env);
        matchedPatterns.add(pattern);
    }
    if (matchedPatterns.isEmpty()) {
        return false;
    }
    return analyzeStaticPatterns(matchedPatterns, hasErrorType(matchStmt.exprTypes));
}
/**
 * Detects unreachable static patterns (a later pattern similar to an earlier one) and
 * determines whether the final pattern is the catch-all `_` pattern. A `_` pattern is not
 * a catch-all when the match expression may be an error, since `_` does not match errors.
 */
private boolean analyzeStaticPatterns(List<BLangMatchStaticBindingPatternClause> matchedPatterns,
                                      boolean errorTypeInMatchExpr) {
    BLangMatchStaticBindingPatternClause finalPattern = matchedPatterns.get(matchedPatterns.size() - 1);
    if (finalPattern.literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) finalPattern.literal).variableName.value.equals(Names.IGNORE.value)
            && !errorTypeInMatchExpr) {
        finalPattern.isLastPattern = true;
    }
    for (int i = 0; i < matchedPatterns.size() - 1; i++) {
        BLangExpression precedingPattern = matchedPatterns.get(i).literal;
        for (int j = i + 1; j < matchedPatterns.size(); j++) {
            BLangExpression pattern = matchedPatterns.get(j).literal;
            if (checkLiteralSimilarity(precedingPattern, pattern)) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
                // Remove the shadowed pattern and re-check the same index.
                matchedPatterns.remove(j--);
            }
        }
    }
    return finalPattern.isLastPattern;
}
/**
 * Reports unreachable structured binding patterns (shadowed by a similar preceding pattern with
 * an equivalent type guard) and determines whether the final pattern is exhaustive.
 *
 * @param clauses              the structured binding pattern clauses of the match statement
 * @param errorTypeInMatchExpr whether the matched expression may be an error
 * @return true if the last pattern is a guaranteed match
 */
private boolean analyseStructuredBindingPatterns(List<BLangMatchStructuredBindingPatternClause> clauses,
                                                 boolean errorTypeInMatchExpr) {
    BLangMatchStructuredBindingPatternClause finalPattern = clauses.get(clauses.size() - 1);
    // A simple, unguarded variable pattern catches everything — unless the matched expression
    // may be an error and the pattern is the wildcard "_" (errors don't bind to "_").
    if (finalPattern.bindingPatternVariable.getKind() == NodeKind.VARIABLE
            && finalPattern.typeGuardExpr == null
            && !(errorTypeInMatchExpr && isWildcardMatchPattern(finalPattern))) {
        finalPattern.isLastPattern = true;
    }
    BLangMatchStructuredBindingPatternClause currentPattern;
    BLangMatchStructuredBindingPatternClause precedingPattern;
    for (int i = 0; i < clauses.size(); i++) {
        precedingPattern = clauses.get(i);
        if (precedingPattern.typeGuardExpr != null) {
            analyzeExpr(precedingPattern.typeGuardExpr);
        }
        // A later clause is unreachable when both the binding pattern and the type guard of a
        // preceding clause subsume it; unreachable clauses are removed in place (j--).
        for (int j = i + 1; j < clauses.size(); j++) {
            currentPattern = clauses.get(j);
            BLangVariable precedingVar = precedingPattern.bindingPatternVariable;
            BLangVariable currentVar = currentPattern.bindingPatternVariable;
            if (checkStructuredPatternSimilarity(precedingVar, currentVar, errorTypeInMatchExpr) &&
                    checkTypeGuardSimilarity(precedingPattern.typeGuardExpr, currentPattern.typeGuardExpr)) {
                dlog.error(currentVar.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
                clauses.remove(j--);
            }
        }
    }
    return finalPattern.isLastPattern;
}
private boolean isWildcardMatchPattern(BLangMatchStructuredBindingPatternClause finalPattern) {
    // A wildcard pattern binds to the reserved ignore identifier ("_").
    BLangSimpleVariable bindingVar = (BLangSimpleVariable) finalPattern.bindingPatternVariable;
    return bindingVar.name.value.equals(Names.IGNORE.value);
}
/**
 * This method will check if two patterns are similar to each other.
 * Having similar patterns in the match block will result in unreachable pattern.
 *
 * @param precedingPattern pattern taken to compare similarity.
 * @param pattern the pattern that the precedingPattern is checked for similarity.
 * @return true if both patterns are similar.
 */
private boolean checkLiteralSimilarity(BLangExpression precedingPattern, BLangExpression pattern) {
    // `a | b` patterns: similar if either alternative is similar.
    if (precedingPattern.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr precedingBinaryExpr = (BLangBinaryExpr) precedingPattern;
        BLangExpression precedingLhsExpr = precedingBinaryExpr.lhsExpr;
        BLangExpression precedingRhsExpr = precedingBinaryExpr.rhsExpr;
        return checkLiteralSimilarity(precedingLhsExpr, pattern) ||
                checkLiteralSimilarity(precedingRhsExpr, pattern);
    }
    if (pattern.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) pattern;
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        return checkLiteralSimilarity(precedingPattern, lhsExpr) ||
                checkLiteralSimilarity(precedingPattern, rhsExpr);
    }
    switch (precedingPattern.type.tag) {
        case TypeTags.MAP:
            // Record/map pattern: the preceding pattern subsumes this one when every one of its
            // key-value entries has a similar counterpart in the current pattern.
            if (pattern.type.tag == TypeTags.MAP) {
                BLangRecordLiteral precedingRecordLiteral = (BLangRecordLiteral) precedingPattern;
                Map<String, BLangExpression> recordLiteral = ((BLangRecordLiteral) pattern).keyValuePairs
                        .stream()
                        .collect(Collectors.toMap(
                                keyValuePair -> ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value,
                                BLangRecordKeyValue::getValue
                        ));
                for (int i = 0; i < precedingRecordLiteral.keyValuePairs.size(); i++) {
                    BLangRecordKeyValue bLangRecordKeyValue = precedingRecordLiteral.keyValuePairs.get(i);
                    String key = ((BLangSimpleVarRef) bLangRecordKeyValue.key.expr).variableName.value;
                    if (!recordLiteral.containsKey(key)) {
                        return false;
                    }
                    if (!checkLiteralSimilarity(bLangRecordKeyValue.valueExpr, recordLiteral.get(key))) {
                        return false;
                    }
                }
                return true;
            }
            return false;
        case TypeTags.TUPLE:
            // Tuple pattern: similar when the arities match and members are pairwise similar.
            if (pattern.type.tag == TypeTags.TUPLE) {
                BLangListConstructorExpr precedingTupleLiteral = (BLangListConstructorExpr) precedingPattern;
                BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) pattern;
                if (precedingTupleLiteral.exprs.size() != tupleLiteral.exprs.size()) {
                    return false;
                }
                return IntStream.range(0, precedingTupleLiteral.exprs.size())
                        .allMatch(i -> checkLiteralSimilarity(precedingTupleLiteral.exprs.get(i),
                                tupleLiteral.exprs.get(i)));
            }
            return false;
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.BOOLEAN:
            // Simple value patterns: compare the constant values, resolving constant references
            // (simple var refs to constants) and unwrapping grouping parentheses as needed.
            if (precedingPattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                BConstantSymbol precedingPatternSym =
                        (BConstantSymbol) ((BLangSimpleVarRef) precedingPattern).symbol;
                if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    if (!((BLangSimpleVarRef) pattern).variableName.value.equals(Names.IGNORE.value)) {
                        BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol;
                        return precedingPatternSym.value.equals(patternSym.value);
                    }
                    return false;
                }
                BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) pattern).expression :
                        (BLangLiteral) pattern;
                return (precedingPatternSym.value.equals(literal.value));
            }
            if (types.isValueType(pattern.type)) {
                BLangLiteral precedingLiteral = precedingPattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) precedingPattern).expression :
                        (BLangLiteral) precedingPattern;
                if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    if (pattern.type.tag != TypeTags.NONE) {
                        BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol;
                        return patternSym.value.equals(precedingLiteral.value);
                    }
                    return false;
                }
                BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ?
                        (BLangLiteral) ((BLangGroupExpr) pattern).expression :
                        (BLangLiteral) pattern;
                return (precedingLiteral.value.equals(literal.value));
            }
            return false;
        case TypeTags.ANY:
            // A preceding `any` pattern shadows everything except error patterns.
            if (pattern.type.tag == TypeTags.ERROR) {
                return false;
            }
            return true;
        default:
            return false;
    }
}
/**
 * This method will determine if the type guard of the preceding pattern will result in the current pattern
 * being unreachable.
 *
 * @param precedingGuard type guard of the preceding structured pattern
 * @param currentGuard   type guard of the current structured pattern
 * @return true if the current pattern is unreachable due to the type guard of the preceding pattern
 */
private boolean checkTypeGuardSimilarity(BLangExpression precedingGuard, BLangExpression currentGuard) {
    if (precedingGuard != null && currentGuard != null) {
        // Both guards present: only `x is T` guards over the same variable and the same type
        // are considered equivalent; anything else is treated as dissimilar.
        if (precedingGuard.getKind() == NodeKind.TYPE_TEST_EXPR &&
                currentGuard.getKind() == NodeKind.TYPE_TEST_EXPR &&
                ((BLangTypeTestExpr) precedingGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                ((BLangTypeTestExpr) currentGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BLangTypeTestExpr precedingTypeTest = (BLangTypeTestExpr) precedingGuard;
            BLangTypeTestExpr currentTypeTest = (BLangTypeTestExpr) currentGuard;
            return ((BLangSimpleVarRef) precedingTypeTest.expr).variableName.toString().equals(
                    ((BLangSimpleVarRef) currentTypeTest.expr).variableName.toString()) &&
                    precedingTypeTest.typeNode.type.tag == currentTypeTest.typeNode.type.tag;
        }
        return false;
    }
    // An unguarded preceding pattern shadows a guarded (or unguarded) current pattern;
    // a guarded preceding pattern does not shadow an unguarded current pattern.
    return currentGuard != null || precedingGuard == null;
}
/**
 * This method will determine if the current structured pattern will be unreachable due to a preceding pattern.
 *
 * @param precedingVar         the structured pattern that appears on top
 * @param var                  the structured pattern that appears after the precedingVar
 * @param errorTypeInMatchExpr whether the matched expression may be an error
 * @return true if the current pattern is unreachable due to the preceding pattern
 */
private boolean checkStructuredPatternSimilarity(BLangVariable precedingVar,
                                                 BLangVariable var,
                                                 boolean errorTypeInMatchExpr) {
    if (precedingVar.type.tag == TypeTags.SEMANTIC_ERROR || var.type.tag == TypeTags.SEMANTIC_ERROR) {
        return false;
    }
    if (precedingVar.getKind() == NodeKind.RECORD_VARIABLE && var.getKind() == NodeKind.RECORD_VARIABLE) {
        BLangRecordVariable precedingRecVar = (BLangRecordVariable) precedingVar;
        BLangRecordVariable recVar = (BLangRecordVariable) var;
        Map<String, BLangVariable> recVarAsMap = recVar.variableList.stream()
                .collect(Collectors.toMap(
                        keyValue -> keyValue.key.value,
                        keyValue -> keyValue.valueBindingPattern
                ));
        // A preceding record pattern with more fields is more specific, so it can't subsume.
        if (precedingRecVar.variableList.size() > recVar.variableList.size()) {
            return false;
        }
        for (int i = 0; i < precedingRecVar.variableList.size(); i++) {
            BLangRecordVariableKeyValue precedingKeyValue = precedingRecVar.variableList.get(i);
            if (!recVarAsMap.containsKey(precedingKeyValue.key.value)) {
                return false;
            }
            if (!checkStructuredPatternSimilarity(
                    precedingKeyValue.valueBindingPattern,
                    recVarAsMap.get(precedingKeyValue.key.value),
                    errorTypeInMatchExpr)) {
                return false;
            }
        }
        if (precedingRecVar.hasRestParam() && recVar.hasRestParam()) {
            return true;
        }
        // A preceding rest param absorbs extra fields; a current-only rest param means the
        // current pattern matches strictly more, so it stays reachable.
        return precedingRecVar.hasRestParam() || !recVar.hasRestParam();
    }
    if (precedingVar.getKind() == NodeKind.TUPLE_VARIABLE && var.getKind() == NodeKind.TUPLE_VARIABLE) {
        List<BLangVariable> precedingMemberVars = ((BLangTupleVariable) precedingVar).memberVariables;
        BLangVariable precedingRestVar = ((BLangTupleVariable) precedingVar).restVariable;
        List<BLangVariable> memberVars = ((BLangTupleVariable) var).memberVariables;
        BLangVariable memberRestVar = ((BLangTupleVariable) var).restVariable;
        if (precedingRestVar != null && memberRestVar != null) {
            return true;
        }
        if (precedingRestVar == null && memberRestVar == null
                && precedingMemberVars.size() != memberVars.size()) {
            return false;
        }
        if (precedingRestVar != null && precedingMemberVars.size() > memberVars.size()) {
            return false;
        }
        if (memberRestVar != null) {
            return false;
        }
        for (int i = 0; i < memberVars.size(); i++) {
            if (!checkStructuredPatternSimilarity(precedingMemberVars.get(i), memberVars.get(i),
                    errorTypeInMatchExpr)) {
                return false;
            }
        }
        return true;
    }
    if (precedingVar.getKind() == NodeKind.ERROR_VARIABLE && var.getKind() == NodeKind.ERROR_VARIABLE) {
        BLangErrorVariable precedingErrVar = (BLangErrorVariable) precedingVar;
        BLangErrorVariable errVar = (BLangErrorVariable) var;
        // A preceding rest-detail error pattern matches any error detail.
        if (precedingErrVar.restDetail != null) {
            return true;
        }
        if (errVar.restDetail != null) {
            return false;
        }
        if (precedingErrVar.detail != null && errVar.detail != null) {
            Map<String, BLangVariable> preDetails = precedingErrVar.detail.stream()
                    .collect(Collectors.toMap(entry -> entry.key.value, entry -> entry.valueBindingPattern));
            for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errVar.detail) {
                BLangVariable correspondingCurDetail = preDetails.get(detailEntry.key.value);
                if (correspondingCurDetail == null) {
                    return false;
                }
                boolean similar =
                        checkStructuredPatternSimilarity(detailEntry.valueBindingPattern, correspondingCurDetail,
                                errorTypeInMatchExpr);
                if (!similar) {
                    return false;
                }
            }
        }
        return true;
    }
    // The wildcard "_" does not match errors, so it cannot shadow an error pattern.
    if (precedingVar.getKind() == NodeKind.VARIABLE
            && ((BLangSimpleVariable) precedingVar).name.value.equals(Names.IGNORE.value)
            && var.getKind() == NodeKind.ERROR_VARIABLE) {
        return false;
    }
    // Otherwise, a plain preceding variable pattern catches everything that follows.
    return precedingVar.getKind() == NodeKind.VARIABLE;
}
/**
 * This method will check if the static match pattern is valid based on the matching type.
 *
 * @param matchType type of the expression being matched.
 * @param literal the static match pattern.
 * @return true if the pattern is valid, else false.
 */
private boolean isValidStaticMatchPattern(BType matchType, BLangExpression literal) {
    // NONE-typed patterns (e.g. the wildcard "_") match anything.
    if (literal.type.tag == TypeTags.NONE) {
        return true;
    }
    if (types.isSameType(literal.type, matchType)) {
        return true;
    }
    if (TypeTags.ANY == literal.type.tag) {
        return true;
    }
    switch (matchType.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
            return true;
        case TypeTags.UNION:
            // Valid if the pattern is valid against any member of the union.
            BUnionType unionMatchType = (BUnionType) matchType;
            return unionMatchType.getMemberTypes()
                    .stream()
                    .anyMatch(memberMatchType -> isValidStaticMatchPattern(memberMatchType, literal));
        case TypeTags.TUPLE:
            if (literal.type.tag == TypeTags.TUPLE) {
                BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) literal;
                BTupleType literalTupleType = (BTupleType) literal.type;
                BTupleType matchTupleType = (BTupleType) matchType;
                if (literalTupleType.tupleTypes.size() != matchTupleType.tupleTypes.size()) {
                    return false;
                }
                return IntStream.range(0, literalTupleType.tupleTypes.size())
                        .allMatch(i ->
                                isValidStaticMatchPattern(matchTupleType.tupleTypes.get(i),
                                        tupleLiteral.exprs.get(i)));
            }
            break;
        case TypeTags.MAP:
            if (literal.type.tag == TypeTags.MAP) {
                // Every value in the map pattern must be a valid pattern for the constraint type.
                BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal;
                return IntStream.range(0, mapLiteral.keyValuePairs.size())
                        .allMatch(i -> isValidStaticMatchPattern(((BMapType) matchType).constraint,
                                mapLiteral.keyValuePairs.get(i).valueExpr));
            }
            break;
        case TypeTags.RECORD:
            if (literal.type.tag == TypeTags.MAP) {
                BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal;
                BRecordType recordMatchType = (BRecordType) matchType;
                Map<String, BType> recordFields = recordMatchType.fields
                        .stream()
                        .collect(Collectors.toMap(
                                field -> field.getName().getValue(),
                                BField::getType
                        ));
                for (BLangRecordKeyValue literalKeyValue : mapLiteral.keyValuePairs) {
                    String literalKeyName;
                    NodeKind nodeKind = literalKeyValue.key.expr.getKind();
                    if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
                        literalKeyName = ((BLangSimpleVarRef) literalKeyValue.key.expr).variableName.value;
                    } else if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
                        literalKeyName = ((BLangLiteral) literalKeyValue.key.expr).value.toString();
                    } else {
                        return false;
                    }
                    // Known fields must match their declared type; unknown fields are only
                    // allowed through the rest field of an open record.
                    if (recordFields.containsKey(literalKeyName)) {
                        if (!isValidStaticMatchPattern(
                                recordFields.get(literalKeyName), literalKeyValue.valueExpr)) {
                            return false;
                        }
                    } else if (recordMatchType.sealed ||
                            !isValidStaticMatchPattern(recordMatchType.restFieldType, literalKeyValue.valueExpr)) {
                        return false;
                    }
                }
                return true;
            }
            break;
        case TypeTags.BYTE:
            // Int literals are permitted for byte match types (range checked elsewhere).
            if (literal.type.tag == TypeTags.INT) {
                return true;
            }
            break;
        case TypeTags.FINITE:
            if (literal.getKind() == NodeKind.LITERAL || literal.getKind() == NodeKind.NUMERIC_LITERAL) {
                return types.isAssignableToFiniteType(matchType, (BLangLiteral) literal);
            }
            // Constant references: test the constant's single value against the finite type.
            if (literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    ((BLangSimpleVarRef) literal).symbol.getKind() == SymbolKind.CONSTANT) {
                BConstantSymbol constSymbol = (BConstantSymbol) ((BLangSimpleVarRef) literal).symbol;
                return types.isAssignableToFiniteType(matchType,
                        (BLangLiteral) ((BFiniteType) constSymbol.type).valueSpace.iterator().next());
            }
            break;
    }
    return false;
}
@Override
public void visit(BLangForeach foreach) {
    // Track loop/transaction nesting around the body; the order of push/pop and the
    // loopCount increments must bracket the body analysis exactly.
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(foreach);
    this.loopCount++;
    analyzeNode(foreach.body, env);
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(foreach.collection);
}
@Override
public void visit(BLangWhile whileNode) {
    // Same nesting bookkeeping as foreach: loop state must bracket the body analysis.
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(whileNode);
    this.loopCount++;
    analyzeNode(whileNode.body, env);
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(whileNode.expr);
}
@Override
public void visit(BLangLock lockNode) {
    // `lock` is an experimental feature; flag its use, then analyze the body statements.
    checkExperimentalFeatureValidity(ExperimentalFeatures.LOCK, lockNode.pos);
    this.checkStatementExecutionValidity(lockNode);
    lockNode.body.stmts.forEach(e -> analyzeNode(e, env));
}
@Override
public void visit(BLangContinue continueNode) {
    this.checkStatementExecutionValidity(continueNode);
    // `continue` is only valid inside a loop.
    if (this.loopCount == 0) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    // `continue` may not be used to jump out of a transaction block.
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    // Statements after `continue` in the same block are unreachable.
    this.lastStatement = true;
}
public void visit(BLangImportPackage importPkgNode) {
    // Analyze the imported package's AST if its environment is available
    // (it may be absent for already-compiled/bala imports).
    BPackageSymbol pkgSymbol = importPkgNode.symbol;
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
    if (pkgEnv == null) {
        return;
    }
    analyzeNode(pkgEnv.node, env);
}
// XML namespace declarations need no code analysis.
public void visit(BLangXMLNS xmlnsNode) {
    /* ignore */
}
// Services need no analysis here; their resources/members are visited separately.
public void visit(BLangService serviceNode) {
}
// Old-style resources are no longer supported; reaching this visitor is a compiler bug.
public void visit(BLangResource resourceNode) {
    throw new RuntimeException("Deprecated lang feature");
}
public void visit(BLangForever foreverStatement) {
    // `forever` (streaming queries) is experimental; it also never returns control,
    // so everything after it is unreachable.
    checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMING_QUERIES, foreverStatement.pos);
    this.checkStatementExecutionValidity(foreverStatement);
    this.lastStatement = true;
}
/**
 * Checks that a type referenced from a public construct is itself exportable.
 * Anonymous owners are exempt except when the reference appears in a function signature.
 *
 * @param owner           symbol that owns the type reference
 * @param symbol          the referenced type's symbol
 * @param inFuncSignature whether the reference occurs in a function signature
 * @param pos             position for error reporting
 */
private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature,
                                      DiagnosticPos pos) {
    if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) {
        return;
    }
    if (Symbols.isPublic(owner)) {
        checkForExportableType(symbol, pos);
    }
}
public void visit(BLangSimpleVariable varNode) {
    analyzeTypeNode(varNode.typeNode, this.env);
    analyzeExpr(varNode.expr);
    // Symbol may be absent when earlier phases failed; nothing more to validate then.
    if (Objects.isNull(varNode.symbol)) {
        return;
    }
    if (!Symbols.isPublic(varNode.symbol)) {
        return;
    }
    // Public fields of records/objects, and public module-level variables (i.e. not locals
    // inside an invokable), must reference exportable types.
    int ownerSymTag = this.env.scope.owner.tag;
    if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
        analyzeExportableTypeRef(this.env.scope.owner, varNode.type.tsymbol, false, varNode.pos);
    } else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
        analyzeExportableTypeRef(varNode.symbol, varNode.type.tsymbol, false, varNode.pos);
    }
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
    // Type node is optional (may be inferred); analyze it only when present.
    if (bLangTupleVariable.typeNode != null) {
        analyzeNode(bLangTupleVariable.typeNode, this.env);
    }
    analyzeExpr(bLangTupleVariable.expr);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
    // Type node is optional (may be inferred); analyze it only when present.
    if (bLangRecordVariable.typeNode != null) {
        analyzeNode(bLangRecordVariable.typeNode, this.env);
    }
    analyzeExpr(bLangRecordVariable.expr);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
    // Type node is optional (may be inferred); analyze it only when present.
    if (bLangErrorVariable.typeNode != null) {
        analyzeNode(bLangErrorVariable.typeNode, this.env);
    }
    analyzeExpr(bLangErrorVariable.expr);
}
/**
 * Returns a nilable version of the given type: the type itself if it already allows nil,
 * otherwise a union of the type and nil.
 *
 * @param type the type to make nilable
 * @return a type that accepts nil in addition to the given type's values
 */
private BType getNilableType(BType type) {
    if (type.isNullable()) {
        return type;
    }
    BUnionType unionType = BUnionType.create(null);
    if (type.tag == TypeTags.UNION) {
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
        unionType.addAll(memTypes);
    }
    // NOTE(review): for unions this adds both the union's members AND the union type itself
    // to the new union — presumably harmless/intentional, but worth confirming.
    unionType.add(type);
    unionType.add(symTable.nilType);
    return unionType;
}
// Identifiers carry no analyzable semantics on their own.
public void visit(BLangIdentifier identifierNode) {
    /* ignore */
}
// Annotation declarations need no code analysis.
public void visit(BLangAnnotation annotationNode) {
    /* ignore */
}
// Annotation attachments need no code analysis.
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    /* ignore */
}
public void visit(BLangSimpleVariableDef varDefNode) {
    // Reachability check for the statement, then analyze the declared variable.
    this.checkStatementExecutionValidity(varDefNode);
    analyzeNode(varDefNode.var, env);
}
public void visit(BLangCompoundAssignment compoundAssignment) {
    this.checkStatementExecutionValidity(compoundAssignment);
    analyzeExpr(compoundAssignment.varRef);
    analyzeExpr(compoundAssignment.expr);
}
public void visit(BLangAssignment assignNode) {
    this.checkStatementExecutionValidity(assignNode);
    analyzeExpr(assignNode.varRef);
    analyzeExpr(assignNode.expr);
}
public void visit(BLangRecordDestructure stmt) {
    // A variable may not be bound more than once within the same destructuring pattern.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}
public void visit(BLangErrorDestructure stmt) {
    // A variable may not be bound more than once within the same destructuring pattern.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}
@Override
public void visit(BLangTupleDestructure stmt) {
    // A variable may not be bound more than once within the same destructuring pattern.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}
private void checkDuplicateVarRefs(List<BLangExpression> varRefs) {
    // Delegate with a fresh, empty symbol set; the overload accumulates seen symbols.
    Set<BSymbol> seenSymbols = new HashSet<>();
    checkDuplicateVarRefs(varRefs, seenSymbols);
}
/**
 * Recursively walks variable references in a binding pattern and reports any symbol that is
 * bound more than once. The {@code symbols} set accumulates symbols seen so far across the
 * whole (possibly nested) pattern.
 *
 * @param varRefs variable references at the current nesting level
 * @param symbols symbols already bound in outer/previous parts of the pattern
 */
private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) {
    for (BLangExpression varRef : varRefs) {
        // Only variable-reference kinds participate; nulls come from absent rest params.
        if (varRef == null || (varRef.getKind() != NodeKind.SIMPLE_VARIABLE_REF
                && varRef.getKind() != NodeKind.RECORD_VARIABLE_REF
                && varRef.getKind() != NodeKind.ERROR_VARIABLE_REF
                && varRef.getKind() != NodeKind.TUPLE_VARIABLE_REF)) {
            continue;
        }
        // The ignore identifier "_" may appear any number of times.
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && names.fromIdNode(((BLangSimpleVarRef) varRef).variableName) == Names.IGNORE) {
            continue;
        }
        // Recurse into structured references before checking the reference itself.
        if (varRef.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols);
        }
        if (varRef.getKind() == NodeKind.RECORD_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols);
        }
        if (varRef.getKind() == NodeKind.ERROR_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols);
        }
        BLangVariableReference varRefExpr = (BLangVariableReference) varRef;
        // Set.add returning false means the symbol was already bound — a duplicate.
        if (varRefExpr.symbol != null && !symbols.add(varRefExpr.symbol)) {
            this.dlog.error(varRef.pos, DiagnosticCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN,
                    varRefExpr.symbol);
        }
    }
}
private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) {
    // Collect each field's variable reference into a mutable list, then append the
    // rest parameter (may be null; callers skip nulls).
    List<BLangExpression> varRefs = varRef.recordRefFields.stream()
            .map(field -> field.variableReference)
            .collect(Collectors.toCollection(ArrayList::new));
    varRefs.add((BLangExpression) varRef.restParam);
    return varRefs;
}
private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
    // Order: reason ref first, then each detail entry's expression, then the rest var.
    List<BLangExpression> varRefs = new ArrayList<>();
    varRefs.add(varRef.reason);
    varRef.detail.forEach(detailEntry -> varRefs.add(detailEntry.expr));
    varRefs.add(varRef.restVar);
    return varRefs;
}
private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
    // Copy the member references, then append the rest parameter (may be null).
    List<BLangExpression> varRefs = new ArrayList<>();
    varRefs.addAll(varRef.expressions);
    varRefs.add((BLangExpression) varRef.restParam);
    return varRefs;
}
public void visit(BLangBreak breakNode) {
    this.checkStatementExecutionValidity(breakNode);
    // `break` is only valid inside a loop.
    if (this.loopCount == 0) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    // `break` may not be used to jump out of a transaction block.
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    // Statements after `break` in the same block are unreachable.
    this.lastStatement = true;
}
// `throw` is a deprecated construct; nothing to analyze here.
public void visit(BLangThrow throwNode) {
    /* ignore */
}
public void visit(BLangPanic panicNode) {
    this.checkStatementExecutionValidity(panicNode);
    // `panic` terminates the flow, so the enclosing block is considered to have returned.
    this.statementReturns = true;
    analyzeExpr(panicNode.expr);
}
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    // Only reachability needs validating for namespace declaration statements.
    this.checkStatementExecutionValidity(xmlnsStmtNode);
}
public void visit(BLangExpressionStmt exprStmtNode) {
    this.checkStatementExecutionValidity(exprStmtNode);
    analyzeExpr(exprStmtNode.expr);
    // Only certain expressions (calls, waits) may stand alone as statements.
    validateExprStatementExpression(exprStmtNode);
}
/**
 * Validates that an expression used as a statement is one of the permitted forms
 * (an invocation or a wait expression, possibly wrapped in match/check/checkpanic).
 *
 * @param exprStmtNode the expression statement to validate
 */
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
    BLangExpression expr = exprStmtNode.expr;
    // Unwrap match/check/checkpanic layers to find the underlying expression.
    while (expr.getKind() == NodeKind.MATCH_EXPRESSION ||
            expr.getKind() == NodeKind.CHECK_EXPR ||
            expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
        if (expr.getKind() == NodeKind.MATCH_EXPRESSION) {
            expr = ((BLangMatchExpression) expr).expr;
        } else if (expr.getKind() == NodeKind.CHECK_EXPR) {
            expr = ((BLangCheckedExpr) expr).expr;
        } else if (expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
            expr = ((BLangCheckPanickedExpr) expr).expr;
        }
    }
    // Invocations and waits are always allowed as statements.
    if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.WAIT_EXPR) {
        return;
    }
    // Anything else must not produce a value; only nil-typed expressions are tolerated.
    if (expr.type == symTable.nilType) {
        dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT);
    }
}
// try/catch/finally is a deprecated construct; nothing to analyze here.
public void visit(BLangTryCatchFinally tryNode) {
    /* ignore */
}
// catch is a deprecated construct; nothing to analyze here.
public void visit(BLangCatch catchNode) {
    /* ignore */
}
private boolean isTopLevel() {
    // True when the node under analysis is the direct body of the enclosing invokable.
    return this.env.enclInvokable.body == this.env.node;
}
// True when the enclosing invokable is a worker lambda.
private boolean isInWorker() {
    return env.enclInvokable.flagSet.contains(Flag.WORKER);
}
// Worker communication is allowed at the top level of a worker body, or anywhere inside a
// worker when talking to the default worker.
private boolean isCommunicationAllowedLocation(String workerIdentifier) {
    return (isDefaultWorkerCommunication(workerIdentifier) && isInWorker()) || isTopLevel();
}
// True when the target worker is the implicit default worker.
private boolean isDefaultWorkerCommunication(String workerIdentifier) {
    return workerIdentifier.equals(DEFAULT_WORKER_NAME);
}
/**
 * Checks whether a worker with the given name is visible for send/receive.
 *
 * @param type       the resolved type of the worker reference
 * @param workerName the target worker's name
 * @return true if the target worker exists in the current context
 */
private boolean workerExists(BType type, String workerName) {
    // Communication with the default worker from inside a worker is always valid.
    if (isDefaultWorkerCommunication(workerName) && isInWorker()) {
        return true;
    }
    if (type == symTable.semanticError) {
        return false;
    }
    // Named workers resolve to future types flagged as worker-derived.
    return type.tag == TypeTags.FUTURE && ((BFutureType) type).workerDerivative;
}
public void visit(BLangWorkerSend workerSendNode) {
    this.checkStatementExecutionValidity(workerSendNode);
    // Channel sends are validated differently; only the expressions are analyzed.
    if (workerSendNode.isChannel) {
        analyzeExpr(workerSendNode.expr);
        if (workerSendNode.keyExpr != null) {
            analyzeExpr(workerSendNode.keyExpr);
        }
        return;
    }
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    // Only anydata values may be sent between workers.
    BType type = workerSendNode.expr.type;
    if (type == symTable.semanticError) {
        was.hasErrors = true;
    } else if (!types.isAnydata(type)) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, type);
    }
    String workerName = workerSendNode.workerIdentifier.getValue();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
        was.hasErrors = true;
    }
    // NOTE(review): this reads workerSendNode.type (the worker-reference type set earlier)
    // before it is overwritten below with the accumulated error type — confirm intentional.
    if (!this.workerExists(workerSendNode.type, workerName)) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    workerSendNode.type = createAccumulatedErrorTypeForMatchingRecive(workerSendNode);
    was.addWorkerAction(workerSendNode);
    analyzeExpr(workerSendNode.expr);
    validateActionParentNode(workerSendNode.pos, workerSendNode.expr);
}
/**
 * Computes the type a matching receive must handle for this send: the send expression's type
 * unioned with every error type that may have been returned before the send executes.
 * Non-error return types seen before a send are reported as errors.
 *
 * @param workerSendNode the worker send whose matching-receive type is computed
 * @return the send expression's type, or a union of it with the possible error returns
 */
private BType createAccumulatedErrorTypeForMatchingRecive(BLangWorkerSend workerSendNode) {
    Set<BType> returnTypesUpToNow = this.returnTypes.peek();
    // Insertion-ordered set keeps the resulting union's member order deterministic.
    // (Previously an anonymous LinkedHashSet subclass whose initializer built and discarded
    // a Comparator — dead code with no effect on the set; removed.)
    LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
    for (BType returnType : returnTypesUpToNow) {
        if (returnType.tag == TypeTags.ERROR) {
            returnTypeAndSendType.add(returnType);
        } else {
            // A plain (non-error) return before a send makes the send unreachable/invalid.
            this.dlog.error(workerSendNode.pos, DiagnosticCode.WORKER_SEND_AFTER_RETURN);
        }
    }
    returnTypeAndSendType.add(workerSendNode.expr.type);
    if (returnTypeAndSendType.size() > 1) {
        return BUnionType.create(null, returnTypeAndSendType);
    } else {
        return workerSendNode.expr.type;
    }
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Sync sends are actions: they must appear in a statement-like position.
    validateActionParentNode(syncSendExpr.pos, syncSendExpr);
    String workerName = syncSendExpr.workerIdentifier.getValue();
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
        was.hasErrors = true;
    }
    if (!this.workerExists(syncSendExpr.workerType, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    // Record the action for send/receive pairing analysis.
    was.addWorkerAction(syncSendExpr);
    analyzeExpr(syncSendExpr.expr);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Receives are actions: they must appear in a statement-like position.
    validateActionParentNode(workerReceiveNode.pos, workerReceiveNode);
    // Channel receives are validated differently; only the key expression is analyzed.
    if (workerReceiveNode.isChannel) {
        if (workerReceiveNode.keyExpr != null) {
            analyzeExpr(workerReceiveNode.keyExpr);
        }
        return;
    }
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    String workerName = workerReceiveNode.workerIdentifier.getValue();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(workerReceiveNode.pos, DiagnosticCode.INVALID_WORKER_RECEIVE_POSITION);
        was.hasErrors = true;
    }
    if (!this.workerExists(workerReceiveNode.workerType, workerName)) {
        this.dlog.error(workerReceiveNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    // A matching sync send may fail with the errors this worker could have returned so far.
    workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode);
    was.addWorkerAction(workerReceiveNode);
}
/**
 * Computes the type a matching sync send observes for this receive: nil unioned with every
 * error type that may have been returned before the receive executes. Non-error return types
 * seen before a receive are reported as errors.
 *
 * @param workerReceiveNode the worker receive whose matching-send type is computed
 * @return nil, or a union of nil with the possible error returns
 */
public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode) {
    Set<BType> returnTypesUpToNow = this.returnTypes.peek();
    LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
    for (BType returnType : returnTypesUpToNow) {
        if (returnType.tag == TypeTags.ERROR) {
            returnTypeAndSendType.add(returnType);
        } else {
            this.dlog.error(workerReceiveNode.pos, DiagnosticCode.WORKER_RECEIVE_AFTER_RETURN);
        }
    }
    returnTypeAndSendType.add(symTable.nilType);
    if (returnTypeAndSendType.size() > 1) {
        return BUnionType.create(null, returnTypeAndSendType);
    } else {
        return symTable.nilType;
    }
}
public void visit(BLangLiteral literalExpr) {
    // `null` (as opposed to `()`) is only permitted in JSON contexts.
    if (literalExpr.type.tag == TypeTags.NIL &&
            NULL_LITERAL.equals(literalExpr.originalValue) &&
            !literalExpr.isJSONContext && !this.isJSONContext) {
        dlog.error(literalExpr.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL);
    }
}
public void visit(BLangListConstructorExpr listConstructorExpr) {
    // Just analyze every member expression.
    analyzeExprs(listConstructorExpr.exprs);
}
public void visit(BLangRecordLiteral recordLiteral) {
    List<BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs;
    keyValuePairs.forEach(kv -> analyzeExpr(kv.valueExpr));
    // Track keys seen so far to report duplicates; identifier and literal keys share the set.
    Set<Object> names = new HashSet<>();
    BType type = recordLiteral.type;
    boolean isOpenRecord = type != null && type.tag == TypeTags.RECORD && !((BRecordType) type).sealed;
    for (BLangRecordKeyValue recFieldDecl : keyValuePairs) {
        BLangExpression key = recFieldDecl.getKey();
        // Computed keys ([expr]) can't be checked for duplication statically.
        if (recFieldDecl.key.computedKey) {
            analyzeExpr(key);
            continue;
        }
        if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BLangSimpleVarRef keyRef = (BLangSimpleVarRef) key;
            String fieldName = keyRef.variableName.value;
            if (names.contains(fieldName)) {
                String assigneeType = recordLiteral.parent.type.getKind().typeName();
                this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyRef);
            }
            // For open records, identifier keys not declared as fields must be quoted.
            if (isOpenRecord && ((BRecordType) type).fields.stream()
                    .noneMatch(field -> fieldName.equals(field.name.value))) {
                dlog.error(key.pos, DiagnosticCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY, fieldName);
            }
            names.add(fieldName);
        } else if (key.getKind() == NodeKind.LITERAL || key.getKind() == NodeKind.NUMERIC_LITERAL) {
            BLangLiteral keyLiteral = (BLangLiteral) key;
            if (names.contains(keyLiteral.value)) {
                String assigneeType = recordLiteral.parent.type.getKind().typeName();
                this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyLiteral);
            }
            names.add(keyLiteral.value);
        }
    }
}
// Table literals need no additional analysis here.
public void visit(BLangTableLiteral tableLiteral) {
    /* ignore */
}
// Simple variable references need no additional analysis here.
public void visit(BLangSimpleVarRef varRefExpr) {
    /* ignore */
}
// Record variable references need no additional analysis here.
public void visit(BLangRecordVarRef varRefExpr) {
    /* ignore */
}
// Error variable references need no additional analysis here.
public void visit(BLangErrorVarRef varRefExpr) {
    /* ignore */
}
// Tuple variable references need no additional analysis here.
public void visit(BLangTupleVarRef varRefExpr) {
    /* ignore */
}
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    analyzeExpr(fieldAccessExpr.expr);
    // XML attribute/element access is an experimental feature.
    if (fieldAccessExpr.expr.type.tag == TypeTags.XML) {
        checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ACCESS, fieldAccessExpr.pos);
    }
}
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    analyzeExpr(indexAccessExpr.indexExpr);
    analyzeExpr(indexAccessExpr.expr);
    // XML index access is an experimental feature.
    if (indexAccessExpr.expr.type.tag == TypeTags.XML) {
        checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ACCESS, indexAccessExpr.pos);
    }
}
public void visit(BLangInvocation invocationExpr) {
    analyzeExpr(invocationExpr.expr);
    analyzeExprs(invocationExpr.requiredArgs);
    analyzeExprs(invocationExpr.restArgs);
    // Warn on calls to functions marked @deprecated.
    if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) {
        BSymbol funcSymbol = invocationExpr.symbol;
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) {
            dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION,
                    names.fromIdNode(invocationExpr.name));
        }
    }
    // Remote/action and async invocations have positional restrictions.
    if (invocationExpr.actionInvocation || invocationExpr.async) {
        validateActionInvocation(invocationExpr.pos, invocationExpr);
    }
}
/**
 * Validates the receiver of an action invocation: it must be a simple variable reference
 * or a {@code self.field} access; any other receiver form is invalid as an action target.
 *
 * @param pos   position for error reporting
 * @param iExpr the action invocation
 */
private void validateActionInvocation(DiagnosticPos pos, BLangInvocation iExpr) {
    if (iExpr.expr != null) {
        final NodeKind clientNodeKind = iExpr.expr.getKind();
        if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF && clientNodeKind != NodeKind.FIELD_BASED_ACCESS_EXPR) {
            dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
        } else if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
            // Field access receivers are only allowed in the form `self.field`.
            final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr;
            if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
            } else {
                final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr;
                if (!Names.SELF.equals(selfName.symbol.name)) {
                    dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
                }
            }
        }
    }
    // The invocation itself must also sit in a statement-like position.
    validateActionParentNode(pos, iExpr);
}
/**
 * Actions can only occur as part of a statement or nested inside other actions.
 * Walks up the parent chain: statement-like parents are valid terminals; "transparent"
 * wrappers (check, trap, group, worker interactions, ...) are skipped; anything else is
 * an invalid position for an action.
 *
 * @param pos  position for error reporting
 * @param node the action (or wrapping expression) whose position is validated
 */
private void validateActionParentNode(DiagnosticPos pos, BLangNode node) {
    BLangNode parent = node.parent;
    if (parent.getKind() == NodeKind.BLOCK) {
        return;
    }
    while (parent != null) {
        final NodeKind kind = parent.getKind();
        // Statement-like parents that may directly contain an action.
        if (kind == NodeKind.ASSIGNMENT
                || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.RETURN
                || kind == NodeKind.RECORD_DESTRUCTURE || kind == NodeKind.ERROR_DESTRUCTURE
                || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE
                || kind == NodeKind.MATCH || kind == NodeKind.FOREACH) {
            return;
        } else if (kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.CHECK_EXPR
                || kind == NodeKind.WORKER_RECEIVE || kind == NodeKind.WORKER_FLUSH
                || kind == NodeKind.WORKER_SEND || kind == NodeKind.WAIT_EXPR
                || kind == NodeKind.GROUP_EXPR || kind == NodeKind.TRAP_EXPR) {
            // Transparent wrapper: keep walking up. Null-check before dereferencing —
            // the original code could NPE here on a detached parent chain.
            parent = parent.parent;
            if (parent != null && parent.getKind() == NodeKind.BLOCK) {
                return;
            }
            continue;
        } else if (kind == NodeKind.ELVIS_EXPR
                && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION
                && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) {
            parent = parent.parent;
            continue;
        }
        break;
    }
    dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
/** Analyzes a type-init (new) expression: constructor args and the init invocation. */
public void visit(BLangTypeInit cIExpr) {
    analyzeExprs(cIExpr.argsExpr);
    analyzeExpr(cIExpr.initInvocation);
}

/**
 * Analyzes a ternary expression. The JSON-context flag is re-applied before each
 * branch because analyzeExpr resets it after every analyzed sub-expression.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    analyzeExpr(ternaryExpr.expr);
    boolean isJSONCtx = getIsJSONContext(ternaryExpr.type);
    this.isJSONContext = isJSONCtx;
    analyzeExpr(ternaryExpr.thenExpr);
    this.isJSONContext = isJSONCtx;
    analyzeExpr(ternaryExpr.elseExpr);
}

/** Analyzes a wait expression; wait is action-like, so its placement is validated. */
public void visit(BLangWaitExpr awaitExpr) {
    analyzeExpr(awaitExpr.getExpression());
    validateActionParentNode(awaitExpr.pos, awaitExpr);
}

/** Analyzes each key-value pair of a wait-for-all expression (value if present, else key). */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    waitForAllExpr.keyValuePairs.forEach(keyValue -> {
        BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr;
        analyzeExpr(expr);
    });
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // A flush either targets one named worker or flushes every pending async send
    // of the current worker; in both cases there must be at least one matching send.
    BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier;
    Stack<WorkerActionSystem> workerActionSystems = this.workerActionSystemStack;
    WorkerActionSystem currentWrkerAction = workerActionSystems.peek();
    List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWrkerAction);
    if (flushWrkIdentifier != null) {
        // Named flush: restrict to sends addressed to that worker.
        List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream()
                .filter(bLangNode -> bLangNode.workerIdentifier.equals(flushWrkIdentifier))
                .collect(Collectors.toList());
        if (sendsToGivenWrkr.size() == 0) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH_FOR_WORKER, flushWrkIdentifier,
                    currentWrkerAction.currentWorkerId());
            return;
        } else {
            sendStmts = sendsToGivenWrkr;
        }
    } else {
        if (sendStmts.size() == 0) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH,
                    currentWrkerAction.currentWorkerId());
            return;
        }
    }
    // Cache the matched sends for later phases and validate flush placement.
    workerFlushExpr.cachedWorkerSendStmts = sendStmts;
    validateActionParentNode(workerFlushExpr.pos, workerFlushExpr);
}
/**
 * Collects the async worker-send actions recorded so far for the currently
 * active worker of the given worker action system.
 */
private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) {
    List<BLangWorkerSend> asyncSends = new ArrayList<>();
    for (BLangNode action : currentWorkerAction.workerActionStateMachines.peek().actions) {
        if (isWorkerSend(action)) {
            asyncSends.add((BLangWorkerSend) action);
        }
    }
    return asyncSends;
}
/** Analyzes the expression wrapped by a trap expression. */
@Override
public void visit(BLangTrapExpr trapExpr) {
    analyzeExpr(trapExpr.expr);
}

/**
 * Analyzes both operands of a binary expression after validating its use with
 * future-typed operands. The JSON-context flag is re-applied before each operand
 * because analyzeExpr resets it.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    if (validateBinaryExpr(binaryExpr)) {
        boolean isJSONCtx = getIsJSONContext(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
        this.isJSONContext = isJSONCtx;
        analyzeExpr(binaryExpr.lhsExpr);
        this.isJSONContext = isJSONCtx;
        analyzeExpr(binaryExpr.rhsExpr);
    }
}
/**
 * Validates a binary expression involving future-typed operands: futures may only
 * be combined with {@code |} inside a wait expression (possibly through nested
 * binary expressions). Returns true when the expression is acceptable.
 */
private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) {
    // No future operand: nothing to restrict.
    if (binaryExpr.lhsExpr.type.tag != TypeTags.FUTURE && binaryExpr.rhsExpr.type.tag != TypeTags.FUTURE) {
        return true;
    }
    BLangNode parentNode = binaryExpr.parent;
    if (binaryExpr.lhsExpr.type.tag == TypeTags.FUTURE || binaryExpr.rhsExpr.type.tag == TypeTags.FUTURE) {
        if (parentNode == null) {
            return false;
        }
        // Directly under a wait expression: `f1 | f2` is the alternate-wait form.
        if (parentNode.getKind() == NodeKind.WAIT_EXPR) {
            return true;
        }
    }
    // `|` on futures anywhere else is not a supported operator.
    if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        dlog.error(binaryExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR,
                symTable.futureType);
        return false;
    }
    // Nested binary: the decision depends on the outermost expression's context.
    if (parentNode.getKind() == NodeKind.BINARY_EXPR) {
        return validateBinaryExpr((BLangBinaryExpr) parentNode);
    }
    return true;
}
/** Analyzes both operands of an elvis expression. */
public void visit(BLangElvisExpr elvisExpr) {
    analyzeExpr(elvisExpr.lhsExpr);
    analyzeExpr(elvisExpr.rhsExpr);
}

/** Analyzes the expression wrapped by a group (parenthesized) expression. */
@Override
public void visit(BLangGroupExpr groupExpr) {
    analyzeExpr(groupExpr.expression);
}

/** Analyzes the operand of a unary expression. */
public void visit(BLangUnaryExpr unaryExpr) {
    analyzeExpr(unaryExpr.expr);
}

/** Typedesc expressions need no further analysis. */
public void visit(BLangTypedescExpr accessExpr) {
    /* ignore */
}

/** Analyzes the source expression of a type conversion. */
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeExpr(conversionExpr.expr);
}

/** XML qualified names need no further analysis. */
public void visit(BLangXMLQName xmlQName) {
    /* ignore */
}

/** Analyzes the name and value of an XML attribute. */
public void visit(BLangXMLAttribute xmlAttribute) {
    analyzeExpr(xmlAttribute.name);
    analyzeExpr(xmlAttribute.value);
}

/** Analyzes tag names, attributes and children of an XML element literal. */
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    analyzeExpr(xmlElementLiteral.startTagName);
    analyzeExpr(xmlElementLiteral.endTagName);
    analyzeExprs(xmlElementLiteral.attributes);
    analyzeExprs(xmlElementLiteral.children);
}

/** Analyzes the text fragments of an XML text literal. */
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    analyzeExprs(xmlTextLiteral.textFragments);
}

/** Analyzes the text fragments of an XML comment literal. */
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    analyzeExprs(xmlCommentLiteral.textFragments);
}

/** Analyzes the data fragments and target of an XML processing-instruction literal. */
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    analyzeExprs(xmlProcInsLiteral.dataFragments);
    analyzeExpr(xmlProcInsLiteral.target);
}

/** Analyzes the text fragments of an XML quoted string. */
public void visit(BLangXMLQuotedString xmlQuotedString) {
    analyzeExprs(xmlQuotedString.textFragments);
}

/** Analyzes the interpolated expressions of a string template literal. */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    analyzeExprs(stringTemplateLiteral.exprs);
}
/**
 * Analyzes a lambda function. Lambdas generated for workers (recognized by the
 * worker-lambda variable-name prefix) additionally open/close a worker action
 * state machine around the function analysis. The statement-returns flag is
 * saved and restored so the lambda's returns do not leak into the enclosing scope.
 */
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    boolean isWorker = false;
    if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) {
        String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value;
        if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) {
            // Strip the prefix character to recover the declared worker name.
            String workerName = workerVarName.substring(1);
            isWorker = true;
            this.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName,
                    bLangLambdaFunction.function.pos,
                    bLangLambdaFunction.function);
        }
    }
    boolean statementReturn = this.statementReturns;
    this.visitFunction(bLangLambdaFunction.function);
    this.statementReturns = statementReturn;
    if (isWorker) {
        this.workerActionSystemStack.peek().endWorkerActionStateMachine();
    }
}

/** Analyzes the body expression of an arrow function. */
public void visit(BLangArrowFunction bLangArrowFunction) {
    analyzeExpr(bLangArrowFunction.expression);
}
/** Analyzes an XML attribute access; the construct is an experimental feature. */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    checkExperimentalFeatureValidity(ExperimentalFeatures.XML_ATTRIBUTES_ACCESS, xmlAttributeAccessExpr.pos);
    analyzeExpr(xmlAttributeAccessExpr.expr);
    analyzeExpr(xmlAttributeAccessExpr.indexExpr);
}

/** Analyzes the start and end bounds of an integer range expression. */
public void visit(BLangIntRangeExpression intRangeExpression) {
    analyzeExpr(intRangeExpression.startExpr);
    analyzeExpr(intRangeExpression.endExpr);
}
/* Type Nodes */

/** Analyzes record fields (when required) inside the record type's own symbol env. */
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);
    if (recordTypeNode.isFieldAnalyseRequired) {
        recordTypeNode.fields.forEach(field -> analyzeNode(field, recordEnv));
    }
}

/**
 * Analyzes object fields (when required) and all attached functions, including the
 * init function, in source order (sorted by starting line) inside the object env.
 */
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);
    if (objectTypeNode.isFieldAnalyseRequired) {
        objectTypeNode.fields.forEach(field -> analyzeNode(field, objectEnv));
    }
    Stream.concat(objectTypeNode.functions.stream(),
            Optional.ofNullable(objectTypeNode.initFunction).map(Stream::of).orElseGet(Stream::empty))
            .sorted(Comparator.comparingInt(fn -> fn.pos.sLine))
            .forEachOrdered(fn -> this.analyzeNode(fn, objectEnv));
}
/** Built-in value types need no further analysis. */
@Override
public void visit(BLangValueType valueType) {
    /* ignore */
}

/** Analyzes the element type of an array type node. */
@Override
public void visit(BLangArrayType arrayType) {
    analyzeTypeNode(arrayType.elemtype, env);
}

/** Built-in reference types need no further analysis. */
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    /* ignore */
}

/** Analyzes a constrained type; stream types are an experimental feature. */
public void visit(BLangConstrainedType constrainedType) {
    if (constrainedType.type.type.tag == TypeTags.STREAM) {
        checkExperimentalFeatureValidity(ExperimentalFeatures.STREAMS, constrainedType.pos);
    }
    analyzeTypeNode(constrainedType.constraint, env);
}

/** Analyzes the reason and detail types of an error type node. */
public void visit(BLangErrorType errorType) {
    analyzeTypeNode(errorType.reasonType, env);
    analyzeTypeNode(errorType.detailType, env);
}

/** User-defined type references need no further analysis. */
public void visit(BLangUserDefinedType userDefinedType) {
    /* Ignore */
}

/** Analyzes every member type and the rest-parameter type of a tuple type node. */
public void visit(BLangTupleTypeNode tupleTypeNode) {
    tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
    analyzeTypeNode(tupleTypeNode.restParamType, env);
}

/** Analyzes every member type of a union type node. */
public void visit(BLangUnionTypeNode unionTypeNode) {
    unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
}

/** Analyzes the parameters and return type of a function type node. */
public void visit(BLangFunctionTypeNode functionTypeNode) {
    functionTypeNode.params.forEach(node -> analyzeNode(node, env));
    analyzeTypeNode(functionTypeNode.returnTypeNode, env);
}

/** Finite type nodes need no further analysis. */
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    /* Ignore */
}

/** Table queries are an experimental feature; only the validity check is needed here. */
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    checkExperimentalFeatureValidity(ExperimentalFeatures.TABLE_QUERIES, tableQueryExpression.pos);
}

/** Analyzes the expression of a rest-args (spread) expression. */
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    analyzeExpr(bLangVarArgsExpression.expr);
}

/** Analyzes the value expression of a named argument. */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    analyzeExpr(bLangNamedArgsExpression.expr);
}

/** Match expressions need no further analysis in this pass. */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
}
/**
 * Analyzes a {@code check} expression: the enclosing invokable must declare an
 * error-compatible return type, since {@code check} returns the error to the caller.
 */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    analyzeExpr(checkedExpr.expr);
    boolean enclInvokableHasErrorReturn = false;
    // At package level there is no enclosing invokable to validate against.
    if (this.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
        return;
    }
    // Note: this is the *enclosing invokable's* declared return type, not the
    // checked expression's own type.
    BType exprType = env.enclInvokable.getReturnTypeNode().type;
    if (exprType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) env.enclInvokable.getReturnTypeNode().type;
        enclInvokableHasErrorReturn = unionType.getMemberTypes().stream()
                .anyMatch(memberType -> types.isAssignable(memberType, symTable.errorType));
    } else if (types.isAssignable(exprType, symTable.errorType)) {
        enclInvokableHasErrorReturn = true;
    }
    if (!enclInvokableHasErrorReturn) {
        dlog.error(checkedExpr.pos, DiagnosticCode.CHECKED_EXPR_NO_ERROR_RETURN_IN_ENCL_INVOKABLE);
    }
    returnTypes.peek().add(exprType);
}

/** Analyzes the expression of a {@code checkpanic} expression. */
@Override
public void visit(BLangCheckPanickedExpr checkPanicExpr) {
    analyzeExpr(checkPanicExpr.expr);
}

/** Service constructor expressions need no further analysis in this pass. */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
}
/**
 * Analyzes an {@code is} type test: flags always-true tests as unnecessary and
 * tests whose types cannot intersect (neither direct nor indirect assignability)
 * as incompatible.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    analyzeNode(typeTestExpr.expr, env);
    // Skip if either side already failed semantic analysis.
    if (typeTestExpr.typeNode.type == symTable.semanticError || typeTestExpr.expr.type == symTable.semanticError) {
        return;
    }
    // Expression type is a subtype of the tested type: the test is always true.
    if (types.isAssignable(typeTestExpr.expr.type, typeTestExpr.typeNode.type)) {
        dlog.error(typeTestExpr.pos, DiagnosticCode.UNNECESSARY_CONDITION);
        return;
    }
    // No direct assignability and no union/finite-member intersection: incompatible.
    if (!types.isAssignable(typeTestExpr.typeNode.type, typeTestExpr.expr.type) &&
            !indirectIntersectionExists(typeTestExpr.expr, typeTestExpr.typeNode.type)) {
        dlog.error(typeTestExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPE_CHECK, typeTestExpr.expr.type,
                typeTestExpr.typeNode.type);
    }
}

/** Analyzes the target expression of an annotation access. */
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    analyzeExpr(annotAccessExpr.expr);
}
/**
 * Returns true when the expression's type and the tested type can intersect
 * indirectly: some member of a union, or some value of a finite type, on either
 * side is assignable to the other side.
 */
private boolean indirectIntersectionExists(BLangExpression expression, BType testType) {
    BType expressionType = expression.type;
    // Check members/values of the expression's type against the tested type.
    switch (expressionType.tag) {
        case TypeTags.UNION:
            if (types.getTypeForUnionTypeMembersAssignableToType((BUnionType) expressionType, testType) !=
                    symTable.semanticError) {
                return true;
            }
            break;
        case TypeTags.FINITE:
            if (types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) expressionType, testType) !=
                    symTable.semanticError) {
                return true;
            }
    }
    // And the reverse direction: members/values of the tested type against the expression's type.
    switch (testType.tag) {
        case TypeTags.UNION:
            return types.getTypeForUnionTypeMembersAssignableToType((BUnionType) testType, expressionType) !=
                    symTable.semanticError;
        case TypeTags.FINITE:
            return types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) testType, expressionType) !=
                    symTable.semanticError;
    }
    return false;
}
/**
 * Analyzes a single expression node: links it into the parent chain, dispatches
 * to the matching visit method, then restores the previous parent and performs
 * the accessibility check. The JSON-context flag is reset after each expression,
 * so callers must re-set it before every child they want analyzed in JSON context.
 */
private <E extends BLangExpression> void analyzeExpr(E node) {
    if (node == null) {
        return;
    }
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    this.isJSONContext = false;
    parent = myParent;
    checkAccess(node);
}
/** Analyzes a constant declaration: its type node, value expression and exportability. */
@Override
public void visit(BLangConstant constant) {
    analyzeTypeNode(constant.typeNode, env);
    analyzeNode(constant.expr, env);
    analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos);
}
/**
 * This method checks for private symbols being accessed or used outside of package and|or private symbols being
 * used in public fields of objects/records and will fail those occurrences.
 *
 * @param node expression node to analyze
 */
private <E extends BLangExpression> void checkAccess(E node) {
    if (node.type != null) {
        checkAccessSymbol(node.type.tsymbol, node.pos);
    }
    // For invocations, also validate access to the invoked symbol itself.
    if (node.getKind() == NodeKind.INVOCATION) {
        BLangInvocation bLangInvocation = (BLangInvocation) node;
        checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos);
    }
}

/** Reports an error when a non-public symbol from another package is referenced. */
private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) {
    if (symbol == null) {
        return;
    }
    if (env.enclPkg.symbol.pkgID != symbol.pkgID && !Symbols.isPublic(symbol)) {
        dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
    }
}

/**
 * Analyzes each expression in the list.
 * NOTE(review): uses an index-based loop rather than an iterator — presumably to
 * tolerate the list growing/changing during analysis; confirm before changing.
 */
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        analyzeExpr(nodeList.get(i));
    }
}
/** Pushes a fresh worker action system for a new worker scope. */
private void initNewWorkerActionSystem() {
    this.workerActionSystemStack.push(new WorkerActionSystem());
}

/** Pops the current worker action system and validates its interactions if error-free. */
private void finalizeCurrentWorkerActionSystem() {
    WorkerActionSystem was = this.workerActionSystemStack.pop();
    if (!was.hasErrors) {
        this.validateWorkerInteractions(was);
    }
}

/** True when the node is an async worker send. */
private static boolean isWorkerSend(BLangNode action) {
    return action.getKind() == NodeKind.WORKER_SEND;
}

/** True when the node is a synchronous worker send. */
private static boolean isWorkerSyncSend(BLangNode action) {
    return action.getKind() == NodeKind.WORKER_SYNC_SEND;
}
/**
 * Returns the counterpart worker's name referenced by the given worker action
 * (async send, sync send, or receive).
 */
private String extractWorkerId(BLangNode action) {
    if (isWorkerSend(action)) {
        return ((BLangWorkerSend) action).workerIdentifier.value;
    }
    if (isWorkerSyncSend(action)) {
        return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
    }
    // Anything else recorded as a worker action is a receive.
    return ((BLangWorkerReceive) action).workerIdentifier.value;
}
/**
 * Validates send/receive pairing across all workers by running their state
 * machines to a fixpoint: any worker whose current action is a send matching
 * another worker's current receive advances both machines. If any machine is
 * left unfinished when no more progress is possible, the interaction is invalid
 * (e.g. a deadlock or an unmatched send/receive) and an error is reported.
 */
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
    BLangNode currentAction;
    boolean systemRunning;
    do {
        systemRunning = false;
        for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
            if (worker.done()) {
                continue;
            }
            currentAction = worker.currentAction();
            // Only sends can initiate progress; receives wait for a matching send.
            if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
                continue;
            }
            WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
            if (otherSM == null || !otherSM.currentIsReceive(worker.workerId)) {
                continue;
            }
            BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
            if (isWorkerSyncSend(currentAction)) {
                this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
            } else {
                this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
            }
            // Matched pair: advance both state machines and record the channel.
            otherSM.next();
            worker.next();
            systemRunning = true;
            String channelName = WorkerDataChannelInfo.generateChannelName(worker.workerId, otherSM.workerId);
            otherSM.node.sendsToThis.add(channelName);
            worker.node.sendsToThis.add(channelName);
        }
    } while (systemRunning);
    if (!workerActionSystem.everyoneDone()) {
        this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
    }
}

/** Reports an invalid-worker-interaction error at the system's root position. */
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
    this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
            workerActionSystem.toString());
}
/** Type-checks an async send against its matching receive and links the send expression. */
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
    types.checkType(receive, send.type, receive.type);
    addImplicitCast(send.type, receive);
    // A receive under trap/check must be re-checked so the wrapper sees the new type.
    NodeKind kind = receive.parent.getKind();
    if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR) {
        typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
    }
    receive.sendExpression = send.expr;
}

/** Type-checks a sync send against its matching receive and links the send expression. */
private void validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) {
    this.typeChecker.checkExpr(send.expr, send.env, receive.type);
    types.checkType(send, send.type, receive.matchingSendsError);
    addImplicitCast(send.expr.type, receive);
    receive.sendExpression = send;
}

/** Installs an implicit cast on the receive so its static type matches the sent value. */
private void addImplicitCast(BType actualType, BLangWorkerReceive receive) {
    if (receive.type != null && receive.type != symTable.semanticError) {
        types.setImplicitCastExpr(receive, actualType, receive.type);
        receive.type = actualType;
    }
}
/** True when a break/continue occurs inside a transaction but not inside a loop within it. */
private boolean checkNextBreakValidityInTransaction() {
    return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0;
}

/** True when a return occurs inside a transaction where returns are not permitted. */
private boolean checkReturnValidityInTransaction() {
    return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek())
            && transactionCount > 0;
}

/** True when the current position is not inside a retry/aborted/committed block. */
private boolean isValidTransactionBlock() {
    return !(this.withinRetryBlock || this.withinAbortedBlock || this.withinCommittedBlock);
}
/**
 * Validates the module's {@code main} function: it must be public, all parameters
 * (including the rest parameter) must be anydata, and the return type must be
 * assignable to {@code error?}.
 */
private void validateMainFunction(BLangFunction funcNode) {
    if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
        return;
    }
    if (!Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
    }
    funcNode.requiredParams.forEach(param -> {
        if (!types.isAnydata(param.type)) {
            this.dlog.error(param.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA, param.type);
        }
    });
    if (funcNode.restParam != null && !types.isAnydata(funcNode.restParam.type)) {
        this.dlog.error(funcNode.restParam.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA,
                funcNode.restParam.type);
    }
    if (!types.isAssignable(funcNode.returnTypeNode.type,
            BUnionType.create(null, symTable.nilType, symTable.errorType))) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.MAIN_RETURN_SHOULD_BE_ERROR_OR_NIL,
                funcNode.returnTypeNode.type);
    }
}
/**
 * Validates a user-defined module {@code init} function: it must not be public,
 * must take no parameters, and must return a nilable type assignable to {@code error?}.
 * Attached (object) functions named init are not module init functions and are skipped.
 */
private void validateModuleInitFunction(BLangFunction funcNode) {
    if (funcNode.attachedFunction || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value)) {
        return;
    }
    if (Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_BE_PUBLIC);
    }
    if (!funcNode.requiredParams.isEmpty() || funcNode.restParam != null) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
    }
    if (!funcNode.returnTypeNode.type.isNullable() ||
            !types.isAssignable(funcNode.returnTypeNode.type,
                    BUnionType.create(null, symTable.nilType, symTable.errorType))) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL,
                funcNode.returnTypeNode.type);
    }
}
/**
 * Reports an error for every named argument whose name repeats the name of an
 * earlier argument in the list. Duplicates are still recorded so that a third
 * occurrence is also reported.
 */
private void checkDuplicateNamedArgs(List<BLangExpression> args) {
    List<BLangIdentifier> seenNames = new ArrayList<>();
    for (BLangExpression arg : args) {
        BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
        if (seenNames.contains(namedArg.name)) {
            dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name);
        }
        seenNames.add(namedArg.name);
    }
}
/**
 * Returns true when the analyzer is currently in a JSON context — either because
 * the enclosing expression already established it, or because any of the given
 * types implies a JSON context.
 */
private boolean getIsJSONContext(BType... arg) {
    boolean jsonContext = this.isJSONContext;
    for (int i = 0; i < arg.length && !jsonContext; i++) {
        jsonContext = types.isJSONContext(arg[i]);
    }
    return jsonContext;
}
/**
 * This class contains the state machines for a set of workers.
 */
private static class WorkerActionSystem {

    // State machines of workers whose bodies have been fully visited. (sic: "finshed")
    public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>();
    // State machines of workers currently being visited (innermost on top).
    private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>();
    // Set when analysis of any worker in this system produced errors.
    private boolean hasErrors = false;

    /** Opens a new state machine for the worker being entered. */
    public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos, BLangFunction node) {
        workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node));
    }

    /** Closes the current worker's state machine and moves it to the finished list. */
    public void endWorkerActionStateMachine() {
        finshedWorkers.add(workerActionStateMachines.pop());
    }

    /** Records a send/receive action for the worker currently being visited. */
    public void addWorkerAction(BLangNode action) {
        this.workerActionStateMachines.peek().actions.add(action);
    }

    /**
     * Looks up a finished worker's state machine by id.
     * NOTE(review): this throws AssertionError when the worker is absent, yet the
     * caller in validateWorkerInteractions null-checks the result — one of the two
     * appears to be dead/contradictory; confirm intended behavior before changing.
     */
    public WorkerActionStateMachine find(String workerId) {
        for (WorkerActionStateMachine worker : this.finshedWorkers) {
            if (worker.workerId.equals(workerId)) {
                return worker;
            }
        }
        throw new AssertionError("Reference to non existing worker " + workerId);
    }

    /** True when every finished worker's state machine has consumed all its actions. */
    public boolean everyoneDone() {
        return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done);
    }

    /** Position of the first finished worker, used for system-level diagnostics. */
    public DiagnosticPos getRootPosition() {
        return this.finshedWorkers.iterator().next().pos;
    }

    @Override
    public String toString() {
        return this.finshedWorkers.toString();
    }

    /** Id of the worker currently being visited. */
    public String currentWorkerId() {
        return workerActionStateMachines.peek().workerId;
    }
}
/**
 * This class represents a state machine to maintain the state of the send/receive
 * actions of a worker.
 */
private static class WorkerActionStateMachine {

    private static final String WORKER_SM_FINISHED = "FINISHED";

    // Index of the next unmatched action in `actions`.
    public int currentState;
    // Send/receive actions of this worker, in source order.
    public List<BLangNode> actions = new ArrayList<>();
    public DiagnosticPos pos;
    public String workerId;
    public BLangFunction node;

    public WorkerActionStateMachine(DiagnosticPos pos, String workerId, BLangFunction node) {
        this.pos = pos;
        this.workerId = workerId;
        this.node = node;
    }

    /** True when every action of this worker has been matched. */
    public boolean done() {
        return this.actions.size() == this.currentState;
    }

    /** The next unmatched action. Only valid while {@link #done()} is false. */
    public BLangNode currentAction() {
        return this.actions.get(this.currentState);
    }

    /** True when the next unmatched action is a receive from the given worker. */
    public boolean currentIsReceive(String sourceWorkerId) {
        if (this.done()) {
            return false;
        }
        BLangNode action = this.currentAction();
        // Not a send of either kind => it is a receive; check its source worker.
        return !isWorkerSend(action) && !isWorkerSyncSend(action) &&
                ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
    }

    /** Advances past the current (matched) action. */
    public void next() {
        this.currentState++;
    }

    @Override
    public String toString() {
        if (this.done()) {
            return WORKER_SM_FINISHED;
        } else {
            BLangNode action = this.currentAction();
            if (isWorkerSend(action)) {
                return ((BLangWorkerSend) action).toActionString();
            } else if (isWorkerSyncSend(action)) {
                return ((BLangWorkerSyncSendExpr) action).toActionString();
            } else {
                return ((BLangWorkerReceive) action).toActionString();
            }
        }
    }
}
/**
 * Reports an error when an experimental language construct is used without the
 * experimental-features compiler flag enabled.
 */
private void checkExperimentalFeatureValidity(ExperimentalFeatures constructName, DiagnosticPos pos) {
    if (enableExperimentalFeatures) {
        return;
    }
    dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName.value);
}

/**
 * Experimental feature list for JBallerina 1.0.0.
 *
 * @since JBallerina 1.0.0
 */
private enum ExperimentalFeatures {
    STREAMS("stream"),
    TABLE_QUERIES("table queries"),
    STREAMING_QUERIES("streaming queries"),
    TRANSACTIONS("transaction"),
    LOCK("lock"),
    XML_ACCESS("xml access expression"),
    XML_ATTRIBUTES_ACCESS("xml attribute expression"),
    ;
    // Human-readable construct name used in diagnostics.
    private String value;

    private ExperimentalFeatures(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }
}
}
|
|
(although it's not like the test was actually verifying this before...)
|
/**
 * Submits a no-op job to a persisting mini cluster and waits (bounded by a
 * 5-second deadline) until the job reaches FINISHED before shutting the cluster
 * down, so the shutdown path sees a terminal job in the store.
 */
public void testPutSuspendedJobOnClusterShutdown() throws Exception {
    final Duration timeout = Duration.ofSeconds(5);
    try (final MiniCluster miniCluster =
            new PersistingMiniCluster(new MiniClusterConfiguration.Builder().build())) {
        miniCluster.start();
        final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();
        final JobID jobId = jobGraph.getJobID();
        miniCluster.submitJob(jobGraph);
        CommonTestUtils.waitUntilCondition(
                () -> {
                    try {
                        return miniCluster.getJobStatus(jobId).get() == JobStatus.FINISHED;
                    } catch (Exception e) {
                        // The job may not be registered yet right after submission;
                        // treat "job not found" as "not finished yet" and keep polling.
                        if (ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class)
                                .isPresent()) {
                            return false;
                        }
                        throw e;
                    }
                },
                Deadline.fromNow(timeout));
    }
}
|
return miniCluster.getJobStatus(jobId).get() == JobStatus.FINISHED;
|
/**
 * Submits a streaming job with a blocking vertex to a persisting mini cluster
 * and waits until the task has actually started (via the invokable's latch)
 * before closing the cluster, so shutdown happens while the job is still running.
 */
public void testPutSuspendedJobOnClusterShutdown() throws Exception {
    try (final MiniCluster miniCluster =
            new PersistingMiniCluster(new MiniClusterConfiguration.Builder().build())) {
        miniCluster.start();
        final JobVertex vertex = new JobVertex("blockingVertex");
        // Signals via a latch once running, then blocks until cancelled.
        vertex.setInvokableClass(SignallingBlockingNoOpInvokable.class);
        final JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(vertex);
        miniCluster.submitJob(jobGraph);
        SignallingBlockingNoOpInvokable.LATCH.await();
    }
}
|
class FileExecutionGraphInfoStoreTest extends TestLogger {
private static final List<JobStatus> GLOBALLY_TERMINAL_JOB_STATUS =
Arrays.stream(JobStatus.values())
.filter(JobStatus::isGloballyTerminalState)
.collect(Collectors.toList());
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
/**
 * Tests that we can put {@link ExecutionGraphInfo} into the {@link FileExecutionGraphInfoStore}
 * and that the graph is persisted.
 */
@Test
public void testPut() throws IOException {
    assertPutJobGraphWithStatus(JobStatus.FINISHED);
}

/** Tests that a SUSPENDED job can be persisted. */
@Test
public void testPutSuspendedJob() throws IOException {
    assertPutJobGraphWithStatus(JobStatus.SUSPENDED);
}
/** Tests that null is returned if we request an unknown JobID. */
@Test
public void testUnknownGet() throws IOException {
    final File rootDir = temporaryFolder.newFolder();
    try (final FileExecutionGraphInfoStore executionGraphStore =
            createDefaultExecutionGraphInfoStore(rootDir)) {
        assertThat(executionGraphStore.get(new JobID()), Matchers.nullValue());
    }
}
/** Tests that we obtain the correct jobs overview. */
@Test
public void testStoredJobsOverview() throws IOException {
    final int numberExecutionGraphs = 10;
    final Collection<ExecutionGraphInfo> executionGraphInfos =
            generateTerminalExecutionGraphInfos(numberExecutionGraphs);
    // Expected overview is derived from the states of the generated graphs.
    final List<JobStatus> jobStatuses =
            executionGraphInfos.stream()
                    .map(ExecutionGraphInfo::getArchivedExecutionGraph)
                    .map(ArchivedExecutionGraph::getState)
                    .collect(Collectors.toList());
    final JobsOverview expectedJobsOverview = JobsOverview.create(jobStatuses);
    final File rootDir = temporaryFolder.newFolder();
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            createDefaultExecutionGraphInfoStore(rootDir)) {
        for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
            executionGraphInfoStore.put(executionGraphInfo);
        }
        assertThat(
                executionGraphInfoStore.getStoredJobsOverview(),
                Matchers.equalTo(expectedJobsOverview));
    }
}
/** Tests that we obtain the correct collection of available job details. */
@Test
public void testAvailableJobDetails() throws IOException {
    final int numberExecutionGraphs = 10;
    final Collection<ExecutionGraphInfo> executionGraphInfos =
            generateTerminalExecutionGraphInfos(numberExecutionGraphs);
    final Collection<JobDetails> jobDetails = generateJobDetails(executionGraphInfos);
    final File rootDir = temporaryFolder.newFolder();
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            createDefaultExecutionGraphInfoStore(rootDir)) {
        for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
            executionGraphInfoStore.put(executionGraphInfo);
        }
        // Order is not guaranteed by the store, so compare as an unordered collection.
        assertThat(
                executionGraphInfoStore.getAvailableJobDetails(),
                Matchers.containsInAnyOrder(jobDetails.toArray()));
    }
}
/** Tests that an expired execution graph is removed from the execution graph store. */
@Test
public void testExecutionGraphExpiration() throws Exception {
    final File rootDir = temporaryFolder.newFolder();
    final Time expirationTime = Time.milliseconds(1L);
    // Manual executor/ticker give the test full control over expiry timing.
    final ManuallyTriggeredScheduledExecutor scheduledExecutor =
            new ManuallyTriggeredScheduledExecutor();
    final ManualTicker manualTicker = new ManualTicker();
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            new FileExecutionGraphInfoStore(
                    rootDir,
                    expirationTime,
                    Integer.MAX_VALUE,
                    10000L,
                    scheduledExecutor,
                    manualTicker)) {
        final ExecutionGraphInfo executionGraphInfo =
                new ExecutionGraphInfo(
                        new ArchivedExecutionGraphBuilder()
                                .setState(JobStatus.FINISHED)
                                .build());
        executionGraphInfoStore.put(executionGraphInfo);
        assertThat(executionGraphInfoStore.size(), Matchers.equalTo(1));
        // Advance past the expiration time and run the scheduled cleanup.
        manualTicker.advanceTime(expirationTime.toMilliseconds(), TimeUnit.MILLISECONDS);
        scheduledExecutor.triggerScheduledTasks();
        assertThat(executionGraphInfoStore.size(), Matchers.equalTo(0));
        assertThat(
                executionGraphInfoStore.get(executionGraphInfo.getJobId()),
                Matchers.nullValue());
        // The persisted file must be deleted together with the cache entry.
        final File storageDirectory = executionGraphInfoStore.getStorageDir();
        assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
    }
}
/** Tests that all persisted files are cleaned up after closing the store. */
@Test
public void testCloseCleansUp() throws IOException {
    final File rootDir = temporaryFolder.newFolder();
    assertThat(rootDir.listFiles().length, Matchers.equalTo(0));
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            createDefaultExecutionGraphInfoStore(rootDir)) {
        // Creating the store creates exactly one storage directory under rootDir.
        assertThat(rootDir.listFiles().length, Matchers.equalTo(1));
        final File storageDirectory = executionGraphInfoStore.getStorageDir();
        assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
        executionGraphInfoStore.put(
                new ExecutionGraphInfo(
                        new ArchivedExecutionGraphBuilder()
                                .setState(JobStatus.FINISHED)
                                .build()));
        assertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));
    }
    // close() must remove the storage directory and its contents.
    assertThat(rootDir.listFiles().length, Matchers.equalTo(0));
}
/** Tests that evicted {@link ExecutionGraphInfo} are loaded from disk again. */
@Test
public void testCacheLoading() throws IOException {
    final File rootDir = temporaryFolder.newFolder();
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            new FileExecutionGraphInfoStore(
                    rootDir,
                    Time.hours(1L),
                    Integer.MAX_VALUE,
                    100L << 10, // small (100 KiB) cache capacity to force evictions
                    TestingUtils.defaultScheduledExecutor(),
                    Ticker.systemTicker())) {
        final LoadingCache<JobID, ExecutionGraphInfo> executionGraphInfoCache =
                executionGraphInfoStore.getExecutionGraphInfoCache();
        Collection<ExecutionGraphInfo> executionGraphInfos = new ArrayList<>(64);
        // Keep inserting until the cache evicts (cache size lags behind inserts).
        boolean continueInserting = true;
        while (continueInserting) {
            final ExecutionGraphInfo executionGraphInfo =
                    new ExecutionGraphInfo(
                            new ArchivedExecutionGraphBuilder()
                                    .setState(JobStatus.FINISHED)
                                    .build());
            executionGraphInfoStore.put(executionGraphInfo);
            executionGraphInfos.add(executionGraphInfo);
            continueInserting = executionGraphInfoCache.size() == executionGraphInfos.size();
        }
        // All graphs are persisted, even those evicted from the in-memory cache...
        final File storageDirectory = executionGraphInfoStore.getStorageDir();
        assertThat(
                storageDirectory.listFiles().length,
                Matchers.equalTo(executionGraphInfos.size()));
        // ...and every one can be read back through the store.
        for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
            assertThat(
                    executionGraphInfoStore.get(executionGraphInfo.getJobId()),
                    matchesPartiallyWith(executionGraphInfo));
        }
    }
}
/**
 * Tests that the size of {@link FileExecutionGraphInfoStore} is no more than the configured max
 * capacity and the old execution graphs will be purged if the total added number exceeds the
 * max capacity.
 */
@Test
public void testMaximumCapacity() throws IOException {
    final File rootDir = temporaryFolder.newFolder();
    final int maxCapacity = 10;
    final int numberExecutionGraphs = 10;
    final Collection<ExecutionGraphInfo> oldExecutionGraphInfos =
            generateTerminalExecutionGraphInfos(numberExecutionGraphs);
    final Collection<ExecutionGraphInfo> newExecutionGraphInfos =
            generateTerminalExecutionGraphInfos(numberExecutionGraphs);
    // Only the newest maxCapacity entries are expected to survive.
    final Collection<JobDetails> jobDetails = generateJobDetails(newExecutionGraphInfos);
    try (final FileExecutionGraphInfoStore executionGraphInfoStore =
            new FileExecutionGraphInfoStore(
                    rootDir,
                    Time.hours(1L),
                    maxCapacity,
                    10000L,
                    TestingUtils.defaultScheduledExecutor(),
                    Ticker.systemTicker())) {
        for (ExecutionGraphInfo executionGraphInfo : oldExecutionGraphInfos) {
            executionGraphInfoStore.put(executionGraphInfo);
            assertTrue(executionGraphInfoStore.size() <= maxCapacity);
        }
        // Adding beyond capacity keeps the size pinned at maxCapacity.
        for (ExecutionGraphInfo executionGraphInfo : newExecutionGraphInfos) {
            executionGraphInfoStore.put(executionGraphInfo);
            assertEquals(maxCapacity, executionGraphInfoStore.size());
        }
        assertThat(
                executionGraphInfoStore.getAvailableJobDetails(),
                Matchers.containsInAnyOrder(jobDetails.toArray()));
    }
}
/**
 * {@link MiniCluster} variant whose dispatcher components are backed by a file-based {@link
 * ExecutionGraphInfoStore} so execution graphs are persisted to disk.
 *
 * <p>Note: a stray {@code @Test} annotation previously attached to this class was removed;
 * JUnit 4's {@code @Test} is only applicable to methods and does not compile on a class.
 */
private class PersistingMiniCluster extends MiniCluster {

    PersistingMiniCluster(MiniClusterConfiguration miniClusterConfiguration) {
        super(miniClusterConfiguration);
    }

    @Override
    protected Collection<? extends DispatcherResourceManagerComponent>
            createDispatcherResourceManagerComponents(
                    Configuration configuration,
                    RpcServiceFactory rpcServiceFactory,
                    HighAvailabilityServices haServices,
                    BlobServer blobServer,
                    HeartbeatServices heartbeatServices,
                    MetricRegistry metricRegistry,
                    MetricQueryServiceRetriever metricQueryServiceRetriever,
                    FatalErrorHandler fatalErrorHandler)
                    throws Exception {
        final DispatcherResourceManagerComponentFactory
                dispatcherResourceManagerComponentFactory =
                        DefaultDispatcherResourceManagerComponentFactory
                                .createSessionComponentFactory(
                                        StandaloneResourceManagerFactory.getInstance());
        // Back the dispatcher with a file-based execution graph store in a fresh directory.
        final File rootDir = temporaryFolder.newFolder();
        final ExecutionGraphInfoStore executionGraphInfoStore =
                createDefaultExecutionGraphInfoStore(rootDir);
        return Collections.singleton(
                dispatcherResourceManagerComponentFactory.create(
                        configuration,
                        getIOExecutor(),
                        rpcServiceFactory.createRpcService(),
                        haServices,
                        blobServer,
                        heartbeatServices,
                        metricRegistry,
                        executionGraphInfoStore,
                        metricQueryServiceRetriever,
                        fatalErrorHandler));
    }
}
/**
 * Creates {@code number} execution graph infos, each in a randomly chosen globally terminal
 * job status.
 */
private Collection<ExecutionGraphInfo> generateTerminalExecutionGraphInfos(int number) {
    final Collection<ExecutionGraphInfo> result = new ArrayList<>(number);
    for (int count = 0; count < number; count++) {
        // Pick a random globally terminal status for this graph.
        final int randomIndex =
                ThreadLocalRandom.current().nextInt(GLOBALLY_TERMINAL_JOB_STATUS.size());
        final JobStatus terminalState = GLOBALLY_TERMINAL_JOB_STATUS.get(randomIndex);
        result.add(
                new ExecutionGraphInfo(
                        new ArchivedExecutionGraphBuilder().setState(terminalState).build()));
    }
    return result;
}
/**
 * Creates a {@link FileExecutionGraphInfoStore} backed by the given directory with effectively
 * unbounded capacity and a long expiration time, so entries survive for the whole test.
 */
private FileExecutionGraphInfoStore createDefaultExecutionGraphInfoStore(File storageDirectory)
throws IOException {
return new FileExecutionGraphInfoStore(
storageDirectory,
Time.hours(1L), // expiration time
Integer.MAX_VALUE, // maximum capacity
10000L, // maximum cache size in bytes
TestingUtils.defaultScheduledExecutor(),
Ticker.systemTicker());
}
/**
 * Matcher comparing two {@link ExecutionGraphInfo} instances on a subset of their properties;
 * the vertex collections are compared by size only, not element by element.
 */
private static final class PartialExecutionGraphInfoMatcher
extends BaseMatcher<ExecutionGraphInfo> {
// The expected instance against which candidates are compared.
private final ExecutionGraphInfo expectedExecutionGraphInfo;
private PartialExecutionGraphInfoMatcher(ExecutionGraphInfo expectedExecutionGraphInfo) {
this.expectedExecutionGraphInfo =
Preconditions.checkNotNull(expectedExecutionGraphInfo);
}
@Override
public boolean matches(Object o) {
// Identity shortcut.
if (expectedExecutionGraphInfo == o) {
return true;
}
if (o == null || expectedExecutionGraphInfo.getClass() != o.getClass()) {
return false;
}
ExecutionGraphInfo that = (ExecutionGraphInfo) o;
ArchivedExecutionGraph thisExecutionGraph =
expectedExecutionGraphInfo.getArchivedExecutionGraph();
ArchivedExecutionGraph thatExecutionGraph = that.getArchivedExecutionGraph();
// Field-by-field comparison of the archived graph plus the exception history.
return thisExecutionGraph.isStoppable() == thatExecutionGraph.isStoppable()
&& Objects.equals(thisExecutionGraph.getJobID(), thatExecutionGraph.getJobID())
&& Objects.equals(
thisExecutionGraph.getJobName(), thatExecutionGraph.getJobName())
&& thisExecutionGraph.getState() == thatExecutionGraph.getState()
&& Objects.equals(
thisExecutionGraph.getJsonPlan(), thatExecutionGraph.getJsonPlan())
&& Objects.equals(
thisExecutionGraph.getAccumulatorsSerialized(),
thatExecutionGraph.getAccumulatorsSerialized())
&& Objects.equals(
thisExecutionGraph.getCheckpointCoordinatorConfiguration(),
thatExecutionGraph.getCheckpointCoordinatorConfiguration())
&& thisExecutionGraph.getAllVertices().size()
== thatExecutionGraph.getAllVertices().size()
&& Objects.equals(
expectedExecutionGraphInfo.getExceptionHistory(),
that.getExceptionHistory());
}
@Override
public void describeTo(Description description) {
description.appendText(
"Matches against " + ExecutionGraphInfo.class.getSimpleName() + '.');
}
}
/**
 * Puts an execution graph with the given status into a fresh store and verifies that exactly
 * one file is written and the graph can be read back.
 */
private void assertPutJobGraphWithStatus(JobStatus jobStatus) throws IOException {
    final ExecutionGraphInfo graphInfo =
            new ExecutionGraphInfo(
                    new ArchivedExecutionGraphBuilder().setState(jobStatus).build());
    final File rootDir = temporaryFolder.newFolder();
    try (final FileExecutionGraphInfoStore store =
            createDefaultExecutionGraphInfoStore(rootDir)) {
        final File storageDirectory = store.getStorageDir();
        // The storage directory starts out empty.
        assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
        store.put(graphInfo);
        // Exactly one file must have been persisted for the stored graph.
        assertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));
        assertThat(
                store.get(graphInfo.getJobId()),
                new PartialExecutionGraphInfoMatcher(graphInfo));
    }
}
/** Convenience factory for {@link PartialExecutionGraphInfoMatcher}. */
private static Matcher<ExecutionGraphInfo> matchesPartiallyWith(
ExecutionGraphInfo executionGraphInfo) {
return new PartialExecutionGraphInfoMatcher(executionGraphInfo);
}
/** Derives the {@link JobDetails} for each archived execution graph. */
private static Collection<JobDetails> generateJobDetails(
        Collection<ExecutionGraphInfo> executionGraphInfos) {
    final List<JobDetails> details = new ArrayList<>(executionGraphInfos.size());
    for (ExecutionGraphInfo info : executionGraphInfos) {
        details.add(JobDetails.createDetailsForJob(info.getArchivedExecutionGraph()));
    }
    return details;
}
}
|
class FileExecutionGraphInfoStoreTest extends TestLogger {
private static final List<JobStatus> GLOBALLY_TERMINAL_JOB_STATUS =
Arrays.stream(JobStatus.values())
.filter(JobStatus::isGloballyTerminalState)
.collect(Collectors.toList());
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
/**
* Tests that we can put {@link ExecutionGraphInfo} into the {@link FileExecutionGraphInfoStore}
* and that the graph is persisted.
*/
@Test
public void testPut() throws IOException {
assertPutJobGraphWithStatus(JobStatus.FINISHED);
}
/** Tests that a SUSPENDED job can be persisted. */
@Test
public void testPutSuspendedJob() throws IOException {
assertPutJobGraphWithStatus(JobStatus.SUSPENDED);
}
/** Tests that null is returned if we request an unknown JobID. */
@Test
public void testUnknownGet() throws IOException {
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphStore =
createDefaultExecutionGraphInfoStore(rootDir)) {
assertThat(executionGraphStore.get(new JobID()), Matchers.nullValue());
}
}
/** Tests that we obtain the correct jobs overview. */
@Test
public void testStoredJobsOverview() throws IOException {
final int numberExecutionGraphs = 10;
final Collection<ExecutionGraphInfo> executionGraphInfos =
generateTerminalExecutionGraphInfos(numberExecutionGraphs);
final List<JobStatus> jobStatuses =
executionGraphInfos.stream()
.map(ExecutionGraphInfo::getArchivedExecutionGraph)
.map(ArchivedExecutionGraph::getState)
.collect(Collectors.toList());
final JobsOverview expectedJobsOverview = JobsOverview.create(jobStatuses);
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
createDefaultExecutionGraphInfoStore(rootDir)) {
for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
executionGraphInfoStore.put(executionGraphInfo);
}
assertThat(
executionGraphInfoStore.getStoredJobsOverview(),
Matchers.equalTo(expectedJobsOverview));
}
}
/** Tests that we obtain the correct collection of available job details. */
@Test
public void testAvailableJobDetails() throws IOException {
final int numberExecutionGraphs = 10;
final Collection<ExecutionGraphInfo> executionGraphInfos =
generateTerminalExecutionGraphInfos(numberExecutionGraphs);
final Collection<JobDetails> jobDetails = generateJobDetails(executionGraphInfos);
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
createDefaultExecutionGraphInfoStore(rootDir)) {
for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
executionGraphInfoStore.put(executionGraphInfo);
}
assertThat(
executionGraphInfoStore.getAvailableJobDetails(),
Matchers.containsInAnyOrder(jobDetails.toArray()));
}
}
/** Tests that an expired execution graph is removed from the execution graph store. */
@Test
public void testExecutionGraphExpiration() throws Exception {
final File rootDir = temporaryFolder.newFolder();
final Time expirationTime = Time.milliseconds(1L);
final ManuallyTriggeredScheduledExecutor scheduledExecutor =
new ManuallyTriggeredScheduledExecutor();
final ManualTicker manualTicker = new ManualTicker();
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
new FileExecutionGraphInfoStore(
rootDir,
expirationTime,
Integer.MAX_VALUE,
10000L,
scheduledExecutor,
manualTicker)) {
final ExecutionGraphInfo executionGraphInfo =
new ExecutionGraphInfo(
new ArchivedExecutionGraphBuilder()
.setState(JobStatus.FINISHED)
.build());
executionGraphInfoStore.put(executionGraphInfo);
assertThat(executionGraphInfoStore.size(), Matchers.equalTo(1));
manualTicker.advanceTime(expirationTime.toMilliseconds(), TimeUnit.MILLISECONDS);
scheduledExecutor.triggerScheduledTasks();
assertThat(executionGraphInfoStore.size(), Matchers.equalTo(0));
assertThat(
executionGraphInfoStore.get(executionGraphInfo.getJobId()),
Matchers.nullValue());
final File storageDirectory = executionGraphInfoStore.getStorageDir();
assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
}
}
/** Tests that all persisted files are cleaned up after closing the store. */
@Test
public void testCloseCleansUp() throws IOException {
final File rootDir = temporaryFolder.newFolder();
assertThat(rootDir.listFiles().length, Matchers.equalTo(0));
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
createDefaultExecutionGraphInfoStore(rootDir)) {
assertThat(rootDir.listFiles().length, Matchers.equalTo(1));
final File storageDirectory = executionGraphInfoStore.getStorageDir();
assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
executionGraphInfoStore.put(
new ExecutionGraphInfo(
new ArchivedExecutionGraphBuilder()
.setState(JobStatus.FINISHED)
.build()));
assertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));
}
assertThat(rootDir.listFiles().length, Matchers.equalTo(0));
}
/** Tests that evicted {@link ExecutionGraphInfo} are loaded from disk again. */
@Test
public void testCacheLoading() throws IOException {
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
new FileExecutionGraphInfoStore(
rootDir,
Time.hours(1L),
Integer.MAX_VALUE,
100L << 10,
TestingUtils.defaultScheduledExecutor(),
Ticker.systemTicker())) {
final LoadingCache<JobID, ExecutionGraphInfo> executionGraphInfoCache =
executionGraphInfoStore.getExecutionGraphInfoCache();
Collection<ExecutionGraphInfo> executionGraphInfos = new ArrayList<>(64);
boolean continueInserting = true;
while (continueInserting) {
final ExecutionGraphInfo executionGraphInfo =
new ExecutionGraphInfo(
new ArchivedExecutionGraphBuilder()
.setState(JobStatus.FINISHED)
.build());
executionGraphInfoStore.put(executionGraphInfo);
executionGraphInfos.add(executionGraphInfo);
continueInserting = executionGraphInfoCache.size() == executionGraphInfos.size();
}
final File storageDirectory = executionGraphInfoStore.getStorageDir();
assertThat(
storageDirectory.listFiles().length,
Matchers.equalTo(executionGraphInfos.size()));
for (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {
assertThat(
executionGraphInfoStore.get(executionGraphInfo.getJobId()),
matchesPartiallyWith(executionGraphInfo));
}
}
}
/**
* Tests that the size of {@link FileExecutionGraphInfoStore} is no more than the configured max
* capacity and the old execution graphs will be purged if the total added number exceeds the
* max capacity.
*/
@Test
public void testMaximumCapacity() throws IOException {
final File rootDir = temporaryFolder.newFolder();
final int maxCapacity = 10;
final int numberExecutionGraphs = 10;
final Collection<ExecutionGraphInfo> oldExecutionGraphInfos =
generateTerminalExecutionGraphInfos(numberExecutionGraphs);
final Collection<ExecutionGraphInfo> newExecutionGraphInfos =
generateTerminalExecutionGraphInfos(numberExecutionGraphs);
final Collection<JobDetails> jobDetails = generateJobDetails(newExecutionGraphInfos);
try (final FileExecutionGraphInfoStore executionGraphInfoStore =
new FileExecutionGraphInfoStore(
rootDir,
Time.hours(1L),
maxCapacity,
10000L,
TestingUtils.defaultScheduledExecutor(),
Ticker.systemTicker())) {
for (ExecutionGraphInfo executionGraphInfo : oldExecutionGraphInfos) {
executionGraphInfoStore.put(executionGraphInfo);
assertTrue(executionGraphInfoStore.size() <= maxCapacity);
}
for (ExecutionGraphInfo executionGraphInfo : newExecutionGraphInfos) {
executionGraphInfoStore.put(executionGraphInfo);
assertEquals(maxCapacity, executionGraphInfoStore.size());
}
assertThat(
executionGraphInfoStore.getAvailableJobDetails(),
Matchers.containsInAnyOrder(jobDetails.toArray()));
}
}
/**
 * Invokable which signals its invocation via {@link SignallingBlockingNoOpInvokable#LATCH} and
 * then blocks forever.
 *
 * <p>A stray {@code @Test} annotation and a dangling test javadoc previously preceded this
 * class; JUnit 4's {@code @Test} is only applicable to methods and does not compile on a class,
 * and the unterminated {@code {@link}} tag has been repaired.
 */
public static class SignallingBlockingNoOpInvokable extends AbstractInvokable {
    /** Latch used to signal an initial invocation. */
    public static final OneShotLatch LATCH = new OneShotLatch();

    public SignallingBlockingNoOpInvokable(Environment environment) {
        super(environment);
    }

    @Override
    public void invoke() throws Exception {
        LATCH.trigger();
        // Block "forever"; the task is expected to be cancelled externally.
        Thread.sleep(Long.MAX_VALUE);
    }
}
private class PersistingMiniCluster extends MiniCluster {
PersistingMiniCluster(MiniClusterConfiguration miniClusterConfiguration) {
super(miniClusterConfiguration);
}
@Override
protected Collection<? extends DispatcherResourceManagerComponent>
createDispatcherResourceManagerComponents(
Configuration configuration,
RpcServiceFactory rpcServiceFactory,
HighAvailabilityServices haServices,
BlobServer blobServer,
HeartbeatServices heartbeatServices,
MetricRegistry metricRegistry,
MetricQueryServiceRetriever metricQueryServiceRetriever,
FatalErrorHandler fatalErrorHandler)
throws Exception {
final DispatcherResourceManagerComponentFactory
dispatcherResourceManagerComponentFactory =
DefaultDispatcherResourceManagerComponentFactory
.createSessionComponentFactory(
StandaloneResourceManagerFactory.getInstance());
final File rootDir = temporaryFolder.newFolder();
final ExecutionGraphInfoStore executionGraphInfoStore =
createDefaultExecutionGraphInfoStore(rootDir);
return Collections.singleton(
dispatcherResourceManagerComponentFactory.create(
configuration,
getIOExecutor(),
rpcServiceFactory.createRpcService(),
haServices,
blobServer,
heartbeatServices,
metricRegistry,
executionGraphInfoStore,
metricQueryServiceRetriever,
fatalErrorHandler));
}
}
private Collection<ExecutionGraphInfo> generateTerminalExecutionGraphInfos(int number) {
final Collection<ExecutionGraphInfo> executionGraphInfos = new ArrayList<>(number);
for (int i = 0; i < number; i++) {
final JobStatus state =
GLOBALLY_TERMINAL_JOB_STATUS.get(
ThreadLocalRandom.current()
.nextInt(GLOBALLY_TERMINAL_JOB_STATUS.size()));
executionGraphInfos.add(
new ExecutionGraphInfo(
new ArchivedExecutionGraphBuilder().setState(state).build()));
}
return executionGraphInfos;
}
private FileExecutionGraphInfoStore createDefaultExecutionGraphInfoStore(File storageDirectory)
throws IOException {
return new FileExecutionGraphInfoStore(
storageDirectory,
Time.hours(1L),
Integer.MAX_VALUE,
10000L,
TestingUtils.defaultScheduledExecutor(),
Ticker.systemTicker());
}
private static final class PartialExecutionGraphInfoMatcher
extends BaseMatcher<ExecutionGraphInfo> {
private final ExecutionGraphInfo expectedExecutionGraphInfo;
private PartialExecutionGraphInfoMatcher(ExecutionGraphInfo expectedExecutionGraphInfo) {
this.expectedExecutionGraphInfo =
Preconditions.checkNotNull(expectedExecutionGraphInfo);
}
@Override
public boolean matches(Object o) {
if (expectedExecutionGraphInfo == o) {
return true;
}
if (o == null || expectedExecutionGraphInfo.getClass() != o.getClass()) {
return false;
}
ExecutionGraphInfo that = (ExecutionGraphInfo) o;
ArchivedExecutionGraph thisExecutionGraph =
expectedExecutionGraphInfo.getArchivedExecutionGraph();
ArchivedExecutionGraph thatExecutionGraph = that.getArchivedExecutionGraph();
return thisExecutionGraph.isStoppable() == thatExecutionGraph.isStoppable()
&& Objects.equals(thisExecutionGraph.getJobID(), thatExecutionGraph.getJobID())
&& Objects.equals(
thisExecutionGraph.getJobName(), thatExecutionGraph.getJobName())
&& thisExecutionGraph.getState() == thatExecutionGraph.getState()
&& Objects.equals(
thisExecutionGraph.getJsonPlan(), thatExecutionGraph.getJsonPlan())
&& Objects.equals(
thisExecutionGraph.getAccumulatorsSerialized(),
thatExecutionGraph.getAccumulatorsSerialized())
&& Objects.equals(
thisExecutionGraph.getCheckpointCoordinatorConfiguration(),
thatExecutionGraph.getCheckpointCoordinatorConfiguration())
&& thisExecutionGraph.getAllVertices().size()
== thatExecutionGraph.getAllVertices().size()
&& Objects.equals(
expectedExecutionGraphInfo.getExceptionHistory(),
that.getExceptionHistory());
}
@Override
public void describeTo(Description description) {
description.appendText(
"Matches against " + ExecutionGraphInfo.class.getSimpleName() + '.');
}
}
private void assertPutJobGraphWithStatus(JobStatus jobStatus) throws IOException {
final ExecutionGraphInfo dummyExecutionGraphInfo =
new ExecutionGraphInfo(
new ArchivedExecutionGraphBuilder().setState(jobStatus).build());
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphStore =
createDefaultExecutionGraphInfoStore(rootDir)) {
final File storageDirectory = executionGraphStore.getStorageDir();
assertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));
executionGraphStore.put(dummyExecutionGraphInfo);
assertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));
assertThat(
executionGraphStore.get(dummyExecutionGraphInfo.getJobId()),
new PartialExecutionGraphInfoMatcher(dummyExecutionGraphInfo));
}
}
private static Matcher<ExecutionGraphInfo> matchesPartiallyWith(
ExecutionGraphInfo executionGraphInfo) {
return new PartialExecutionGraphInfoMatcher(executionGraphInfo);
}
private static Collection<JobDetails> generateJobDetails(
Collection<ExecutionGraphInfo> executionGraphInfos) {
return executionGraphInfos.stream()
.map(ExecutionGraphInfo::getArchivedExecutionGraph)
.map(JobDetails::createDetailsForJob)
.collect(Collectors.toList());
}
}
|
nit: consider using the same exception message here as in the case above. New type kinds will inevitably be added without this list being updated, and the different wording for "unknown" versus "not allowed" types could confuse users.
|
/**
 * Throws {@link UnsupportedOperationException} if the ZetaSQL type is not allowed in Java UDFs.
 * Supported types are a subset of the types supported by {@code BeamJavaUdfCalcRule}.
 *
 * <p>Known-but-unsupported kinds and unrecognized kinds are rejected with the same message, so
 * newly introduced ZetaSQL type kinds are reported consistently without this list needing an
 * update.
 */
void validateJavaUdfZetaSqlType(Type type) {
    switch (type.getKind()) {
        case TYPE_INT64:
        case TYPE_DOUBLE:
        case TYPE_BOOL:
        case TYPE_STRING:
        case TYPE_BYTES:
            break;
        case TYPE_NUMERIC:
        case TYPE_DATE:
        case TYPE_TIME:
        case TYPE_DATETIME:
        case TYPE_TIMESTAMP:
        case TYPE_ARRAY:
        case TYPE_STRUCT:
        default:
            throw new UnsupportedOperationException(
                    "ZetaSQL type not allowed in Java UDF: " + type.getKind().name());
    }
}
|
throw new UnsupportedOperationException("Unknown ZetaSQL type: " + type.getKind().name());
|
/**
 * Throws {@link UnsupportedOperationException} if the ZetaSQL type is not allowed in Java UDFs.
 * Only the fixed allowlist of scalar kinds below is accepted; every other kind, including ones
 * not listed here, is rejected with the same message.
 */
void validateJavaUdfZetaSqlType(Type type) {
switch (type.getKind()) {
case TYPE_INT64:
case TYPE_DOUBLE:
case TYPE_BOOL:
case TYPE_STRING:
case TYPE_BYTES:
// Supported kinds: fall through and return normally.
break;
case TYPE_NUMERIC:
case TYPE_DATE:
case TYPE_TIME:
case TYPE_DATETIME:
case TYPE_TIMESTAMP:
case TYPE_ARRAY:
case TYPE_STRUCT:
default:
// Unsupported and unrecognized kinds share one message by design.
throw new UnsupportedOperationException(
"ZetaSQL type not allowed in Java UDF: " + type.getKind().name());
}
}
|
class BeamZetaSqlCatalog {
public static final String PRE_DEFINED_WINDOW_FUNCTIONS = "pre_defined_window_functions";
public static final String USER_DEFINED_SQL_FUNCTIONS = "user_defined_functions";
public static final String USER_DEFINED_JAVA_SCALAR_FUNCTIONS =
"user_defined_java_scalar_functions";
/**
* Same as {@link Function}.ZETASQL_FUNCTION_GROUP_NAME. Identifies built-in ZetaSQL functions.
*/
public static final String ZETASQL_FUNCTION_GROUP_NAME = "ZetaSQL";
private static final ImmutableList<String> PRE_DEFINED_WINDOW_FUNCTION_DECLARATIONS =
ImmutableList.of(
"CREATE FUNCTION TUMBLE(ts TIMESTAMP, window_size STRING) AS (1);",
"CREATE FUNCTION TUMBLE_START(window_size STRING) RETURNS TIMESTAMP AS (null);",
"CREATE FUNCTION TUMBLE_END(window_size STRING) RETURNS TIMESTAMP AS (null);",
"CREATE FUNCTION HOP(ts TIMESTAMP, emit_frequency STRING, window_size STRING) AS (1);",
"CREATE FUNCTION HOP_START(emit_frequency STRING, window_size STRING) "
+ "RETURNS TIMESTAMP AS (null);",
"CREATE FUNCTION HOP_END(emit_frequency STRING, window_size STRING) "
+ "RETURNS TIMESTAMP AS (null);",
"CREATE FUNCTION SESSION(ts TIMESTAMP, session_gap STRING) AS (1);",
"CREATE FUNCTION SESSION_START(session_gap STRING) RETURNS TIMESTAMP AS (null);",
"CREATE FUNCTION SESSION_END(session_gap STRING) RETURNS TIMESTAMP AS (null);");
/** The top-level Calcite schema, which may contain sub-schemas. */
private final SchemaPlus calciteSchema;
/**
* The top-level ZetaSQL catalog, which may contain nested catalogs for qualified table and
* function references.
*/
private final SimpleCatalog zetaSqlCatalog;
private final JavaTypeFactory typeFactory;
private final JavaUdfLoader javaUdfLoader = new JavaUdfLoader();
private final Map<List<String>, ResolvedNodes.ResolvedCreateFunctionStmt> sqlScalarUdfs =
new HashMap<>();
/** User-defined table valued functions. */
private final Map<List<String>, ResolvedNode> sqlUdtvfs = new HashMap<>();
private final Map<List<String>, UserFunctionDefinitions.JavaScalarFunction> javaScalarUdfs =
new HashMap<>();
private BeamZetaSqlCatalog(
SchemaPlus calciteSchema, SimpleCatalog zetaSqlCatalog, JavaTypeFactory typeFactory) {
this.calciteSchema = calciteSchema;
this.zetaSqlCatalog = zetaSqlCatalog;
this.typeFactory = typeFactory;
}
/** Return catalog pre-populated with builtin functions. */
static BeamZetaSqlCatalog create(
SchemaPlus calciteSchema, JavaTypeFactory typeFactory, AnalyzerOptions options) {
BeamZetaSqlCatalog catalog =
new BeamZetaSqlCatalog(
calciteSchema, new SimpleCatalog(calciteSchema.getName()), typeFactory);
catalog.addFunctionsToCatalog(options);
return catalog;
}
SimpleCatalog getZetaSqlCatalog() {
return zetaSqlCatalog;
}
void addTables(List<List<String>> tables, QueryTrait queryTrait) {
tables.forEach(table -> addTableToLeafCatalog(table, queryTrait));
}
void addFunction(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
String functionGroup = getFunctionGroup(createFunctionStmt);
switch (functionGroup) {
case USER_DEFINED_SQL_FUNCTIONS:
sqlScalarUdfs.put(createFunctionStmt.getNamePath(), createFunctionStmt);
break;
case USER_DEFINED_JAVA_SCALAR_FUNCTIONS:
validateJavaUdf(createFunctionStmt);
String jarPath = getJarPath(createFunctionStmt);
ScalarFn scalarFn =
javaUdfLoader.loadScalarFunction(createFunctionStmt.getNamePath(), jarPath);
Method method = ScalarFnReflector.getApplyMethod(scalarFn);
javaScalarUdfs.put(
createFunctionStmt.getNamePath(),
UserFunctionDefinitions.JavaScalarFunction.create(method, jarPath));
break;
default:
throw new IllegalArgumentException(
String.format("Encountered unrecognized function group %s.", functionGroup));
}
zetaSqlCatalog.addFunction(
new Function(
createFunctionStmt.getNamePath(),
functionGroup,
createFunctionStmt.getIsAggregate()
? ZetaSQLFunctions.FunctionEnums.Mode.AGGREGATE
: ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
ImmutableList.of(createFunctionStmt.getSignature())));
}
void validateJavaUdf(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
for (FunctionArgumentType argumentType :
createFunctionStmt.getSignature().getFunctionArgumentList()) {
Type type = argumentType.getType();
if (type == null) {
throw new UnsupportedOperationException("UDF templated argument types are not supported.");
}
validateJavaUdfZetaSqlType(type);
}
if (createFunctionStmt.getReturnType() == null) {
throw new NullPointerException("UDF return type must not be null.");
}
validateJavaUdfZetaSqlType(createFunctionStmt.getReturnType());
}
/**
 * Registers a user-defined table-valued function in the ZetaSQL catalog and records its
 * resolved query body for later use.
 *
 * <p>The TVF is exposed with a fixed output schema derived from the column list of the
 * function's defining query. (The previous javadoc here described the ZetaSQL Java UDF type
 * validator and had been misplaced.)
 */
void addTableValuedFunction(
        ResolvedNodes.ResolvedCreateTableFunctionStmt createTableFunctionStmt) {
    zetaSqlCatalog.addTableValuedFunction(
            new TableValuedFunction.FixedOutputSchemaTVF(
                    createTableFunctionStmt.getNamePath(),
                    createTableFunctionStmt.getSignature(),
                    TVFRelation.createColumnBased(
                            createTableFunctionStmt.getQuery().getColumnList().stream()
                                    .map(c -> TVFRelation.Column.create(c.getName(), c.getType()))
                                    .collect(Collectors.toList()))));
    // Keep the resolved query so the function body is available when the TVF is referenced.
    sqlUdtvfs.put(createTableFunctionStmt.getNamePath(), createTableFunctionStmt.getQuery());
}
UserFunctionDefinitions getUserFunctionDefinitions() {
return UserFunctionDefinitions.newBuilder()
.setSqlScalarFunctions(ImmutableMap.copyOf(sqlScalarUdfs))
.setSqlTableValuedFunctions(ImmutableMap.copyOf(sqlUdtvfs))
.setJavaScalarFunctions(ImmutableMap.copyOf(javaScalarUdfs))
.build();
}
private void addFunctionsToCatalog(AnalyzerOptions options) {
ZetaSQLBuiltinFunctionOptions zetasqlBuiltinFunctionOptions =
new ZetaSQLBuiltinFunctionOptions(options.getLanguageOptions());
SupportedZetaSqlBuiltinFunctions.ALLOWLIST.forEach(
zetasqlBuiltinFunctionOptions::includeFunctionSignatureId);
zetaSqlCatalog.addZetaSQLFunctions(zetasqlBuiltinFunctionOptions);
addWindowScalarFunctions(options);
addWindowTvfs();
addUdfsFromSchema();
}
private void addWindowScalarFunctions(AnalyzerOptions options) {
PRE_DEFINED_WINDOW_FUNCTION_DECLARATIONS.stream()
.map(
func ->
(ResolvedNodes.ResolvedCreateFunctionStmt)
Analyzer.analyzeStatement(func, options, zetaSqlCatalog))
.map(
resolvedFunc ->
new Function(
String.join(".", resolvedFunc.getNamePath()),
PRE_DEFINED_WINDOW_FUNCTIONS,
ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
ImmutableList.of(resolvedFunc.getSignature())))
.forEach(zetaSqlCatalog::addFunction);
}
@SuppressWarnings({
"nullness"
})
private void addWindowTvfs() {
FunctionArgumentType retType =
new FunctionArgumentType(ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_RELATION);
FunctionArgumentType inputTableType =
new FunctionArgumentType(ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_RELATION);
FunctionArgumentType descriptorType =
new FunctionArgumentType(
ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_DESCRIPTOR,
FunctionArgumentType.FunctionArgumentTypeOptions.builder()
.setDescriptorResolutionTableOffset(0)
.build(),
1);
FunctionArgumentType stringType =
new FunctionArgumentType(TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_STRING));
zetaSqlCatalog.addTableValuedFunction(
new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
ImmutableList.of(TVFStreamingUtils.FIXED_WINDOW_TVF),
new FunctionSignature(
retType, ImmutableList.of(inputTableType, descriptorType, stringType), -1),
ImmutableList.of(
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_START,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_END,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
null,
null));
zetaSqlCatalog.addTableValuedFunction(
new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
ImmutableList.of(TVFStreamingUtils.SLIDING_WINDOW_TVF),
new FunctionSignature(
retType,
ImmutableList.of(inputTableType, descriptorType, stringType, stringType),
-1),
ImmutableList.of(
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_START,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_END,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
null,
null));
zetaSqlCatalog.addTableValuedFunction(
new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
ImmutableList.of(TVFStreamingUtils.SESSION_WINDOW_TVF),
new FunctionSignature(
retType,
ImmutableList.of(inputTableType, descriptorType, descriptorType, stringType),
-1),
ImmutableList.of(
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_START,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
TVFRelation.Column.create(
TVFStreamingUtils.WINDOW_END,
TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
null,
null));
}
private void addUdfsFromSchema() {
for (String functionName : calciteSchema.getFunctionNames()) {
Collection<org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Function>
functions = calciteSchema.getFunctions(functionName);
if (functions.size() != 1) {
throw new IllegalArgumentException(
String.format(
"Expected exactly 1 definition for function '%s', but found %d."
+ " Beam ZetaSQL supports only a single function definition per function name (BEAM-12073).",
functionName, functions.size()));
}
for (org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Function function :
functions) {
if (function instanceof ScalarFunctionImpl) {
ScalarFunctionImpl scalarFunction = (ScalarFunctionImpl) function;
validateScalarFunctionImpl(scalarFunction);
List<String> path = Arrays.asList(functionName.split("\\."));
Method method = scalarFunction.method;
javaScalarUdfs.put(path, UserFunctionDefinitions.JavaScalarFunction.create(method, ""));
FunctionArgumentType resultType =
new FunctionArgumentType(
ZetaSqlCalciteTranslationUtils.toZetaSqlType(
scalarFunction.getReturnType(typeFactory)));
List<FunctionArgumentType> argumentTypes =
scalarFunction.getParameters().stream()
.map(
(arg) ->
new FunctionArgumentType(
ZetaSqlCalciteTranslationUtils.toZetaSqlType(
arg.getType(typeFactory))))
.collect(Collectors.toList());
FunctionSignature functionSignature =
new FunctionSignature(resultType, argumentTypes, 0L);
zetaSqlCatalog.addFunction(
new Function(
path,
USER_DEFINED_JAVA_SCALAR_FUNCTIONS,
ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
ImmutableList.of(functionSignature)));
} else {
throw new IllegalArgumentException(
String.format(
"Function %s has unrecognized implementation type %s.",
functionName, function.getClass().getName()));
}
}
}
}
private void validateScalarFunctionImpl(ScalarFunctionImpl scalarFunction) {
for (FunctionParameter parameter : scalarFunction.getParameters()) {
validateJavaUdfCalciteType(parameter.getType(typeFactory));
}
validateJavaUdfCalciteType(scalarFunction.getReturnType(typeFactory));
}
/**
 * Throws {@link UnsupportedOperationException} if the Calcite type is not supported in Java
 * UDFs. Supported types are a subset of the corresponding Calcite types supported by {@link
 * BeamJavaUdfCalcRule}.
 *
 * <p>Known-but-unsupported types and unrecognized types are rejected with the same message, so
 * newly added Calcite types are reported consistently without this list needing an update.
 */
private void validateJavaUdfCalciteType(RelDataType type) {
    switch (type.getSqlTypeName()) {
        case BIGINT:
        case DOUBLE:
        case BOOLEAN:
        case VARCHAR:
        case VARBINARY:
            break;
        case DECIMAL:
        case DATE:
        case TIME:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
        case TIMESTAMP:
        case ARRAY:
        case ROW:
        default:
            throw new UnsupportedOperationException(
                    "Calcite type not allowed in Java UDF: " + type.getSqlTypeName().getName());
    }
}
private String getFunctionGroup(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
switch (createFunctionStmt.getLanguage().toUpperCase()) {
case "JAVA":
if (createFunctionStmt.getIsAggregate()) {
throw new UnsupportedOperationException(
"Java SQL aggregate functions are not supported (BEAM-10925).");
}
return USER_DEFINED_JAVA_SCALAR_FUNCTIONS;
case "SQL":
if (createFunctionStmt.getIsAggregate()) {
throw new UnsupportedOperationException(
"Native SQL aggregate functions are not supported (BEAM-9954).");
}
return USER_DEFINED_SQL_FUNCTIONS;
case "PY":
case "PYTHON":
case "JS":
case "JAVASCRIPT":
throw new UnsupportedOperationException(
String.format(
"Function %s uses unsupported language %s.",
String.join(".", createFunctionStmt.getNamePath()),
createFunctionStmt.getLanguage()));
default:
throw new IllegalArgumentException(
String.format(
"Function %s uses unrecognized language %s.",
String.join(".", createFunctionStmt.getNamePath()),
createFunctionStmt.getLanguage()));
}
}
/**
* Assume last element in tablePath is a table name, and everything before is catalogs. So the
* logic is to create nested catalogs until the last level, then add a table at the last level.
*
* <p>Table schema is extracted from Calcite schema based on the table name resolution strategy,
* e.g. either by drilling down the schema.getSubschema() path or joining the table name with dots
* to construct a single compound identifier (e.g. Data Catalog use case).
*/
private void addTableToLeafCatalog(List<String> tablePath, QueryTrait queryTrait) {
  // Build (or reuse) the nested catalog chain for everything but the final table-name element.
  SimpleCatalog leafCatalog = createNestedCatalogs(zetaSqlCatalog, tablePath);
  org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Table calciteTable =
      TableResolution.resolveCalciteTable(calciteSchema, tablePath);
  if (calciteTable == null) {
    throw new ZetaSqlException(
        "Wasn't able to resolve the path "
            + tablePath
            + " in schema: "
            + calciteSchema.getName());
  }
  RelDataType rowType = calciteTable.getRowType(typeFactory);
  TableResolution.SimpleTableWithPath tableWithPath =
      TableResolution.SimpleTableWithPath.of(tablePath);
  // Record the resolved table so later planning stages can find it by its original path.
  queryTrait.addResolvedTable(tableWithPath);
  addFieldsToTable(tableWithPath, rowType);
  leafCatalog.addSimpleTable(tableWithPath.getTable());
}
/** Copies every field of the Calcite row type onto the ZetaSQL table as a simple column. */
private static void addFieldsToTable(
    TableResolution.SimpleTableWithPath tableWithPath, RelDataType rowType) {
  for (RelDataTypeField field : rowType.getFieldList()) {
    tableWithPath
        .getTable()
        .addSimpleColumn(
            field.getName(), ZetaSqlCalciteTranslationUtils.toZetaSqlType(field.getType()));
  }
}
/** For table path like a.b.c we assume c is the table and a.b are the nested catalogs/schemas. */
private static SimpleCatalog createNestedCatalogs(SimpleCatalog catalog, List<String> tablePath) {
  SimpleCatalog currentCatalog = catalog;
  // Walk every path element except the last (the table name), reusing a sub-catalog
  // when one with that name already exists, creating it otherwise.
  for (int i = 0; i < tablePath.size() - 1; i++) {
    String nextCatalogName = tablePath.get(i);
    Optional<SimpleCatalog> existing = tryGetExisting(currentCatalog, nextCatalogName);
    currentCatalog =
        existing.isPresent() ? existing.get() : addNewCatalog(currentCatalog, nextCatalogName);
  }
  return currentCatalog;
}
/** Returns the direct sub-catalog with the given name, if one already exists. */
private static Optional<SimpleCatalog> tryGetExisting(
    SimpleCatalog currentCatalog, String nextCatalogName) {
  return currentCatalog.getCatalogList().stream()
      .filter(c -> nextCatalogName.equals(c.getFullName()))
      .findFirst();
}
/** Creates a new sub-catalog under {@code currentCatalog} and returns it. */
private static SimpleCatalog addNewCatalog(SimpleCatalog currentCatalog, String nextCatalogName) {
  SimpleCatalog nextCatalog = new SimpleCatalog(nextCatalogName);
  currentCatalog.addSimpleCatalog(nextCatalog);
  return nextCatalog;
}
/** Extracts the required 'path' option (the jar location) from a CREATE FUNCTION statement. */
private static String getJarPath(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
  String jarPath = getOptionStringValue(createFunctionStmt, "path");
  // getOptionStringValue returns "" when the option is absent.
  if (jarPath.isEmpty()) {
    throw new IllegalArgumentException(
        String.format(
            "No jar was provided to define function %s. Add 'OPTIONS (path=<jar location>)' to the CREATE FUNCTION statement.",
            String.join(".", createFunctionStmt.getNamePath())));
  }
  return jarPath;
}
/**
 * Returns the string value of the named option on the CREATE FUNCTION statement, or the empty
 * string when the option is absent. Throws if the option exists but is not a string literal.
 */
private static String getOptionStringValue(
    ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt, String optionName) {
  for (ResolvedNodes.ResolvedOption option : createFunctionStmt.getOptionList()) {
    if (optionName.equals(option.getName())) {
      if (option.getValue() == null) {
        throw new IllegalArgumentException(
            String.format(
                "Option '%s' has null value (expected %s).",
                optionName, ZetaSQLType.TypeKind.TYPE_STRING));
      }
      if (option.getValue().getType().getKind() != ZetaSQLType.TypeKind.TYPE_STRING) {
        throw new IllegalArgumentException(
            String.format(
                "Option '%s' has type %s (expected %s).",
                optionName,
                option.getValue().getType().getKind(),
                ZetaSQLType.TypeKind.TYPE_STRING));
      }
      // Cast is safe only after the TYPE_STRING check above.
      return ((ResolvedNodes.ResolvedLiteral) option.getValue()).getValue().getStringValue();
    }
  }
  return "";
}
}
|
/**
 * Catalog of tables and functions for Beam's ZetaSQL planner. Bridges the Calcite schema
 * (where Beam registers tables and UDFs) and the ZetaSQL {@link SimpleCatalog} consumed by the
 * ZetaSQL analyzer, tracking user-defined SQL/Java functions so their definitions can be
 * retrieved later via {@link #getUserFunctionDefinitions()}.
 */
class BeamZetaSqlCatalog {
  // Function-group names used to tag functions registered in the ZetaSQL catalog.
  public static final String PRE_DEFINED_WINDOW_FUNCTIONS = "pre_defined_window_functions";
  public static final String USER_DEFINED_SQL_FUNCTIONS = "user_defined_functions";
  public static final String USER_DEFINED_JAVA_SCALAR_FUNCTIONS =
      "user_defined_java_scalar_functions";
  /**
   * Same as {@link Function}.ZETASQL_FUNCTION_GROUP_NAME. Identifies built-in ZetaSQL functions.
   */
  public static final String ZETASQL_FUNCTION_GROUP_NAME = "ZetaSQL";
  // Dummy SQL bodies: only the signatures matter; the planner implements the windowing itself.
  private static final ImmutableList<String> PRE_DEFINED_WINDOW_FUNCTION_DECLARATIONS =
      ImmutableList.of(
          "CREATE FUNCTION TUMBLE(ts TIMESTAMP, window_size STRING) AS (1);",
          "CREATE FUNCTION TUMBLE_START(window_size STRING) RETURNS TIMESTAMP AS (null);",
          "CREATE FUNCTION TUMBLE_END(window_size STRING) RETURNS TIMESTAMP AS (null);",
          "CREATE FUNCTION HOP(ts TIMESTAMP, emit_frequency STRING, window_size STRING) AS (1);",
          "CREATE FUNCTION HOP_START(emit_frequency STRING, window_size STRING) "
              + "RETURNS TIMESTAMP AS (null);",
          "CREATE FUNCTION HOP_END(emit_frequency STRING, window_size STRING) "
              + "RETURNS TIMESTAMP AS (null);",
          "CREATE FUNCTION SESSION(ts TIMESTAMP, session_gap STRING) AS (1);",
          "CREATE FUNCTION SESSION_START(session_gap STRING) RETURNS TIMESTAMP AS (null);",
          "CREATE FUNCTION SESSION_END(session_gap STRING) RETURNS TIMESTAMP AS (null);");
  /** The top-level Calcite schema, which may contain sub-schemas. */
  private final SchemaPlus calciteSchema;
  /**
   * The top-level ZetaSQL catalog, which may contain nested catalogs for qualified table and
   * function references.
   */
  private final SimpleCatalog zetaSqlCatalog;
  private final JavaTypeFactory typeFactory;
  private final JavaUdfLoader javaUdfLoader = new JavaUdfLoader();
  // User-defined scalar functions written in SQL, keyed by name path.
  private final Map<List<String>, ResolvedNodes.ResolvedCreateFunctionStmt> sqlScalarUdfs =
      new HashMap<>();
  /** User-defined table valued functions. */
  private final Map<List<String>, ResolvedNode> sqlUdtvfs = new HashMap<>();
  // User-defined scalar functions written in Java, keyed by name path.
  private final Map<List<String>, UserFunctionDefinitions.JavaScalarFunction> javaScalarUdfs =
      new HashMap<>();
  private BeamZetaSqlCatalog(
      SchemaPlus calciteSchema, SimpleCatalog zetaSqlCatalog, JavaTypeFactory typeFactory) {
    this.calciteSchema = calciteSchema;
    this.zetaSqlCatalog = zetaSqlCatalog;
    this.typeFactory = typeFactory;
  }
  /** Return catalog pre-populated with builtin functions. */
  static BeamZetaSqlCatalog create(
      SchemaPlus calciteSchema, JavaTypeFactory typeFactory, AnalyzerOptions options) {
    BeamZetaSqlCatalog catalog =
        new BeamZetaSqlCatalog(
            calciteSchema, new SimpleCatalog(calciteSchema.getName()), typeFactory);
    catalog.addFunctionsToCatalog(options);
    return catalog;
  }
  SimpleCatalog getZetaSqlCatalog() {
    return zetaSqlCatalog;
  }
  /** Registers each table path in the ZetaSQL catalog, recording resolutions on the trait. */
  void addTables(List<List<String>> tables, QueryTrait queryTrait) {
    tables.forEach(table -> addTableToLeafCatalog(table, queryTrait));
  }
  /**
   * Registers a CREATE FUNCTION statement: records its definition internally (SQL or Java) and
   * adds its signature to the ZetaSQL catalog so the analyzer can resolve calls to it.
   */
  void addFunction(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
    String functionGroup = getFunctionGroup(createFunctionStmt);
    switch (functionGroup) {
      case USER_DEFINED_SQL_FUNCTIONS:
        sqlScalarUdfs.put(createFunctionStmt.getNamePath(), createFunctionStmt);
        break;
      case USER_DEFINED_JAVA_SCALAR_FUNCTIONS:
        validateJavaUdf(createFunctionStmt);
        // Load the implementation eagerly from the jar named in OPTIONS (path=...).
        String jarPath = getJarPath(createFunctionStmt);
        ScalarFn scalarFn =
            javaUdfLoader.loadScalarFunction(createFunctionStmt.getNamePath(), jarPath);
        Method method = ScalarFnReflector.getApplyMethod(scalarFn);
        javaScalarUdfs.put(
            createFunctionStmt.getNamePath(),
            UserFunctionDefinitions.JavaScalarFunction.create(method, jarPath));
        break;
      default:
        throw new IllegalArgumentException(
            String.format("Encountered unrecognized function group %s.", functionGroup));
    }
    zetaSqlCatalog.addFunction(
        new Function(
            createFunctionStmt.getNamePath(),
            functionGroup,
            createFunctionStmt.getIsAggregate()
                ? ZetaSQLFunctions.FunctionEnums.Mode.AGGREGATE
                : ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
            ImmutableList.of(createFunctionStmt.getSignature())));
  }
  /** Validates that every argument and the return type of a Java UDF are supported. */
  void validateJavaUdf(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
    for (FunctionArgumentType argumentType :
        createFunctionStmt.getSignature().getFunctionArgumentList()) {
      Type type = argumentType.getType();
      if (type == null) {
        throw new UnsupportedOperationException("UDF templated argument types are not supported.");
      }
      validateJavaUdfZetaSqlType(type);
    }
    if (createFunctionStmt.getReturnType() == null) {
      throw new IllegalArgumentException("UDF return type must not be null.");
    }
    validateJavaUdfZetaSqlType(createFunctionStmt.getReturnType());
  }
  /**
   * Registers a SQL table-valued function in the ZetaSQL catalog with its fixed output schema,
   * and records its defining query for later retrieval.
   */
  void addTableValuedFunction(
      ResolvedNodes.ResolvedCreateTableFunctionStmt createTableFunctionStmt) {
    zetaSqlCatalog.addTableValuedFunction(
        new TableValuedFunction.FixedOutputSchemaTVF(
            createTableFunctionStmt.getNamePath(),
            createTableFunctionStmt.getSignature(),
            TVFRelation.createColumnBased(
                createTableFunctionStmt.getQuery().getColumnList().stream()
                    .map(c -> TVFRelation.Column.create(c.getName(), c.getType()))
                    .collect(Collectors.toList()))));
    sqlUdtvfs.put(createTableFunctionStmt.getNamePath(), createTableFunctionStmt.getQuery());
  }
  /** Returns immutable snapshots of all user-defined functions registered so far. */
  UserFunctionDefinitions getUserFunctionDefinitions() {
    return UserFunctionDefinitions.newBuilder()
        .setSqlScalarFunctions(ImmutableMap.copyOf(sqlScalarUdfs))
        .setSqlTableValuedFunctions(ImmutableMap.copyOf(sqlUdtvfs))
        .setJavaScalarFunctions(ImmutableMap.copyOf(javaScalarUdfs))
        .build();
  }
  /** Populates the catalog with ZetaSQL builtins, window function stubs, and schema UDFs. */
  private void addFunctionsToCatalog(AnalyzerOptions options) {
    ZetaSQLBuiltinFunctionOptions zetasqlBuiltinFunctionOptions =
        new ZetaSQLBuiltinFunctionOptions(options.getLanguageOptions());
    // Only expose the builtin signatures Beam actually supports.
    SupportedZetaSqlBuiltinFunctions.ALLOWLIST.forEach(
        zetasqlBuiltinFunctionOptions::includeFunctionSignatureId);
    zetaSqlCatalog.addZetaSQLFunctions(zetasqlBuiltinFunctionOptions);
    addWindowScalarFunctions(options);
    addWindowTvfs();
    addUdfsFromSchema();
  }
  /** Analyzes the dummy window-function declarations and registers their signatures. */
  private void addWindowScalarFunctions(AnalyzerOptions options) {
    PRE_DEFINED_WINDOW_FUNCTION_DECLARATIONS.stream()
        .map(
            func ->
                (ResolvedNodes.ResolvedCreateFunctionStmt)
                    Analyzer.analyzeStatement(func, options, zetaSqlCatalog))
        .map(
            resolvedFunc ->
                new Function(
                    String.join(".", resolvedFunc.getNamePath()),
                    PRE_DEFINED_WINDOW_FUNCTIONS,
                    ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
                    ImmutableList.of(resolvedFunc.getSignature())))
        .forEach(zetaSqlCatalog::addFunction);
  }
  @SuppressWarnings({
    "nullness"
  })
  /** Registers the streaming window TVFs (TUMBLE/HOP/SESSION) with appended window columns. */
  private void addWindowTvfs() {
    FunctionArgumentType retType =
        new FunctionArgumentType(ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_RELATION);
    FunctionArgumentType inputTableType =
        new FunctionArgumentType(ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_RELATION);
    FunctionArgumentType descriptorType =
        new FunctionArgumentType(
            ZetaSQLFunctions.SignatureArgumentKind.ARG_TYPE_DESCRIPTOR,
            FunctionArgumentType.FunctionArgumentTypeOptions.builder()
                .setDescriptorResolutionTableOffset(0)
                .build(),
            1);
    FunctionArgumentType stringType =
        new FunctionArgumentType(TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_STRING));
    zetaSqlCatalog.addTableValuedFunction(
        new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
            ImmutableList.of(TVFStreamingUtils.FIXED_WINDOW_TVF),
            new FunctionSignature(
                retType, ImmutableList.of(inputTableType, descriptorType, stringType), -1),
            ImmutableList.of(
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_START,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_END,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
            null,
            null));
    zetaSqlCatalog.addTableValuedFunction(
        new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
            ImmutableList.of(TVFStreamingUtils.SLIDING_WINDOW_TVF),
            new FunctionSignature(
                retType,
                ImmutableList.of(inputTableType, descriptorType, stringType, stringType),
                -1),
            ImmutableList.of(
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_START,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_END,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
            null,
            null));
    zetaSqlCatalog.addTableValuedFunction(
        new TableValuedFunction.ForwardInputSchemaToOutputSchemaWithAppendedColumnTVF(
            ImmutableList.of(TVFStreamingUtils.SESSION_WINDOW_TVF),
            new FunctionSignature(
                retType,
                ImmutableList.of(inputTableType, descriptorType, descriptorType, stringType),
                -1),
            ImmutableList.of(
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_START,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP)),
                TVFRelation.Column.create(
                    TVFStreamingUtils.WINDOW_END,
                    TypeFactory.createSimpleType(ZetaSQLType.TypeKind.TYPE_TIMESTAMP))),
            null,
            null));
  }
  /** Imports Java scalar UDFs already registered on the Calcite schema into this catalog. */
  private void addUdfsFromSchema() {
    for (String functionName : calciteSchema.getFunctionNames()) {
      Collection<org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Function>
          functions = calciteSchema.getFunctions(functionName);
      if (functions.size() != 1) {
        throw new IllegalArgumentException(
            String.format(
                "Expected exactly 1 definition for function '%s', but found %d."
                    + " Beam ZetaSQL supports only a single function definition per function name (BEAM-12073).",
                functionName, functions.size()));
      }
      for (org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Function function :
          functions) {
        if (function instanceof ScalarFunctionImpl) {
          ScalarFunctionImpl scalarFunction = (ScalarFunctionImpl) function;
          validateScalarFunctionImpl(scalarFunction);
          // Dotted function names become nested name paths in the ZetaSQL catalog.
          List<String> path = Arrays.asList(functionName.split("\\."));
          Method method = scalarFunction.method;
          // Empty jar path: the implementation comes from the schema, not an external jar.
          javaScalarUdfs.put(path, UserFunctionDefinitions.JavaScalarFunction.create(method, ""));
          FunctionArgumentType resultType =
              new FunctionArgumentType(
                  ZetaSqlCalciteTranslationUtils.toZetaSqlType(
                      scalarFunction.getReturnType(typeFactory)));
          List<FunctionArgumentType> argumentTypes =
              scalarFunction.getParameters().stream()
                  .map(
                      (arg) ->
                          new FunctionArgumentType(
                              ZetaSqlCalciteTranslationUtils.toZetaSqlType(
                                  arg.getType(typeFactory))))
                  .collect(Collectors.toList());
          FunctionSignature functionSignature =
              new FunctionSignature(resultType, argumentTypes, 0L);
          zetaSqlCatalog.addFunction(
              new Function(
                  path,
                  USER_DEFINED_JAVA_SCALAR_FUNCTIONS,
                  ZetaSQLFunctions.FunctionEnums.Mode.SCALAR,
                  ImmutableList.of(functionSignature)));
        } else {
          throw new IllegalArgumentException(
              String.format(
                  "Function %s has unrecognized implementation type %s.",
                  functionName, function.getClass().getName()));
        }
      }
    }
  }
  /** Validates every parameter type and the return type of a schema-registered UDF. */
  private void validateScalarFunctionImpl(ScalarFunctionImpl scalarFunction) {
    for (FunctionParameter parameter : scalarFunction.getParameters()) {
      validateJavaUdfCalciteType(parameter.getType(typeFactory));
    }
    validateJavaUdfCalciteType(scalarFunction.getReturnType(typeFactory));
  }
  /**
   * Throws {@link UnsupportedOperationException} if Calcite type is not supported in Java UDF.
   * Supported types are a subset of the corresponding Calcite types supported by {@link
   * BeamJavaUdfCalcRule}.
   */
  private void validateJavaUdfCalciteType(RelDataType type) {
    switch (type.getSqlTypeName()) {
      case BIGINT:
      case DOUBLE:
      case BOOLEAN:
      case VARCHAR:
      case VARBINARY:
        break;
      // Disallowed and unknown types share one error message (fall-through to default).
      case DECIMAL:
      case DATE:
      case TIME:
      case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      case TIMESTAMP:
      case ARRAY:
      case ROW:
      default:
        throw new UnsupportedOperationException(
            "Calcite type not allowed in ZetaSQL Java UDF: " + type.getSqlTypeName().getName());
    }
  }
  /**
   * Maps the CREATE FUNCTION statement's LANGUAGE clause to the internal function-group name,
   * rejecting languages and aggregate modes that are not supported.
   */
  private String getFunctionGroup(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
    switch (createFunctionStmt.getLanguage().toUpperCase()) {
      case "JAVA":
        if (createFunctionStmt.getIsAggregate()) {
          throw new UnsupportedOperationException(
              "Java SQL aggregate functions are not supported (BEAM-10925).");
        }
        return USER_DEFINED_JAVA_SCALAR_FUNCTIONS;
      case "SQL":
        if (createFunctionStmt.getIsAggregate()) {
          throw new UnsupportedOperationException(
              "Native SQL aggregate functions are not supported (BEAM-9954).");
        }
        return USER_DEFINED_SQL_FUNCTIONS;
      case "PY":
      case "PYTHON":
      case "JS":
      case "JAVASCRIPT":
        throw new UnsupportedOperationException(
            String.format(
                "Function %s uses unsupported language %s.",
                String.join(".", createFunctionStmt.getNamePath()),
                createFunctionStmt.getLanguage()));
      default:
        throw new IllegalArgumentException(
            String.format(
                "Function %s uses unrecognized language %s.",
                String.join(".", createFunctionStmt.getNamePath()),
                createFunctionStmt.getLanguage()));
    }
  }
  /**
   * Assume last element in tablePath is a table name, and everything before is catalogs. So the
   * logic is to create nested catalogs until the last level, then add a table at the last level.
   *
   * <p>Table schema is extracted from Calcite schema based on the table name resolution strategy,
   * e.g. either by drilling down the schema.getSubschema() path or joining the table name with dots
   * to construct a single compound identifier (e.g. Data Catalog use case).
   */
  private void addTableToLeafCatalog(List<String> tablePath, QueryTrait queryTrait) {
    SimpleCatalog leafCatalog = createNestedCatalogs(zetaSqlCatalog, tablePath);
    org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.schema.Table calciteTable =
        TableResolution.resolveCalciteTable(calciteSchema, tablePath);
    if (calciteTable == null) {
      throw new ZetaSqlException(
          "Wasn't able to resolve the path "
              + tablePath
              + " in schema: "
              + calciteSchema.getName());
    }
    RelDataType rowType = calciteTable.getRowType(typeFactory);
    TableResolution.SimpleTableWithPath tableWithPath =
        TableResolution.SimpleTableWithPath.of(tablePath);
    queryTrait.addResolvedTable(tableWithPath);
    addFieldsToTable(tableWithPath, rowType);
    leafCatalog.addSimpleTable(tableWithPath.getTable());
  }
  /** Copies every field of the Calcite row type onto the ZetaSQL table as a simple column. */
  private static void addFieldsToTable(
      TableResolution.SimpleTableWithPath tableWithPath, RelDataType rowType) {
    for (RelDataTypeField field : rowType.getFieldList()) {
      tableWithPath
          .getTable()
          .addSimpleColumn(
              field.getName(), ZetaSqlCalciteTranslationUtils.toZetaSqlType(field.getType()));
    }
  }
  /** For table path like a.b.c we assume c is the table and a.b are the nested catalogs/schemas. */
  private static SimpleCatalog createNestedCatalogs(SimpleCatalog catalog, List<String> tablePath) {
    SimpleCatalog currentCatalog = catalog;
    for (int i = 0; i < tablePath.size() - 1; i++) {
      String nextCatalogName = tablePath.get(i);
      Optional<SimpleCatalog> existing = tryGetExisting(currentCatalog, nextCatalogName);
      currentCatalog =
          existing.isPresent() ? existing.get() : addNewCatalog(currentCatalog, nextCatalogName);
    }
    return currentCatalog;
  }
  /** Returns the direct sub-catalog with the given name, if one already exists. */
  private static Optional<SimpleCatalog> tryGetExisting(
      SimpleCatalog currentCatalog, String nextCatalogName) {
    return currentCatalog.getCatalogList().stream()
        .filter(c -> nextCatalogName.equals(c.getFullName()))
        .findFirst();
  }
  /** Creates a new sub-catalog under {@code currentCatalog} and returns it. */
  private static SimpleCatalog addNewCatalog(SimpleCatalog currentCatalog, String nextCatalogName) {
    SimpleCatalog nextCatalog = new SimpleCatalog(nextCatalogName);
    currentCatalog.addSimpleCatalog(nextCatalog);
    return nextCatalog;
  }
  /** Extracts the required 'path' option (the jar location) from a CREATE FUNCTION statement. */
  private static String getJarPath(ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt) {
    String jarPath = getOptionStringValue(createFunctionStmt, "path");
    if (jarPath.isEmpty()) {
      throw new IllegalArgumentException(
          String.format(
              "No jar was provided to define function %s. Add 'OPTIONS (path=<jar location>)' to the CREATE FUNCTION statement.",
              String.join(".", createFunctionStmt.getNamePath())));
    }
    return jarPath;
  }
  /**
   * Returns the string value of the named option on the CREATE FUNCTION statement, or the empty
   * string when the option is absent. Throws if the option exists but is not a string literal.
   */
  private static String getOptionStringValue(
      ResolvedNodes.ResolvedCreateFunctionStmt createFunctionStmt, String optionName) {
    for (ResolvedNodes.ResolvedOption option : createFunctionStmt.getOptionList()) {
      if (optionName.equals(option.getName())) {
        if (option.getValue() == null) {
          throw new IllegalArgumentException(
              String.format(
                  "Option '%s' has null value (expected %s).",
                  optionName, ZetaSQLType.TypeKind.TYPE_STRING));
        }
        if (option.getValue().getType().getKind() != ZetaSQLType.TypeKind.TYPE_STRING) {
          throw new IllegalArgumentException(
              String.format(
                  "Option '%s' has type %s (expected %s).",
                  optionName,
                  option.getValue().getType().getKind(),
                  ZetaSQLType.TypeKind.TYPE_STRING));
        }
        return ((ResolvedNodes.ResolvedLiteral) option.getValue()).getValue().getStringValue();
      }
    }
    return "";
  }
}
|
Consider matching every value explicitly: a switch expression over an enum is exhaustive, i.e. leaving a value unmatched is a compile error when there is no `default` clause.
|
/** Decodes a trace payload value from slime; returns null for types with no payload mapping. */
private static Object decodePayload(Inspector entry) {
  return switch (entry.type()) {
    case STRING -> entry.asString();
    case LONG -> entry.asLong();
    case BOOL -> entry.asBool();
    case DOUBLE -> entry.asDouble();
    case DATA -> entry.asData();
    // NOTE(review): all remaining Type values map to null — confirm this is intended.
    default -> null;
  };
}
|
default -> null;
|
/** Decodes a trace payload value from slime; returns null for types with no payload mapping. */
private static Object decodePayload(Inspector entry) {
  return switch (entry.type()) {
    case STRING -> entry.asString();
    case LONG -> entry.asLong();
    case BOOL -> entry.asBool();
    case DOUBLE -> entry.asDouble();
    case DATA -> entry.asData();
    // NOTE(review): all remaining Type values map to null — confirm this is intended.
    default -> null;
  };
}
|
/** Deserializes a {@link TraceNode} tree from its slime representation. */
class SlimeTraceDeserializer {
  // Root inspector for the serialized trace.
  private final Inspector entry;
  public SlimeTraceDeserializer(Inspector inspector) {
    this.entry = inspector;
  }
  /** Deserializes the root node and, recursively, all of its children. */
  public TraceNode deserialize() {
    return deserialize(entry);
  }
  private static TraceNode deserialize(Inspector entry) {
    Object payload = decodePayload(entry.field(SlimeTraceSerializer.PAYLOAD));
    long timestamp = decodeTimestamp(entry.field(SlimeTraceSerializer.TIMESTAMP));
    final TraceNode node = new TraceNode(payload, timestamp);
    Inspector children = entry.field(SlimeTraceSerializer.CHILDREN);
    // Recursively rebuild each child node in array order.
    children.traverse(new ArrayTraverser() {
      @Override
      public void entry(int idx, Inspector inspector) {
        node.add(deserialize(inspector));
      }
    });
    return node;
  }
  private static long decodeTimestamp(Inspector entry) {
    return entry.asLong();
  }
}
|
/** Deserializes a {@link TraceNode} tree from its slime representation. */
class SlimeTraceDeserializer {
  // Root inspector for the serialized trace.
  private final Inspector entry;
  public SlimeTraceDeserializer(Inspector inspector) {
    this.entry = inspector;
  }
  /** Deserializes the root node and, recursively, all of its children. */
  public TraceNode deserialize() {
    return deserialize(entry);
  }
  private static TraceNode deserialize(Inspector entry) {
    Object payload = decodePayload(entry.field(SlimeTraceSerializer.PAYLOAD));
    long timestamp = decodeTimestamp(entry.field(SlimeTraceSerializer.TIMESTAMP));
    final TraceNode node = new TraceNode(payload, timestamp);
    Inspector children = entry.field(SlimeTraceSerializer.CHILDREN);
    // Recursively rebuild each child node in array order.
    children.traverse(new ArrayTraverser() {
      @Override
      public void entry(int idx, Inspector inspector) {
        node.add(deserialize(inspector));
      }
    });
    return node;
  }
  private static long decodeTimestamp(Inspector entry) {
    return entry.asLong();
  }
}
|
Should we have a more specific error message? Something like `function invocations are not allowed on the left-hand side of an assignment`.
|
/** Negative tests: each invalid compound assignment must produce the expected diagnostic. */
public void testCompoundAssignmentNegative() {
  CompileResult compileResult = BCompileUtil.compile(
      "test-src/statements/compoundassignment/compound_assignment_negative.bal");
  int i = 0;
  // Check the total count first so a missing or extra diagnostic fails fast.
  Assert.assertEquals(compileResult.getErrorCount(), 22);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'any' and 'int'", 5, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '-' not defined for 'any' and 'int'", 13, 5);
  BAssertUtil.validateError(compileResult, i++, "invalid assignment in variable 'getInt()'", 20, 5);
  BAssertUtil.validateError(compileResult, i++, "invalid assignment in variable 'getInt()'", 25, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'string' and 'int'", 35, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '-' not defined for 'string' and 'int'", 41, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and '(int|error)'", 47, 5);
  BAssertUtil.validateError(compileResult, i++, "invalid assignment in variable 'getInt()'", 53, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'json' and 'string'", 59, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and 'string'", 65, 5);
  BAssertUtil.validateError(compileResult, i++, "incompatible types: expected 'int', found 'float'", 72, 10);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and '[int,int]'", 78, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '&' not defined for 'int' and 'string'", 90, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '|' not defined for 'int' and 'string'", 96, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '^' not defined for 'int' and 'string'", 102, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '<<' not defined for 'int' and 'string'", 108, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '>>' not defined for 'int' and 'string'", 114, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '>>>' not defined for 'int' and 'string'", 120, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int?'", 132, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int?'", 140, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int'", 150, 11);
  // Last check uses i (not i++): no further diagnostics are validated.
  BAssertUtil.validateError(compileResult, i, "invalid assignment in variable 'foo(bar)'", 156, 5);
}
|
BAssertUtil.validateError(compileResult, i, "invalid assignment in variable 'foo(bar)'", 156, 5);
|
/** Negative tests: each invalid compound assignment must produce the expected diagnostic. */
public void testCompoundAssignmentNegative() {
  CompileResult compileResult = BCompileUtil.compile(
      "test-src/statements/compoundassignment/compound_assignment_negative.bal");
  int i = 0;
  // Check the total count first so a missing or extra diagnostic fails fast.
  Assert.assertEquals(compileResult.getErrorCount(), 22);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'any' and 'int'", 5, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '-' not defined for 'any' and 'int'", 13, 5);
  BAssertUtil.validateError(compileResult, i++, "invocations are not supported on the left hand side of an " +
      "assignment", 20, 5);
  BAssertUtil.validateError(compileResult, i++, "invocations are not supported on the left hand side of an " +
      "assignment", 25, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'string' and 'int'", 35, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '-' not defined for 'string' and 'int'", 41, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and '(int|error)'", 47, 5);
  BAssertUtil.validateError(compileResult, i++, "invocations are not supported on the left hand side of an " +
      "assignment", 53, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'json' and 'string'", 59, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and 'string'", 65, 5);
  BAssertUtil.validateError(compileResult, i++, "incompatible types: expected 'int', found 'float'", 72, 10);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int' and '[int,int]'", 78, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '&' not defined for 'int' and 'string'", 90, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '|' not defined for 'int' and 'string'", 96, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '^' not defined for 'int' and 'string'", 102, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '<<' not defined for 'int' and 'string'", 108, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '>>' not defined for 'int' and 'string'", 114, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '>>>' not defined for 'int' and 'string'", 120, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int?'", 132, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int?'", 140, 5);
  BAssertUtil.validateError(compileResult, i++, "operator '+' not defined for 'int?' and 'int'", 150, 11);
  // Last check uses i (not i++): no further diagnostics are validated.
  BAssertUtil.validateError(compileResult, i, "invocations are not supported on the left hand side of an " +
      "assignment", 156, 5);
}
|
class CompoundAssignmentTest {
// Shared compile result for all positive test methods in this class.
private CompileResult result;
// Compile the Ballerina test program once before any test runs.
@BeforeClass
public void setup() {
  result = BCompileUtil.compile("test-src/statements/compoundassignment/compound_assignment.bal");
}
@Test(description = "Test compound assignment with addition.")
public void testCompoundAssignmentAddition() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAddition");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 15);
}
@Test(description = "Test compound assignment with subtraction.")
public void testCompoundAssignmentSubtraction() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtraction");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), -5);
}
@Test(description = "Test compound assignment with multiplication.")
public void testCompoundAssignmentMultiplication() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplication");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 50);
}
@Test(description = "Test compound assignment with division.")
public void testCompoundAssignmentDivision() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivision");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with bitwise AND.")
public void testCompoundAssignmentBitwiseAND() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseAND");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 5);
}
@Test(description = "Test compound assignment with bitwise OR.")
public void testCompoundAssignmentBitwiseOR() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseOR");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 15);
}
@Test(description = "Test compound assignment with bitwise XOR.")
public void testCompoundAssignmentBitwiseXOR() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseXOR");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with left shift.")
public void testCompoundAssignmentLeftShift() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentLeftShift");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 16);
}
@Test(description = "Test compound assignment with right shift.")
public void testCompoundAssignmentRightShift() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentRightShift");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 4);
}
@Test(description = "Test compound assignment with logical shift.")
public void testCompoundAssignmentLogicalShift() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentLogicalShift");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 4);
}
@Test(description = "Test compound assignment with addition on array element.")
public void testCompoundAssignmentAdditionArrayElement() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionArrayElement");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 110);
}
@Test(description = "Test compound assignment with subtraction on array element.")
public void testCompoundAssignmentSubtractionArrayElement() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtractionArrayElement");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 90);
}
@Test(description = "Test compound assignment with multiplication on array element.")
public void testCompoundAssignmentMultiplicationArrayElement() {
  // Invoke the Ballerina function and verify the single integer result.
  BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplicationArrayElement");
  Assert.assertEquals(returns.length, 1);
  Assert.assertTrue(returns[0] instanceof BInteger);
  Assert.assertEquals(((BInteger) returns[0]).intValue(), 1000);
}
@Test(description = "Test compound assignment with division on array element.")
public void testCompoundAssignmentDivisionArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivisionArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with addition on struct element.")
public void testCompoundAssignmentAdditionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 110);
}
@Test(description = "Test compound assignment with subtraction on struct element.")
public void testCompoundAssignmentSubtractionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtractionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 90);
}
@Test(description = "Test compound assignment with multiplication on struct element.")
public void testCompoundAssignmentMultiplicationStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplicationStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 1000);
}
@Test(description = "Test compound assignment with division on struct element.")
public void testCompoundAssignmentDivisionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivisionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test increment operator on struct element.")
public void testIncrementOperatorArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 101);
}
@Test(description = "Test decrement operator on array element.")
public void testDecrementOperatorArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 99);
}
@Test(description = "Test increment operator on struct element.")
public void testIncrementOperatorStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 889);
}
@Test(description = "Test increment operator on float.")
public void testDecrementOperatorStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 887);
}
@Test(description = "Test increment operator on float.")
public void testIncrementOperatorFloat() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorFloat");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 101.0);
}
@Test(description = "Test decrement operator on float.")
public void testDecrementOperatorFloat() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorFloat");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 99.0);
}
@Test(description = "Test compound assignment with addition of int and string.")
public void testStringIntCompoundAssignmentAddition() {
BValue[] returns = BRunUtil.invoke(result, "testStringIntCompoundAssignmentAddition");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BString);
Assert.assertEquals(((BString) returns[0]).stringValue(), "test5");
}
@Test(description = "Test compound assignment with addition of int and float.")
public void testIntFloatCompoundAssignmentAddition() {
BValue[] returns = BRunUtil.invoke(result, "testIntFloatCompoundAssignmentAddition");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 7.5);
}
@Test(description = "Test compound assignment with addition of xml attribute and string.")
public void testXMLAttributeWithCompoundAssignment() {
BValue[] returns = BRunUtil.invoke(result, "testXMLAttributeWithCompoundAssignment");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BString);
Assert.assertEquals(((BString) returns[0]).stringValue(), "bar1bar2");
}
@Test(description = "Test compound assignment with addition recursive integer reference.")
public void testCompoundAssignmentAdditionRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with addition recursive struct element reference.")
public void testCompoundAssignmentAdditionStructElementRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElementRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 200);
}
@Test(description = "Test compound assignment with addition to expression.")
public void testCompoundAssignmentAdditionWithExpression() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithExpression");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 19);
}
@Test(description = "Test compound assignment with multiple addition.")
public void testCompoundAssignmentAdditionMultiple() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionMultiple");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 20);
}
@Test(description = "Test compound assignment with multiple addition and increments.")
public void testCompoundAssignmentAdditionMultipleWithIncrement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionMultipleWithIncrement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 19);
}
@Test(description = "Test compound assignment with addition with struct access expression.")
public void testCompoundAssignmentAdditionWithStructAccess() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithStructAccess");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 305);
}
@Test(description = "Test compound assignment with addition with function invocation expression.")
public void testCompoundAssignmentAdditionWithFunctionInvocation() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithFunctionInvocation");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 205);
}
@Test(description = "Test compound assignment with addition with two struct elements.")
public void testCompoundAssignmentAdditionStructElements() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElements");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 500);
}
@Test(description = "Test compound assignment with addition.")
public void testCompoundAssignmentOfXmlAndString() {
BValue[] returns = BRunUtil.invoke(result, "xmlCompoundAssignment");
Assert.assertTrue(returns[0] instanceof BXMLSequence);
BXMLSequence sequence = (BXMLSequence) returns[0];
Assert.assertEquals(sequence.size(), 13);
Assert.assertEquals(sequence.stringValue(),
"hello<hello xmlns:ns0=\"http:
}
@Test
public void testCompoundAssignmentAdditionRecordElementRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecordElementRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 200);
}
@Test
public void testCompoundAssignmentAdditionRecordElements() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecordElements");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 500);
}
@Test
public void testCompoundAssignmentAdditionWithRecordAccess() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithRecordAccess");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 305);
}
@Test(description = "Test compound operator negative cases.")
}
|
class CompoundAssignmentTest {
private CompileResult result;
@BeforeClass
public void setup() {
result = BCompileUtil.compile("test-src/statements/compoundassignment/compound_assignment.bal");
}
@Test(description = "Test compound assignment with addition.")
public void testCompoundAssignmentAddition() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAddition");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 15);
}
@Test(description = "Test compound assignment with subtraction.")
public void testCompoundAssignmentSubtraction() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtraction");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), -5);
}
@Test(description = "Test compound assignment with multiplication.")
public void testCompoundAssignmentMultiplication() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplication");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 50);
}
@Test(description = "Test compound assignment with division.")
public void testCompoundAssignmentDivision() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivision");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with bitwise AND.")
public void testCompoundAssignmentBitwiseAND() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseAND");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 5);
}
@Test(description = "Test compound assignment with bitwise OR.")
public void testCompoundAssignmentBitwiseOR() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseOR");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 15);
}
@Test(description = "Test compound assignment with bitwise XOR.")
public void testCompoundAssignmentBitwiseXOR() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentBitwiseXOR");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with left shift.")
public void testCompoundAssignmentLeftShift() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentLeftShift");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 16);
}
@Test(description = "Test compound assignment with right shift.")
public void testCompoundAssignmentRightShift() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentRightShift");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 4);
}
@Test(description = "Test compound assignment with logical shift.")
public void testCompoundAssignmentLogicalShift() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentLogicalShift");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 4);
}
@Test(description = "Test compound assignment with addition on array element.")
public void testCompoundAssignmentAdditionArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 110);
}
@Test(description = "Test compound assignment with subtraction on array element.")
public void testCompoundAssignmentSubtractionArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtractionArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 90);
}
@Test(description = "Test compound assignment with multiplication on array element.")
public void testCompoundAssignmentMultiplicationArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplicationArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 1000);
}
@Test(description = "Test compound assignment with division on array element.")
public void testCompoundAssignmentDivisionArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivisionArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with addition on struct element.")
public void testCompoundAssignmentAdditionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 110);
}
@Test(description = "Test compound assignment with subtraction on struct element.")
public void testCompoundAssignmentSubtractionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentSubtractionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 90);
}
@Test(description = "Test compound assignment with multiplication on struct element.")
public void testCompoundAssignmentMultiplicationStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentMultiplicationStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 1000);
}
@Test(description = "Test compound assignment with division on struct element.")
public void testCompoundAssignmentDivisionStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentDivisionStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test increment operator on struct element.")
public void testIncrementOperatorArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 101);
}
@Test(description = "Test decrement operator on array element.")
public void testDecrementOperatorArrayElement() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorArrayElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 99);
}
@Test(description = "Test increment operator on struct element.")
public void testIncrementOperatorStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 889);
}
@Test(description = "Test increment operator on float.")
public void testDecrementOperatorStructElement() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorStructElement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 887);
}
@Test(description = "Test increment operator on float.")
public void testIncrementOperatorFloat() {
BValue[] returns = BRunUtil.invoke(result, "testIncrementOperatorFloat");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 101.0);
}
@Test(description = "Test decrement operator on float.")
public void testDecrementOperatorFloat() {
BValue[] returns = BRunUtil.invoke(result, "testDecrementOperatorFloat");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 99.0);
}
@Test(description = "Test compound assignment with addition of int and string.")
public void testStringIntCompoundAssignmentAddition() {
BValue[] returns = BRunUtil.invoke(result, "testStringIntCompoundAssignmentAddition");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BString);
Assert.assertEquals(((BString) returns[0]).stringValue(), "test5");
}
@Test(description = "Test compound assignment with addition of int and float.")
public void testIntFloatCompoundAssignmentAddition() {
BValue[] returns = BRunUtil.invoke(result, "testIntFloatCompoundAssignmentAddition");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BFloat);
Assert.assertEquals(((BFloat) returns[0]).floatValue(), 7.5);
}
@Test(description = "Test compound assignment with addition of xml attribute and string.")
public void testXMLAttributeWithCompoundAssignment() {
BValue[] returns = BRunUtil.invoke(result, "testXMLAttributeWithCompoundAssignment");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BString);
Assert.assertEquals(((BString) returns[0]).stringValue(), "bar1bar2");
}
@Test(description = "Test compound assignment with addition recursive integer reference.")
public void testCompoundAssignmentAdditionRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 10);
}
@Test(description = "Test compound assignment with addition recursive struct element reference.")
public void testCompoundAssignmentAdditionStructElementRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElementRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 200);
}
@Test(description = "Test compound assignment with addition to expression.")
public void testCompoundAssignmentAdditionWithExpression() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithExpression");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 19);
}
@Test(description = "Test compound assignment with multiple addition.")
public void testCompoundAssignmentAdditionMultiple() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionMultiple");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 20);
}
@Test(description = "Test compound assignment with multiple addition and increments.")
public void testCompoundAssignmentAdditionMultipleWithIncrement() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionMultipleWithIncrement");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 19);
}
@Test(description = "Test compound assignment with addition with struct access expression.")
public void testCompoundAssignmentAdditionWithStructAccess() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithStructAccess");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 305);
}
@Test(description = "Test compound assignment with addition with function invocation expression.")
public void testCompoundAssignmentAdditionWithFunctionInvocation() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithFunctionInvocation");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 205);
}
@Test(description = "Test compound assignment with addition with two struct elements.")
public void testCompoundAssignmentAdditionStructElements() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionStructElements");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 500);
}
@Test(description = "Test compound assignment with addition.")
public void testCompoundAssignmentOfXmlAndString() {
BValue[] returns = BRunUtil.invoke(result, "xmlCompoundAssignment");
Assert.assertTrue(returns[0] instanceof BXMLSequence);
BXMLSequence sequence = (BXMLSequence) returns[0];
Assert.assertEquals(sequence.size(), 13);
Assert.assertEquals(sequence.stringValue(),
"hello<hello xmlns:ns0=\"http:
}
@Test
public void testCompoundAssignmentAdditionRecordElementRecursive() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecordElementRecursive");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 200);
}
@Test
public void testCompoundAssignmentAdditionRecordElements() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionRecordElements");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 500);
}
@Test
public void testCompoundAssignmentAdditionWithRecordAccess() {
BValue[] returns = BRunUtil.invoke(result, "testCompoundAssignmentAdditionWithRecordAccess");
Assert.assertEquals(returns.length, 1);
Assert.assertTrue(returns[0] instanceof BInteger);
Assert.assertEquals(((BInteger) returns[0]).intValue(), 305);
}
@Test(description = "Test compound operator negative cases.")
}
|
Fixed. ApplicationDispatcherBootstrap now returns an "unknown" JobResult in case the job result can no longer be retrieved after a failover.
|
/**
 * Verifies that re-submitting a job whose fixed {@code JobID} collides with an
 * already globally-terminated job does not fail the application: the bootstrap
 * is expected to tolerate the {@code DuplicateJobSubmissionException} and fall
 * back to querying the (FINISHED) job status and result from the dispatcher.
 *
 * @throws Throwable if the application future does not complete successfully
 *     within the timeout
 */
public void testDuplicateJobSubmissionWithTerminatedJobId() throws Throwable {
        final JobID testJobID = new JobID(0, 2);
        final Configuration configurationUnderTest = getConfiguration();
        // Pin the job id so the submission deterministically collides with the
        // "already terminated" job simulated by the dispatcher stub below.
        configurationUnderTest.set(
                PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
        configurationUnderTest.set(
                HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
        final TestingDispatcherGateway.Builder dispatcherBuilder =
                new TestingDispatcherGateway.Builder()
                        .setSubmitFunction(
                                jobGraph -> {
                                    final CompletableFuture<Acknowledge> submit =
                                            new CompletableFuture<>();
                                    // Use the named factory instead of the opaque
                                    // boolean-flag constructor: it documents that the
                                    // duplicate job is globally terminated (same style
                                    // as the sibling version of this test).
                                    submit.completeExceptionally(
                                            DuplicateJobSubmissionException
                                                    .ofGloballyTerminated(testJobID));
                                    return submit;
                                })
                        .setRequestJobStatusFunction(
                                jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                        .setRequestJobResultFunction(
                                jobId ->
                                        CompletableFuture.completedFuture(
                                                createSuccessfulJobResult(jobId)));
        final CompletableFuture<Void> applicationFuture =
                runApplication(dispatcherBuilder, configurationUnderTest, 1);
        // Must complete normally: duplicate submission of a terminated job is
        // expected to be tolerated, not propagated as a failure.
        applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
|
CompletableFuture.completedFuture(
|
/**
 * Checks that a duplicate submission of a globally-terminated job (same fixed
 * {@code JobID}) is tolerated: the dispatcher stub rejects the submission with
 * {@code DuplicateJobSubmissionException}, reports the job as FINISHED, and the
 * application future is still expected to complete successfully.
 *
 * @throws Throwable if the application future does not complete successfully
 *     within the timeout
 */
public void testDuplicateJobSubmissionWithTerminatedJobId() throws Throwable {
        final JobID fixedJobId = new JobID(0, 2);
        final Configuration config = getConfiguration();
        // Fix the job id so that the submission collides with the terminated job
        // simulated by the gateway stub, and enable HA mode for the scenario.
        config.set(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, fixedJobId.toHexString());
        config.set(HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
        final TestingDispatcherGateway.Builder gatewayBuilder =
                new TestingDispatcherGateway.Builder()
                        .setSubmitFunction(
                                jobGraph -> {
                                    // Reject every submission as a duplicate of the
                                    // globally terminated job.
                                    return FutureUtils.completedExceptionally(
                                            DuplicateJobSubmissionException
                                                    .ofGloballyTerminated(fixedJobId));
                                })
                        .setRequestJobStatusFunction(
                                jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                        .setRequestJobResultFunction(
                                jobId ->
                                        CompletableFuture.completedFuture(
                                                createSuccessfulJobResult(jobId)));
        // The bootstrap must treat the duplicate-of-terminated-job rejection as
        // success and finish within the timeout.
        final CompletableFuture<Void> applicationFuture =
                runApplication(gatewayBuilder, config, 1);
        applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
|
class ApplicationDispatcherBootstrapTest extends TestLogger {
private static final String MULTI_EXECUTE_JOB_CLASS_NAME =
"org.apache.flink.client.testjar.MultiExecuteJob";
private static final int TIMEOUT_SECONDS = 10;
final ScheduledExecutorService executor = Executors.newScheduledThreadPool(4);
final ScheduledExecutor scheduledExecutor = new ScheduledExecutorServiceAdapter(executor);
@After
public void cleanup() {
ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, executor);
}
@Test
public void testExceptionThrownWhenApplicationContainsNoJobs() throws Throwable {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()));
final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 0);
assertException(applicationFuture, ApplicationExecutionException.class);
}
@Test
public void testOnlyOneJobIsAllowedWithHa() throws Throwable {
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testOnlyOneJobAllowedWithStaticJobId() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testOnlyOneJobAllowedWithStaticJobIdAndHa() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testJobIdDefaultsToZeroWithHa() throws Throwable {
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 0L)));
}
@Test
public void testStaticJobId() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 2L)));
}
@Test
public void testStaticJobIdWithHa() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 2L)));
}
/**
 * Verifies that the application completion future fails with {@code ApplicationStatus.FAILED}
 * as soon as one of the submitted jobs fails, even while the other job is still RUNNING
 * (its job-result future never completes).
 */
@Test
public void testApplicationFailsAsSoonAsOneJobFails() throws Throwable {
    final ConcurrentLinkedDeque<JobID> submittedJobIds = new ConcurrentLinkedDeque<>();
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> {
                                submittedJobIds.add(jobGraph.getJobID());
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .setRequestJobStatusFunction(
                            jobId -> {
                                // Only the first submitted job reports FAILED; all others
                                // stay RUNNING for the duration of the test.
                                if (jobId.equals(submittedJobIds.peek())) {
                                    return CompletableFuture.completedFuture(JobStatus.FAILED);
                                }
                                return CompletableFuture.completedFuture(JobStatus.RUNNING);
                            })
                    .setRequestJobResultFunction(
                            jobId -> {
                                if (jobId.equals(submittedJobIds.peek())) {
                                    return CompletableFuture.completedFuture(
                                            createFailedJobResult(jobId));
                                }
                                // Never completes: simulates a job that keeps running.
                                return new CompletableFuture<>();
                            });
    final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 2);
    final UnsuccessfulExecutionException exception =
            assertException(applicationFuture, UnsuccessfulExecutionException.class);
    assertEquals(exception.getStatus(), ApplicationStatus.FAILED);
}
/** Verifies that the application completion future completes normally when all jobs finish. */
@Test
public void testApplicationSucceedsWhenAllJobsSucceed() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createSuccessfulJobResult(jobId)));
    final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 3);
    // Completing without an exception within the timeout is the assertion here.
    applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
/**
 * Verifies that the cluster is shut down with status {@code CANCELED} when a job of the
 * application is cancelled.
 */
@Test
public void testDispatcherIsCancelledWhenOneJobIsCancelled() throws Exception {
    final CompletableFuture<ApplicationStatus> clusterShutdownStatus =
            new CompletableFuture<>();
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
                    .setClusterShutdownFunction(
                            (status) -> {
                                // Record the status the bootstrap shuts the cluster down with.
                                clusterShutdownStatus.complete(status);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createCancelledJobResult(jobId)));
    ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3, dispatcherBuilder.build(), scheduledExecutor);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    assertThat(
            clusterShutdownStatus.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
            is(ApplicationStatus.CANCELED));
}
/**
 * Verifies that the application execution future finishes once the application (and thus the
 * cluster shutdown future) completes.
 */
@Test
public void testApplicationTaskFinishesWhenApplicationFinishes() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createSuccessfulJobResult(jobId)));
    ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3, dispatcherBuilder.build(), scheduledExecutor);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    ScheduledFuture<?> applicationExecutionFuture = bootstrap.getApplicationExecutionFuture();
    // Both futures must complete within the timeout for the test to pass.
    shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    applicationExecutionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
/**
 * Verifies that stopping the bootstrap cancels the application execution future and completes
 * the error handler and shutdown futures exceptionally with a {@link CancellationException},
 * while the job is still RUNNING.
 */
@Test
public void testApplicationIsStoppedWhenStoppingBootstrap() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.RUNNING));
    final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
    final ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3,
                    dispatcherBuilder.build(),
                    scheduledExecutor,
                    errorHandlerFuture::completeExceptionally);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    ScheduledFuture<?> applicationExecutionFuture = bootstrap.getApplicationExecutionFuture();
    bootstrap.stop();
    assertException(errorHandlerFuture, CancellationException.class);
    assertException(shutdownFuture, CancellationException.class);
    assertThat(applicationExecutionFuture.isCancelled(), is(true));
}
/**
 * Verifies that the fatal error handler is invoked (with a {@link CancellationException})
 * when the bootstrap is stopped while the application is still running.
 */
@Test
public void testErrorHandlerIsCalledWhenStoppingBootstrap() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.RUNNING));
    final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
    final ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    2,
                    dispatcherBuilder.build(),
                    scheduledExecutor,
                    errorHandlerFuture::completeExceptionally);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    bootstrap.stop();
    assertException(errorHandlerFuture, CancellationException.class);
    assertException(shutdownFuture, CancellationException.class);
}
/**
 * Verifies that a job submission failure is reported to the fatal error handler as an
 * {@link ApplicationExecutionException} and that the cluster is NOT shut down in that case.
 */
@Test
public void testErrorHandlerIsCalledWhenSubmissionFails() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> {
                                // Simulate a submission that fails synchronously.
                                throw new FlinkRuntimeException("Nope!");
                            })
                    .setClusterShutdownFunction(
                            status -> {
                                fail("We should not call shutdownCluster()");
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            });
    final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
    final TestingDispatcherGateway dispatcherGateway = dispatcherBuilder.build();
    final ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3,
                    dispatcherGateway,
                    scheduledExecutor,
                    errorHandlerFuture::completeExceptionally);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    assertException(errorHandlerFuture, ApplicationExecutionException.class);
    assertException(shutdownFuture, ApplicationExecutionException.class);
}
/**
 * Verifies that in ATTACHED execution mode a cancelled job still leads to a cluster shutdown
 * with status {@code CANCELED} and an {@link UnsuccessfulExecutionException} on the
 * application completion future.
 */
@Test
public void testClusterIsShutdownInAttachedModeWhenJobCancelled() throws Exception {
    final CompletableFuture<ApplicationStatus> clusterShutdown = new CompletableFuture<>();
    final TestingDispatcherGateway dispatcherGateway =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createCancelledJobResult(jobId)))
                    .setClusterShutdownFunction(
                            status -> {
                                clusterShutdown.complete(status);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .build();
    // Builds the packaged user program directly (instead of using getProgram) because
    // this test passes a second argument to the entry point.
    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setUserClassPaths(
                            Collections.singletonList(
                                    new File(CliFrontendTestUtils.getTestJarPath())
                                            .toURI()
                                            .toURL()))
                    .setEntryPointClassName(MULTI_EXECUTE_JOB_CLASS_NAME)
                    .setArguments(String.valueOf(2), String.valueOf(true))
                    .build();
    final Configuration configuration = getConfiguration();
    configuration.set(DeploymentOptions.ATTACHED, true);
    final ApplicationDispatcherBootstrap bootstrap =
            new ApplicationDispatcherBootstrap(
                    program,
                    Collections.emptyList(),
                    configuration,
                    dispatcherGateway,
                    scheduledExecutor,
                    e -> {});
    final CompletableFuture<Void> applicationFuture =
            bootstrap.getApplicationCompletionFuture();
    assertException(applicationFuture, UnsuccessfulExecutionException.class);
    assertEquals(clusterShutdown.get(), ApplicationStatus.CANCELED);
}
/** Verifies that a successful application shuts the cluster down with status SUCCEEDED. */
@Test
public void testClusterShutdownWhenApplicationSucceeds() throws Exception {
    final CompletableFuture<ApplicationStatus> externalShutdownFuture =
            new CompletableFuture<>();
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createSuccessfulJobResult(jobId)))
                    .setClusterShutdownFunction(
                            (status) -> {
                                externalShutdownFuture.complete(status);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            });
    ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3, dispatcherBuilder.build(), scheduledExecutor);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    assertThat(
            externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
            is(ApplicationStatus.SUCCEEDED));
}
/** Verifies that a failed application shuts the cluster down with status FAILED. */
@Test
public void testClusterShutdownWhenApplicationFails() throws Exception {
    final CompletableFuture<ApplicationStatus> externalShutdownFuture =
            new CompletableFuture<>();
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FAILED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createFailedJobResult(jobId)))
                    .setClusterShutdownFunction(
                            (status) -> {
                                externalShutdownFuture.complete(status);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            });
    ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3, dispatcherBuilder.build(), scheduledExecutor);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    assertThat(
            externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
            is(ApplicationStatus.FAILED));
}
/** Verifies that a cancelled application shuts the cluster down with status CANCELED. */
@Test
public void testClusterShutdownWhenApplicationGetsCancelled() throws Exception {
    final CompletableFuture<ApplicationStatus> externalShutdownFuture =
            new CompletableFuture<>();
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createCancelledJobResult(jobId)))
                    .setClusterShutdownFunction(
                            (status) -> {
                                externalShutdownFuture.complete(status);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            });
    ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(
                    3, dispatcherBuilder.build(), scheduledExecutor);
    final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
    shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    assertThat(
            externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
            is(ApplicationStatus.CANCELED));
}
/**
 * Verifies that the cluster is NOT shut down when the application status is UNKNOWN; the
 * shutdown future instead fails with an {@link UnsuccessfulExecutionException} carrying
 * {@code ApplicationStatus.UNKNOWN}.
 */
@Test
public void testClusterDoesNotShutdownWhenApplicationStatusUnknown() throws Exception {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FAILED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createUnknownJobResult(jobId)))
                    .setClusterShutdownFunction(
                            status -> {
                                fail("We should not call shutdownCluster()");
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            });
    final TestingDispatcherGateway dispatcherGateway = dispatcherBuilder.build();
    final ApplicationDispatcherBootstrap bootstrap =
            createApplicationDispatcherBootstrap(3, dispatcherGateway, scheduledExecutor);
    final CompletableFuture<Acknowledge> applicationFuture =
            bootstrap.getClusterShutdownFuture();
    final UnsuccessfulExecutionException exception =
            assertException(applicationFuture, UnsuccessfulExecutionException.class);
    assertEquals(exception.getStatus(), ApplicationStatus.UNKNOWN);
}
/**
 * Verifies that a {@link DuplicateJobSubmissionException} for a still-running job (HA mode
 * with a fixed job id) surfaces on the application completion future and reports the job as
 * not terminated.
 */
// FIX: the annotation was duplicated ("@Test" appeared twice). @Test is not a repeatable
// annotation, so the duplicate was a compile error; exactly one @Test is kept.
@Test
public void testDuplicateJobSubmissionWithRunningJobId() throws Throwable {
    final JobID testJobID = new JobID(0, 2);
    final Configuration configurationUnderTest = getConfiguration();
    configurationUnderTest.set(
            PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
    configurationUnderTest.set(
            HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> {
                                // Simulate the dispatcher rejecting a duplicate submission
                                // of a job that is still running (isTerminated == false).
                                final CompletableFuture<Acknowledge> submit =
                                        new CompletableFuture<>();
                                submit.completeExceptionally(
                                        new DuplicateJobSubmissionException(testJobID, false));
                                return submit;
                            });
    final CompletableFuture<Void> applicationFuture =
            runApplication(dispatcherBuilder, configurationUnderTest, 1);
    final ExecutionException executionException =
            assertThrows(
                    ExecutionException.class,
                    () -> applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS));
    final Optional<DuplicateJobSubmissionException> maybeDuplicate =
            ExceptionUtils.findThrowable(
                    executionException, DuplicateJobSubmissionException.class);
    assertTrue(maybeDuplicate.isPresent());
    assertFalse(maybeDuplicate.get().isTerminated());
}
/** Runs the application with the default test {@link Configuration} and the given job count. */
private CompletableFuture<Void> runApplication(
        TestingDispatcherGateway.Builder dispatcherBuilder, int noOfJobs)
        throws FlinkException {
    return runApplication(dispatcherBuilder, getConfiguration(), noOfJobs);
}
/**
 * Runs the application against a dispatcher whose jobs all succeed immediately, using the
 * provided configuration and the given number of jobs.
 */
private CompletableFuture<Void> runApplication(
        final Configuration configuration, final int noOfJobs) throws Throwable {
    final TestingDispatcherGateway.Builder dispatcherBuilder =
            new TestingDispatcherGateway.Builder()
                    .setSubmitFunction(
                            jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                    .setRequestJobStatusFunction(
                            jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
                    .setRequestJobResultFunction(
                            jobId ->
                                    CompletableFuture.completedFuture(
                                            createSuccessfulJobResult(jobId)));
    return runApplication(dispatcherBuilder, configuration, noOfJobs);
}
/**
 * Creates an {@link ApplicationDispatcherBootstrap} for the multi-execute test job and
 * returns its application completion future. The fatal error handler is a no-op.
 */
private CompletableFuture<Void> runApplication(
        TestingDispatcherGateway.Builder dispatcherBuilder,
        Configuration configuration,
        int noOfJobs)
        throws FlinkException {
    final PackagedProgram program = getProgram(noOfJobs);
    final ApplicationDispatcherBootstrap bootstrap =
            new ApplicationDispatcherBootstrap(
                    program,
                    Collections.emptyList(),
                    configuration,
                    dispatcherBuilder.build(),
                    scheduledExecutor,
                    exception -> {});
    return bootstrap.getApplicationCompletionFuture();
}
/** Creates a bootstrap with a no-op fatal error handler. */
private ApplicationDispatcherBootstrap createApplicationDispatcherBootstrap(
        final int noOfJobs,
        final DispatcherGateway dispatcherGateway,
        final ScheduledExecutor scheduledExecutor)
        throws FlinkException {
    return createApplicationDispatcherBootstrap(
            noOfJobs, dispatcherGateway, scheduledExecutor, exception -> {});
}
/**
 * Creates a bootstrap for the multi-execute test job with the default test configuration and
 * the given fatal error handler.
 */
private ApplicationDispatcherBootstrap createApplicationDispatcherBootstrap(
        final int noOfJobs,
        final DispatcherGateway dispatcherGateway,
        final ScheduledExecutor scheduledExecutor,
        final FatalErrorHandler errorHandler)
        throws FlinkException {
    final PackagedProgram program = getProgram(noOfJobs);
    return new ApplicationDispatcherBootstrap(
            program,
            Collections.emptyList(),
            getConfiguration(),
            dispatcherGateway,
            scheduledExecutor,
            errorHandler);
}
/**
 * Builds a {@link PackagedProgram} for the multi-execute test job from the test jar, passing
 * the desired number of jobs as the program argument.
 *
 * @throws FlinkException if the jar cannot be located or the entry point cannot be loaded
 */
private PackagedProgram getProgram(int noOfJobs) throws FlinkException {
    try {
        return PackagedProgram.newBuilder()
                .setUserClassPaths(
                        Collections.singletonList(
                                new File(CliFrontendTestUtils.getTestJarPath())
                                        .toURI()
                                        .toURL()))
                .setEntryPointClassName(MULTI_EXECUTE_JOB_CLASS_NAME)
                .setArguments(String.valueOf(noOfJobs))
                .build();
    } catch (ProgramInvocationException | FileNotFoundException | MalformedURLException e) {
        throw new FlinkException("Could not load the provided entrypoint class.", e);
    }
}
/** Creates a {@link JobResult} with status UNKNOWN and a serialized failure cause. */
private static JobResult createUnknownJobResult(final JobID jobId) {
    return new JobResult.Builder()
            .jobId(jobId)
            .netRuntime(2L)
            .applicationStatus(ApplicationStatus.UNKNOWN)
            .serializedThrowable(
                    new SerializedThrowable(
                            new JobExecutionException(jobId, "unknown bla bla bla")))
            .build();
}
/** Creates a {@link JobResult} with status FAILED and a serialized failure cause. */
private static JobResult createFailedJobResult(final JobID jobId) {
    return new JobResult.Builder()
            .jobId(jobId)
            .netRuntime(2L)
            .applicationStatus(ApplicationStatus.FAILED)
            .serializedThrowable(
                    new SerializedThrowable(new JobExecutionException(jobId, "bla bla bla")))
            .build();
}
/** Creates a {@link JobResult} with status SUCCEEDED. */
private static JobResult createSuccessfulJobResult(final JobID jobId) {
    return new JobResult.Builder()
            .jobId(jobId)
            .netRuntime(2L)
            .applicationStatus(ApplicationStatus.SUCCEEDED)
            .build();
}
/** Creates a {@link JobResult} with status CANCELED wrapping a {@link JobCancellationException}. */
private static JobResult createCancelledJobResult(final JobID jobId) {
    return new JobResult.Builder()
            .jobId(jobId)
            .netRuntime(2L)
            .serializedThrowable(
                    new SerializedThrowable(new JobCancellationException(jobId, "Hello", null)))
            .applicationStatus(ApplicationStatus.CANCELED)
            .build();
}
/**
 * Awaits the given future (bounded by {@code TIMEOUT_SECONDS}) and asserts that it fails with
 * the given exception type somewhere in its cause chain, returning that exception.
 *
 * @throws Exception if the future completes normally, or fails with an unrelated throwable
 */
private static <T, E extends Throwable> E assertException(
        CompletableFuture<T> future, Class<E> exceptionClass) throws Exception {
    try {
        future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    } catch (Throwable t) {
        final Optional<E> found = ExceptionUtils.findThrowable(t, exceptionClass);
        if (found.isPresent()) {
            return found.get();
        }
        // Not the expected type anywhere in the chain: propagate the original failure.
        throw t;
    }
    // Completing normally is a test failure here.
    throw new Exception(
            "Future should have completed exceptionally with "
                    + exceptionClass.getCanonicalName()
                    + ".");
}
/** Returns a fresh {@link Configuration} targeting the embedded executor. */
private Configuration getConfiguration() {
    final Configuration config = new Configuration();
    config.set(DeploymentOptions.TARGET, EmbeddedExecutor.NAME);
    return config;
}
}
// NOTE(review): stray "|" artifact between two duplicated copies of this test class;
// the duplicated class that follows should be removed before merge.
class ApplicationDispatcherBootstrapTest extends TestLogger {
private static final String MULTI_EXECUTE_JOB_CLASS_NAME =
"org.apache.flink.client.testjar.MultiExecuteJob";
private static final int TIMEOUT_SECONDS = 10;
final ScheduledExecutorService executor = Executors.newScheduledThreadPool(4);
final ScheduledExecutor scheduledExecutor = new ScheduledExecutorServiceAdapter(executor);
@After
public void cleanup() {
ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, executor);
}
@Test
public void testExceptionThrownWhenApplicationContainsNoJobs() throws Throwable {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()));
final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 0);
assertException(applicationFuture, ApplicationExecutionException.class);
}
@Test
public void testOnlyOneJobIsAllowedWithHa() throws Throwable {
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testOnlyOneJobAllowedWithStaticJobId() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testOnlyOneJobAllowedWithStaticJobIdAndHa() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<Void> applicationFuture = runApplication(configurationUnderTest, 2);
assertException(applicationFuture, FlinkRuntimeException.class);
}
@Test
public void testJobIdDefaultsToZeroWithHa() throws Throwable {
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 0L)));
}
@Test
public void testStaticJobId() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 2L)));
}
@Test
public void testStaticJobIdWithHa() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final CompletableFuture<JobID> submittedJobId = new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobId.complete(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(submittedJobId.get(TIMEOUT_SECONDS, TimeUnit.SECONDS), is(new JobID(0L, 2L)));
}
@Test
public void testApplicationFailsAsSoonAsOneJobFails() throws Throwable {
final ConcurrentLinkedDeque<JobID> submittedJobIds = new ConcurrentLinkedDeque<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
submittedJobIds.add(jobGraph.getJobID());
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobStatusFunction(
jobId -> {
if (jobId.equals(submittedJobIds.peek())) {
return CompletableFuture.completedFuture(JobStatus.FAILED);
}
return CompletableFuture.completedFuture(JobStatus.RUNNING);
})
.setRequestJobResultFunction(
jobId -> {
if (jobId.equals(submittedJobIds.peek())) {
return CompletableFuture.completedFuture(
createFailedJobResult(jobId));
}
return new CompletableFuture<>();
});
final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 2);
final UnsuccessfulExecutionException exception =
assertException(applicationFuture, UnsuccessfulExecutionException.class);
assertEquals(exception.getStatus(), ApplicationStatus.FAILED);
}
@Test
public void testApplicationSucceedsWhenAllJobsSucceed() throws Exception {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
final CompletableFuture<Void> applicationFuture = runApplication(dispatcherBuilder, 3);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
@Test
public void testDispatcherIsCancelledWhenOneJobIsCancelled() throws Exception {
final CompletableFuture<ApplicationStatus> clusterShutdownStatus =
new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
.setClusterShutdownFunction(
(status) -> {
clusterShutdownStatus.complete(status);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createCancelledJobResult(jobId)));
ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3, dispatcherBuilder.build(), scheduledExecutor);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(
clusterShutdownStatus.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
is(ApplicationStatus.CANCELED));
}
@Test
public void testApplicationTaskFinishesWhenApplicationFinishes() throws Exception {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3, dispatcherBuilder.build(), scheduledExecutor);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
ScheduledFuture<?> applicationExecutionFuture = bootstrap.getApplicationExecutionFuture();
shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
applicationExecutionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
@Test
public void testApplicationIsStoppedWhenStoppingBootstrap() throws Exception {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.RUNNING));
final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
final ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3,
dispatcherBuilder.build(),
scheduledExecutor,
errorHandlerFuture::completeExceptionally);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
ScheduledFuture<?> applicationExecutionFuture = bootstrap.getApplicationExecutionFuture();
bootstrap.stop();
assertException(errorHandlerFuture, CancellationException.class);
assertException(shutdownFuture, CancellationException.class);
assertThat(applicationExecutionFuture.isCancelled(), is(true));
}
@Test
public void testErrorHandlerIsCalledWhenStoppingBootstrap() throws Exception {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.RUNNING));
final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
final ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
2,
dispatcherBuilder.build(),
scheduledExecutor,
errorHandlerFuture::completeExceptionally);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
bootstrap.stop();
assertException(errorHandlerFuture, CancellationException.class);
assertException(shutdownFuture, CancellationException.class);
}
@Test
public void testErrorHandlerIsCalledWhenSubmissionFails() throws Exception {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> {
throw new FlinkRuntimeException("Nope!");
})
.setClusterShutdownFunction(
status -> {
fail("We should not call shutdownCluster()");
return CompletableFuture.completedFuture(Acknowledge.get());
});
final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
final TestingDispatcherGateway dispatcherGateway = dispatcherBuilder.build();
final ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3,
dispatcherGateway,
scheduledExecutor,
errorHandlerFuture::completeExceptionally);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
assertException(errorHandlerFuture, ApplicationExecutionException.class);
assertException(shutdownFuture, ApplicationExecutionException.class);
}
@Test
public void testClusterIsShutdownInAttachedModeWhenJobCancelled() throws Exception {
final CompletableFuture<ApplicationStatus> clusterShutdown = new CompletableFuture<>();
final TestingDispatcherGateway dispatcherGateway =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createCancelledJobResult(jobId)))
.setClusterShutdownFunction(
status -> {
clusterShutdown.complete(status);
return CompletableFuture.completedFuture(Acknowledge.get());
})
.build();
final PackagedProgram program =
PackagedProgram.newBuilder()
.setUserClassPaths(
Collections.singletonList(
new File(CliFrontendTestUtils.getTestJarPath())
.toURI()
.toURL()))
.setEntryPointClassName(MULTI_EXECUTE_JOB_CLASS_NAME)
.setArguments(String.valueOf(2), String.valueOf(true))
.build();
final Configuration configuration = getConfiguration();
configuration.set(DeploymentOptions.ATTACHED, true);
final ApplicationDispatcherBootstrap bootstrap =
new ApplicationDispatcherBootstrap(
program,
Collections.emptyList(),
configuration,
dispatcherGateway,
scheduledExecutor,
e -> {});
final CompletableFuture<Void> applicationFuture =
bootstrap.getApplicationCompletionFuture();
assertException(applicationFuture, UnsuccessfulExecutionException.class);
assertEquals(clusterShutdown.get(), ApplicationStatus.CANCELED);
}
@Test
public void testClusterShutdownWhenApplicationSucceeds() throws Exception {
final CompletableFuture<ApplicationStatus> externalShutdownFuture =
new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)))
.setClusterShutdownFunction(
(status) -> {
externalShutdownFuture.complete(status);
return CompletableFuture.completedFuture(Acknowledge.get());
});
ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3, dispatcherBuilder.build(), scheduledExecutor);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(
externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
is(ApplicationStatus.SUCCEEDED));
}
@Test
public void testClusterShutdownWhenApplicationFails() throws Exception {
final CompletableFuture<ApplicationStatus> externalShutdownFuture =
new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FAILED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createFailedJobResult(jobId)))
.setClusterShutdownFunction(
(status) -> {
externalShutdownFuture.complete(status);
return CompletableFuture.completedFuture(Acknowledge.get());
});
ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3, dispatcherBuilder.build(), scheduledExecutor);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(
externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
is(ApplicationStatus.FAILED));
}
@Test
public void testClusterShutdownWhenApplicationGetsCancelled() throws Exception {
final CompletableFuture<ApplicationStatus> externalShutdownFuture =
new CompletableFuture<>();
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.CANCELED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createCancelledJobResult(jobId)))
.setClusterShutdownFunction(
(status) -> {
externalShutdownFuture.complete(status);
return CompletableFuture.completedFuture(Acknowledge.get());
});
ApplicationDispatcherBootstrap bootstrap =
createApplicationDispatcherBootstrap(
3, dispatcherBuilder.build(), scheduledExecutor);
final CompletableFuture<Acknowledge> shutdownFuture = bootstrap.getClusterShutdownFuture();
shutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertThat(
externalShutdownFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS),
is(ApplicationStatus.CANCELED));
}
    @Test
    public void testClusterDoesNotShutdownWhenApplicationStatusUnknown() throws Exception {
        // Gateway stub: the job FAILED but its JobResult carries an UNKNOWN application
        // status, so the bootstrap must NOT call shutdownCluster().
        final TestingDispatcherGateway.Builder dispatcherBuilder =
                new TestingDispatcherGateway.Builder()
                        .setSubmitFunction(
                                jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                        .setRequestJobStatusFunction(
                                jobId -> CompletableFuture.completedFuture(JobStatus.FAILED))
                        .setRequestJobResultFunction(
                                jobId ->
                                        CompletableFuture.completedFuture(
                                                createUnknownJobResult(jobId)))
                        .setClusterShutdownFunction(
                                status -> {
                                    fail("We should not call shutdownCluster()");
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                });
        final TestingDispatcherGateway dispatcherGateway = dispatcherBuilder.build();
        final ApplicationDispatcherBootstrap bootstrap =
                createApplicationDispatcherBootstrap(3, dispatcherGateway, scheduledExecutor);
        final CompletableFuture<Acknowledge> applicationFuture =
                bootstrap.getClusterShutdownFuture();
        // Instead of shutting down, the shutdown future must fail carrying the UNKNOWN status.
        final UnsuccessfulExecutionException exception =
                assertException(applicationFuture, UnsuccessfulExecutionException.class);
        assertEquals(exception.getStatus(), ApplicationStatus.UNKNOWN);
    }
@Test
/**
* In this scenario, job result is no longer present in the {@link
* org.apache.flink.runtime.dispatcher.Dispatcher dispatcher} (job has terminated and job
* manager failed over), but we know that job has already terminated from {@link
* org.apache.flink.runtime.highavailability.RunningJobsRegistry running jobs registry}.
*/
@Test
public void testDuplicateJobSubmissionWithTerminatedJobIdWithUnknownResult() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph ->
FutureUtils.completedExceptionally(
DuplicateJobSubmissionException
.ofGloballyTerminated(testJobID)))
.setRequestJobStatusFunction(
jobId ->
FutureUtils.completedExceptionally(
new FlinkJobNotFoundException(jobId)))
.setRequestJobResultFunction(
jobId ->
FutureUtils.completedExceptionally(
new FlinkJobNotFoundException(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
/**
* In this scenario, job result is no longer present in the {@link
* org.apache.flink.runtime.dispatcher.Dispatcher dispatcher} (job has terminated and job
* manager failed over), but we know that job has already terminated from {@link
* org.apache.flink.runtime.highavailability.RunningJobsRegistry running jobs registry}.
*/
@Test
public void testDuplicateJobSubmissionWithTerminatedJobIdWithUnknownResultAttached()
throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph ->
FutureUtils.completedExceptionally(
DuplicateJobSubmissionException
.ofGloballyTerminated(testJobID)))
.setRequestJobStatusFunction(
jobId ->
FutureUtils.completedExceptionally(
new FlinkJobNotFoundException(jobId)))
.setRequestJobResultFunction(
jobId ->
FutureUtils.completedExceptionally(
new FlinkJobNotFoundException(jobId)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
@Test
public void testDuplicateJobSubmissionWithRunningJobId() throws Throwable {
final JobID testJobID = new JobID(0, 2);
final Configuration configurationUnderTest = getConfiguration();
configurationUnderTest.set(
PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, testJobID.toHexString());
configurationUnderTest.set(
HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.name());
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph ->
FutureUtils.completedExceptionally(
DuplicateJobSubmissionException.of(testJobID)));
final CompletableFuture<Void> applicationFuture =
runApplication(dispatcherBuilder, configurationUnderTest, 1);
final ExecutionException executionException =
assertThrows(
ExecutionException.class,
() -> applicationFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS));
final Optional<DuplicateJobSubmissionException> maybeDuplicate =
ExceptionUtils.findThrowable(
executionException, DuplicateJobSubmissionException.class);
assertTrue(maybeDuplicate.isPresent());
assertFalse(maybeDuplicate.get().isGloballyTerminated());
}
    /** Runs the application against the default test {@link Configuration}. */
    private CompletableFuture<Void> runApplication(
            TestingDispatcherGateway.Builder dispatcherBuilder, int noOfJobs)
            throws FlinkException {
        return runApplication(dispatcherBuilder, getConfiguration(), noOfJobs);
    }
private CompletableFuture<Void> runApplication(
final Configuration configuration, final int noOfJobs) throws Throwable {
final TestingDispatcherGateway.Builder dispatcherBuilder =
new TestingDispatcherGateway.Builder()
.setSubmitFunction(
jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
.setRequestJobStatusFunction(
jobId -> CompletableFuture.completedFuture(JobStatus.FINISHED))
.setRequestJobResultFunction(
jobId ->
CompletableFuture.completedFuture(
createSuccessfulJobResult(jobId)));
return runApplication(dispatcherBuilder, configuration, noOfJobs);
}
    /**
     * Bootstraps the multi-execute test program ({@code noOfJobs} jobs) against the given
     * gateway/configuration and returns its application completion future.
     */
    private CompletableFuture<Void> runApplication(
            TestingDispatcherGateway.Builder dispatcherBuilder,
            Configuration configuration,
            int noOfJobs)
            throws FlinkException {
        final PackagedProgram program = getProgram(noOfJobs);
        final ApplicationDispatcherBootstrap bootstrap =
                new ApplicationDispatcherBootstrap(
                        program,
                        Collections.emptyList(),
                        configuration,
                        dispatcherBuilder.build(),
                        scheduledExecutor,
                        exception -> {});
        return bootstrap.getApplicationCompletionFuture();
    }
    /** Convenience overload that installs a no-op fatal error handler. */
    private ApplicationDispatcherBootstrap createApplicationDispatcherBootstrap(
            final int noOfJobs,
            final DispatcherGateway dispatcherGateway,
            final ScheduledExecutor scheduledExecutor)
            throws FlinkException {
        return createApplicationDispatcherBootstrap(
                noOfJobs, dispatcherGateway, scheduledExecutor, exception -> {});
    }
    /**
     * Creates a bootstrap for the multi-execute test program running {@code noOfJobs} jobs,
     * using the default test configuration and the given fatal error handler.
     */
    private ApplicationDispatcherBootstrap createApplicationDispatcherBootstrap(
            final int noOfJobs,
            final DispatcherGateway dispatcherGateway,
            final ScheduledExecutor scheduledExecutor,
            final FatalErrorHandler errorHandler)
            throws FlinkException {
        final PackagedProgram program = getProgram(noOfJobs);
        return new ApplicationDispatcherBootstrap(
                program,
                Collections.emptyList(),
                getConfiguration(),
                dispatcherGateway,
                scheduledExecutor,
                errorHandler);
    }
private PackagedProgram getProgram(int noOfJobs) throws FlinkException {
try {
return PackagedProgram.newBuilder()
.setUserClassPaths(
Collections.singletonList(
new File(CliFrontendTestUtils.getTestJarPath())
.toURI()
.toURL()))
.setEntryPointClassName(MULTI_EXECUTE_JOB_CLASS_NAME)
.setArguments(String.valueOf(noOfJobs), Boolean.toString(true))
.build();
} catch (ProgramInvocationException | FileNotFoundException | MalformedURLException e) {
throw new FlinkException("Could not load the provided entrypoint class.", e);
}
}
private static JobResult createUnknownJobResult(final JobID jobId) {
return new JobResult.Builder()
.jobId(jobId)
.netRuntime(2L)
.applicationStatus(ApplicationStatus.UNKNOWN)
.serializedThrowable(
new SerializedThrowable(
new JobExecutionException(jobId, "unknown bla bla bla")))
.build();
}
private static JobResult createFailedJobResult(final JobID jobId) {
return new JobResult.Builder()
.jobId(jobId)
.netRuntime(2L)
.applicationStatus(ApplicationStatus.FAILED)
.serializedThrowable(
new SerializedThrowable(new JobExecutionException(jobId, "bla bla bla")))
.build();
}
    /** Builds a successful {@link JobResult} (no throwable attached) for the given job. */
    private static JobResult createSuccessfulJobResult(final JobID jobId) {
        return new JobResult.Builder()
                .jobId(jobId)
                .netRuntime(2L)
                .applicationStatus(ApplicationStatus.SUCCEEDED)
                .build();
    }
private static JobResult createCancelledJobResult(final JobID jobId) {
return new JobResult.Builder()
.jobId(jobId)
.netRuntime(2L)
.serializedThrowable(
new SerializedThrowable(new JobCancellationException(jobId, "Hello", null)))
.applicationStatus(ApplicationStatus.CANCELED)
.build();
}
    /**
     * Waits for the future and asserts that it completes exceptionally with a cause chain
     * containing an exception of {@code exceptionClass}.
     *
     * @return the matching exception found in the cause chain
     * @throws Exception if the future completes normally or fails with an unrelated error
     */
    private static <T, E extends Throwable> E assertException(
            CompletableFuture<T> future, Class<E> exceptionClass) throws Exception {
        try {
            future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
        } catch (Throwable e) {
            Optional<E> maybeException = ExceptionUtils.findThrowable(e, exceptionClass);
            if (!maybeException.isPresent()) {
                // Unrelated failure: rethrow it (Java 7+ precise rethrow keeps this
                // compatible with the 'throws Exception' clause).
                throw e;
            }
            return maybeException.get();
        }
        throw new Exception(
                "Future should have completed exceptionally with "
                        + exceptionClass.getCanonicalName()
                        + ".");
    }
private Configuration getConfiguration() {
final Configuration configuration = new Configuration();
configuration.set(DeploymentOptions.TARGET, EmbeddedExecutor.NAME);
return configuration;
}
}
|
@g2vinay: Let's try reusing the `randomByteBufferFlux`.
|
// Appends a freshly generated random Flux<ByteBuffer> of options.getSize() bytes at offset 0.
// NOTE(review): the payload could be created once and reused across iterations instead of
// being regenerated on every call.
public Mono<Void> runAsync() {
    return dataLakeFileAsyncClient.append(createRandomByteBufferFlux(options.getSize()), 0, options.getSize());
}
|
return dataLakeFileAsyncClient.append(createRandomByteBufferFlux(options.getSize()), 0, options.getSize());
|
// Appends the pre-built, reusable byteBufferFlux at offset 0, avoiding the per-iteration
// payload allocation of the previous implementation.
public Mono<Void> runAsync() {
    return dataLakeFileAsyncClient.append(byteBufferFlux, 0, options.getSize());
}
|
/**
 * Perf-stress test that appends random data to a Data Lake file on every iteration.
 *
 * <p>NOTE(review): {@code run()} allocates a fresh random stream per call; consider creating
 * the payload once and rewinding it between iterations.
 */
class AppendFileDatalakeTest extends DirectoryTest<PerfStressOptions> {
    // Unique per-run file name so concurrent test runs do not collide.
    private static final String FILE_NAME = "perfstress-file-" + UUID.randomUUID().toString();
    protected final DataLakeFileClient dataLakeFileClient;
    protected final DataLakeFileAsyncClient dataLakeFileAsyncClient;
    public AppendFileDatalakeTest(PerfStressOptions options) {
        super(options);
        dataLakeFileClient = dataLakeDirectoryClient.getFileClient(FILE_NAME);
        dataLakeFileAsyncClient = dataLakeDirectoryAsyncClient.getFileAsyncClient(FILE_NAME);
    }
    // Creates the target file once before any iterations run.
    public Mono<Void> globalSetupAsync() {
        return super.globalSetupAsync()
            .then(dataLakeFileAsyncClient.create())
            .then();
    }
    @Override
    public void run() {
        // Appends options.getSize() random bytes at offset 0 on every iteration.
        dataLakeFileClient.append(TestDataCreationHelper.createRandomInputStream(options.getSize()),
            0, options.getSize());
    }
    @Override
    public Mono<Void> globalCleanupAsync() {
        return dataLakeFileAsyncClient.delete()
            .then(super.globalCleanupAsync())
            .then();
    }
}
|
/**
 * Perf-stress test that appends random data to a Data Lake file on every iteration,
 * reusing a single pre-built payload instead of regenerating it per call.
 */
class AppendFileDatalakeTest extends DirectoryTest<PerfStressOptions> {
    // Unique per-run file name so concurrent test runs do not collide.
    private static final String FILE_NAME = "perfstress-file-" + UUID.randomUUID().toString();
    protected final DataLakeFileClient dataLakeFileClient;
    protected final DataLakeFileAsyncClient dataLakeFileAsyncClient;
    // Reusable request payloads, created once so run()/runAsync() avoid re-allocating data.
    protected final RepeatingInputStream inputStream;
    protected final Flux<ByteBuffer> byteBufferFlux;
    public AppendFileDatalakeTest(PerfStressOptions options) {
        super(options);
        dataLakeFileClient = dataLakeDirectoryClient.getFileClient(FILE_NAME);
        dataLakeFileAsyncClient = dataLakeDirectoryAsyncClient.getFileAsyncClient(FILE_NAME);
        inputStream = (RepeatingInputStream) TestDataCreationHelper.createRandomInputStream(options.getSize());
        byteBufferFlux = createRandomByteBufferFlux(options.getSize());
    }
    // Creates the target file once before any iterations run.
    public Mono<Void> globalSetupAsync() {
        return super.globalSetupAsync()
            .then(dataLakeFileAsyncClient.create())
            .then();
    }
    @Override
    public void run() {
        // Fix: rewind and reuse the shared stream. Previously a brand-new random stream was
        // created on every call, so the reset() was a no-op and the field went unused.
        inputStream.reset();
        dataLakeFileClient.append(inputStream, 0, options.getSize());
    }
    @Override
    public Mono<Void> globalCleanupAsync() {
        return dataLakeFileAsyncClient.delete()
            .then(super.globalCleanupAsync())
            .then();
    }
}
|
The uniqueness check should compare both the value and the type. I will change it.
|
// Reports an error when two constants share a name but were initialized differently.
// NOTE(review): equality is checked via the concatenation of String.valueOf(value) and the
// type name, so distinct values with identical string representations compare equal —
// comparing the BLangConstantValue itself would be more precise.
private void checkUniqueness(BLangConstant constant) {
    if (constant.symbol.kind == SymbolKind.CONSTANT) {
        String nameString = constant.name.value;
        BLangConstantValue value = constant.symbol.value;
        // Comparison key: "<value>" plus the type's name, or "null" when the constant's
        // value could not be resolved.
        String valueNType;
        if (value == null) {
            valueNType = String.valueOf(value);
        } else {
            valueNType = String.valueOf(value) + value.type.getKind().typeName();
        }
        if (constantMap.containsKey(nameString)) {
            if (!valueNType.equals(constantMap.get(nameString))) {
                dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
            }
        } else {
            constantMap.put(nameString, valueNType);
        }
    }
}
|
String valueNType;
|
// Reports an error when two constants share a name but resolve to different values.
private void checkUniqueness(BLangConstant constant) {
    if (constant.symbol.kind == SymbolKind.CONSTANT) {
        String nameString = constant.name.value;
        BLangConstantValue value = constant.symbol.value;
        if (constantMap.containsKey(nameString)) {
            BLangConstantValue lastValue = constantMap.get(nameString);
            // Fix: 'value' is null when the constant expression could not be resolved (see
            // visitExpr returning null), so the previous 'value.equals(lastValue)' could NPE.
            boolean sameValue = value == null ? lastValue == null : value.equals(lastValue);
            if (!sameValue) {
                if (lastValue == null) {
                    dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
                } else {
                    dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER,
                               nameString, lastValue);
                }
            }
        } else {
            constantMap.put(nameString, value);
        }
    }
}
|
/**
 * Resolves the compile-time values of module-level constants by folding their initializer
 * expressions, and afterwards verifies that constants sharing a name were initialized to the
 * same value.
 */
class ConstantValueResolver extends BLangNodeVisitor {
    private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY =
            new CompilerContext.Key<>();
    // The constant whose initializer is currently being folded (drives type dispatch below).
    private BConstantSymbol currentConstSymbol;
    // Holds the value produced by the most recent visit(...) call.
    private BLangConstantValue result;
    private BLangDiagnosticLog dlog;
    private Location currentPos;
    // Constants whose values have not been computed yet, keyed by symbol.
    private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>();
    // name -> comparison key, used by checkUniqueness() after resolution.
    private Map<String, String> constantMap = new HashMap<String, String>();
    private ConstantValueResolver(CompilerContext context) {
        context.put(CONSTANT_VALUE_RESOLVER_KEY, this);
        this.dlog = BLangDiagnosticLog.getInstance(context);
    }
    public static ConstantValueResolver getInstance(CompilerContext context) {
        ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY);
        if (constantValueResolver == null) {
            constantValueResolver = new ConstantValueResolver(context);
        }
        return constantValueResolver;
    }
    /** Folds all constants of the package and then checks for conflicting redefinitions. */
    public void resolve(List<BLangConstant> constants, PackageID packageID) {
        this.dlog.setCurrentPackageId(packageID);
        constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant));
        constants.forEach(constant -> constant.accept(this));
        constantMap.clear();
        constants.forEach(constant -> checkUniqueness(constant));
    }
    @Override
    public void visit(BLangConstant constant) {
        // Save/restore the current symbol so nested resolution (via var refs) is safe.
        BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol;
        this.currentConstSymbol = constant.symbol;
        this.currentConstSymbol.value = visitExpr(constant.expr);
        unresolvedConstants.remove(this.currentConstSymbol);
        this.currentConstSymbol = tempCurrentConstSymbol;
    }
    @Override
    public void visit(BLangLiteral literal) {
        this.result = new BLangConstantValue(literal.value, literal.getBType());
    }
    @Override
    public void visit(BLangNumericLiteral literal) {
        this.result = new BLangConstantValue(literal.value, literal.getBType());
    }
    @Override
    public void visit(BLangConstRef constRef) {
        this.result = ((BConstantSymbol) constRef.symbol).value;
    }
    @Override
    public void visit(BLangSimpleVarRef varRef) {
        if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) {
            this.result = null;
            return;
        }
        BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol;
        BLangConstantValue constVal = constSymbol.value;
        if (constVal != null) {
            this.result = constVal;
            return;
        }
        // A constant referring to itself can never be resolved.
        if (this.currentConstSymbol == constSymbol) {
            dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name);
            return;
        }
        if (!this.unresolvedConstants.containsKey(constSymbol)) {
            dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value);
            this.result = null;
            return;
        }
        // Resolve the referenced constant on demand, then reuse its value.
        this.unresolvedConstants.get(constSymbol).accept(this);
        this.result = constSymbol.value;
    }
    @Override
    public void visit(BLangRecordLiteral recordLiteral) {
        Map<String, BLangConstantValue> mapConstVal = new HashMap<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            String key;
            BLangConstantValue value;
            if (field.isKeyValueField()) {
                BLangRecordLiteral.BLangRecordKeyValueField keyValuePair =
                        (BLangRecordLiteral.BLangRecordKeyValueField) field;
                NodeKind nodeKind = keyValuePair.key.expr.getKind();
                if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
                    key = (String) ((BLangLiteral) keyValuePair.key.expr).value;
                } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
                    key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value;
                } else {
                    // Computed keys cannot be folded at compile time; skip them.
                    continue;
                }
                value = visitExpr(keyValuePair.valueExpr);
            } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                BLangRecordLiteral.BLangRecordVarNameField varNameField =
                        (BLangRecordLiteral.BLangRecordVarNameField) field;
                key = varNameField.variableName.value;
                value = visitExpr(varNameField);
            } else {
                // Spread operator: merge the referenced constant map into this one.
                BLangConstantValue spreadOpConstValue =
                        visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr);
                if (spreadOpConstValue != null) {
                    mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value);
                }
                continue;
            }
            mapConstVal.put(key, value);
        }
        this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType());
    }
    @Override
    public void visit(BLangBinaryExpr binaryExpr) {
        BLangConstantValue lhs = visitExpr(binaryExpr.lhsExpr);
        BLangConstantValue rhs = visitExpr(binaryExpr.rhsExpr);
        this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind);
    }
    public void visit(BLangGroupExpr groupExpr) {
        this.result = visitExpr(groupExpr.expression);
    }
    public void visit(BLangUnaryExpr unaryExpr) {
        BLangConstantValue value = visitExpr(unaryExpr.expr);
        this.result = evaluateUnaryOperator(value, unaryExpr.operator);
    }
    /**
     * Folds a binary operation on two already-resolved operands; returns a null-valued
     * constant when either operand is unresolved or the operation fails.
     */
    private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) {
        if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) {
            return new BLangConstantValue(null, this.currentConstSymbol.type);
        }
        try {
            switch (kind) {
                case ADD:
                    return calculateAddition(lhs, rhs);
                case SUB:
                    return calculateSubtract(lhs, rhs);
                case MUL:
                    return calculateMultiplication(lhs, rhs);
                case DIV:
                    return calculateDivision(lhs, rhs);
                case MOD:
                    // Fix: calculateMod() existed but was unreachable, so constant '%'
                    // expressions were rejected as unsupported; wire it in here.
                    return calculateMod(lhs, rhs);
                case BITWISE_AND:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b);
                case BITWISE_OR:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b);
                case BITWISE_LEFT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b);
                case BITWISE_RIGHT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b);
                case BITWISE_UNSIGNED_RIGHT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b);
                case BITWISE_XOR:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b);
                default:
                    dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
            }
        } catch (NumberFormatException nfe) {
            // Malformed numeric operand: fall through and record an unresolved value.
        } catch (ArithmeticException ae) {
            dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage());
        }
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }
    /** Folds a unary operation; returns a null-valued constant when evaluation fails. */
    private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) {
        try {
            switch (kind) {
                case ADD:
                    return new BLangConstantValue(value.value, currentConstSymbol.type);
                case SUB:
                    return calculateNegation(value);
                case BITWISE_COMPLEMENT:
                    return calculateBitWiseComplement(value);
                case NOT:
                    return calculateBooleanComplement(value);
            }
        } catch (ClassCastException ce) {
            // Operand type did not match the operator: fall through to the null value.
        }
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }
    private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs,
                                                  BiFunction<Long, Long, Long> func) {
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
                Long val = func.apply((Long) lhs.value, (Long) rhs.value);
                return new BLangConstantValue(val, this.currentConstSymbol.type);
            default:
                dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
        }
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }
    private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value + (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        + Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
            case TypeTags.STRING:
                result = String.valueOf(lhs.value) + String.valueOf(rhs.value);
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value - (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        - Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value * (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        * Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                // Division by zero surfaces as an ArithmeticException handled by the caller.
                result = (Long) ((Long) lhs.value / (Long) rhs.value);
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        / Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) ((Long) lhs.value % (Long) rhs.value);
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        % Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateNegation(BLangConstantValue value) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
                result = -1 * ((Long) (value.value));
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128);
                BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128);
                BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) {
        Object result = null;
        if (this.currentConstSymbol.type.tag == TypeTags.INT) {
            result = ~((Long) (value.value));
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) {
        Object result = null;
        if (this.currentConstSymbol.type.tag == TypeTags.BOOLEAN) {
            result = !((Boolean) (value.value));
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
    /**
     * Dispatches to the matching visit(...) while saving/restoring the shared result and
     * position fields; returns null for expression kinds that cannot be constant-folded.
     */
    private BLangConstantValue visitExpr(BLangExpression node) {
        if (!node.typeChecked) {
            return null;
        }
        switch (node.getKind()) {
            case LITERAL:
            case NUMERIC_LITERAL:
            case RECORD_LITERAL_EXPR:
            case SIMPLE_VARIABLE_REF:
            case BINARY_EXPR:
            case GROUP_EXPR:
            case UNARY_EXPR:
                BLangConstantValue prevResult = this.result;
                Location prevPos = this.currentPos;
                this.currentPos = node.pos;
                this.result = null;
                node.accept(this);
                BLangConstantValue newResult = this.result;
                this.result = prevResult;
                this.currentPos = prevPos;
                return newResult;
            default:
                return null;
        }
    }
}
|
class ConstantValueResolver extends BLangNodeVisitor {
private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY =
new CompilerContext.Key<>();
private BConstantSymbol currentConstSymbol;
private BLangConstantValue result;
private BLangDiagnosticLog dlog;
private Location currentPos;
private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>();
private Map<String, BLangConstantValue> constantMap = new HashMap<String, BLangConstantValue>();
private ConstantValueResolver(CompilerContext context) {
context.put(CONSTANT_VALUE_RESOLVER_KEY, this);
this.dlog = BLangDiagnosticLog.getInstance(context);
}
public static ConstantValueResolver getInstance(CompilerContext context) {
ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY);
if (constantValueResolver == null) {
constantValueResolver = new ConstantValueResolver(context);
}
return constantValueResolver;
}
public void resolve(List<BLangConstant> constants, PackageID packageID) {
this.dlog.setCurrentPackageId(packageID);
constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant));
constants.forEach(constant -> constant.accept(this));
constantMap.clear();
constants.forEach(constant -> checkUniqueness(constant));
}
@Override
public void visit(BLangConstant constant) {
BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol;
this.currentConstSymbol = constant.symbol;
this.currentConstSymbol.value = visitExpr(constant.expr);
unresolvedConstants.remove(this.currentConstSymbol);
this.currentConstSymbol = tempCurrentConstSymbol;
}
@Override
public void visit(BLangLiteral literal) {
this.result = new BLangConstantValue(literal.value, literal.getBType());
}
@Override
public void visit(BLangNumericLiteral literal) {
this.result = new BLangConstantValue(literal.value, literal.getBType());
}
@Override
public void visit(BLangConstRef constRef) {
this.result = ((BConstantSymbol) constRef.symbol).value;
}
@Override
public void visit(BLangSimpleVarRef varRef) {
if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) {
this.result = null;
return;
}
BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol;
BLangConstantValue constVal = constSymbol.value;
if (constVal != null) {
this.result = constVal;
return;
}
if (this.currentConstSymbol == constSymbol) {
dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name);
return;
}
if (!this.unresolvedConstants.containsKey(constSymbol)) {
dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value);
this.result = null;
return;
}
this.unresolvedConstants.get(constSymbol).accept(this);
this.result = constSymbol.value;
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Builds a field-name -> constant-value map for a record/map literal.
    Map<String, BLangConstantValue> mapConstVal = new HashMap<>();
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        String key;
        BLangConstantValue value;
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValuePair =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            NodeKind nodeKind = keyValuePair.key.expr.getKind();
            if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
                key = (String) ((BLangLiteral) keyValuePair.key.expr).value;
            } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
                key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value;
            } else {
                // Computed/other key kinds cannot be folded; skip the field.
                continue;
            }
            value = visitExpr(keyValuePair.valueExpr);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}`: the variable name doubles as the key.
            BLangRecordLiteral.BLangRecordVarNameField varNameField =
                    (BLangRecordLiteral.BLangRecordVarNameField) field;
            key = varNameField.variableName.value;
            value = visitExpr(varNameField);
        } else {
            // Spread field `{...e}`: merge the referenced constant map into
            // this one. The unchecked cast assumes the spread expression
            // resolved to a map constant — presumably guaranteed by earlier
            // type checking; TODO confirm.
            BLangConstantValue spreadOpConstValue =
                    visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr);
            if (spreadOpConstValue != null) {
                mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value);
            }
            continue;
        }
        mapConstVal.put(key, value);
    }
    this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType());
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Evaluate both operands first, then fold them with the binary operator.
    BLangConstantValue leftValue = visitExpr(binaryExpr.lhsExpr);
    BLangConstantValue rightValue = visitExpr(binaryExpr.rhsExpr);
    this.result = calculateConstValue(leftValue, rightValue, binaryExpr.opKind);
}
public void visit(BLangGroupExpr groupExpr) {
    // Parentheses do not change the value; fold the wrapped expression.
    this.result = visitExpr(groupExpr.expression);
}
public void visit(BLangUnaryExpr unaryExpr) {
    // Fold the operand first, then apply the unary operator to it.
    BLangConstantValue operandValue = visitExpr(unaryExpr.expr);
    this.result = evaluateUnaryOperator(operandValue, unaryExpr.operator);
}
/**
 * Folds a binary operation over two resolved constant operands.
 * Returns a constant with a {@code null} value when either operand is
 * unresolved or the operation cannot be evaluated at compile time.
 *
 * @param lhs left operand (may be null/unresolved)
 * @param rhs right operand (may be null/unresolved)
 * @param kind the binary operator to apply
 */
private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) {
    if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) {
        // One of the operands failed to resolve; propagate an unresolved value.
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }
    try {
        switch (kind) {
            case ADD:
                return calculateAddition(lhs, rhs);
            case SUB:
                return calculateSubtract(lhs, rhs);
            case MUL:
                return calculateMultiplication(lhs, rhs);
            case DIV:
                return calculateDivision(lhs, rhs);
            case MOD:
                // Fix: MOD was never dispatched, leaving calculateMod unreachable
                // and `%` in constant expressions reported as unsupported.
                return calculateMod(lhs, rhs);
            case BITWISE_AND:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b);
            case BITWISE_OR:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b);
            case BITWISE_LEFT_SHIFT:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b);
            case BITWISE_RIGHT_SHIFT:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b);
            case BITWISE_UNSIGNED_RIGHT_SHIFT:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b);
            case BITWISE_XOR:
                return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b);
            default:
                dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
        }
    } catch (NumberFormatException nfe) {
        // Malformed numeric text simply yields an unresolved constant; the
        // literal itself is reported elsewhere.
    } catch (ArithmeticException ae) {
        // e.g. integer division by zero — report at the expression position.
        dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage());
    }
    return new BLangConstantValue(null, this.currentConstSymbol.type);
}
/**
 * Applies a unary operator to a resolved constant value. Returns a constant
 * with a {@code null} value when the operator/operand combination is invalid.
 */
private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) {
    try {
        switch (kind) {
            case ADD:
                // Unary plus is the identity.
                return new BLangConstantValue(value.value, currentConstSymbol.type);
            case SUB:
                return calculateNegation(value);
            case BITWISE_COMPLEMENT:
                return calculateBitWiseComplement(value);
            case NOT:
                return calculateBooleanComplement(value);
        }
    } catch (ClassCastException ce) {
        // A type mismatch means the operand is not a valid constant for this
        // operator; fall through and return an unresolved value.
    }
    return new BLangConstantValue(null, this.currentConstSymbol.type);
}
/**
 * Folds a bitwise binary operation; only int constants are supported, any
 * other type reports an error and yields an unresolved value.
 */
private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs,
                                              BiFunction<Long, Long, Long> func) {
    if (this.currentConstSymbol.type.tag == TypeTags.INT) {
        Long folded = func.apply((Long) lhs.value, (Long) rhs.value);
        return new BLangConstantValue(folded, this.currentConstSymbol.type);
    }
    dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
    return new BLangConstantValue(null, this.currentConstSymbol.type);
}
/**
 * Folds {@code lhs + rhs} according to the declared type of the current
 * constant. Float/decimal results are kept as strings, matching the
 * representation used by the other calculate* helpers.
 */
private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT || typeTag == TypeTags.BYTE) {
        result = (Long) lhs.value + (Long) rhs.value;
    } else if (typeTag == TypeTags.FLOAT) {
        double sum = Double.parseDouble(String.valueOf(lhs.value))
                + Double.parseDouble(String.valueOf(rhs.value));
        result = String.valueOf(sum);
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal left = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
        BigDecimal right = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
        result = left.add(right, MathContext.DECIMAL128).toPlainString();
    } else if (typeTag == TypeTags.STRING) {
        result = String.valueOf(lhs.value) + String.valueOf(rhs.value);
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds {@code lhs - rhs} for int/byte, float and decimal constants. */
private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT || typeTag == TypeTags.BYTE) {
        result = (Long) lhs.value - (Long) rhs.value;
    } else if (typeTag == TypeTags.FLOAT) {
        double difference = Double.parseDouble(String.valueOf(lhs.value))
                - Double.parseDouble(String.valueOf(rhs.value));
        result = String.valueOf(difference);
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal left = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
        BigDecimal right = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
        result = left.subtract(right, MathContext.DECIMAL128).toPlainString();
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds {@code lhs * rhs} for int/byte, float and decimal constants. */
private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT || typeTag == TypeTags.BYTE) {
        result = (Long) lhs.value * (Long) rhs.value;
    } else if (typeTag == TypeTags.FLOAT) {
        double product = Double.parseDouble(String.valueOf(lhs.value))
                * Double.parseDouble(String.valueOf(rhs.value));
        result = String.valueOf(product);
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal left = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
        BigDecimal right = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
        result = left.multiply(right, MathContext.DECIMAL128).toPlainString();
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/**
 * Folds {@code lhs / rhs} for int/byte, float and decimal constants.
 * Integer division by zero throws ArithmeticException, which the caller
 * (calculateConstValue) reports as an invalid constant expression.
 */
private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT || typeTag == TypeTags.BYTE) {
        result = (Long) lhs.value / (Long) rhs.value;
    } else if (typeTag == TypeTags.FLOAT) {
        double quotient = Double.parseDouble(String.valueOf(lhs.value))
                / Double.parseDouble(String.valueOf(rhs.value));
        result = String.valueOf(quotient);
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal left = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
        BigDecimal right = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
        result = left.divide(right, MathContext.DECIMAL128).toPlainString();
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds {@code lhs % rhs} for int/byte, float and decimal constants. */
private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT || typeTag == TypeTags.BYTE) {
        result = (Long) lhs.value % (Long) rhs.value;
    } else if (typeTag == TypeTags.FLOAT) {
        double remainder = Double.parseDouble(String.valueOf(lhs.value))
                % Double.parseDouble(String.valueOf(rhs.value));
        result = String.valueOf(remainder);
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal left = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
        BigDecimal right = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
        result = left.remainder(right, MathContext.DECIMAL128).toPlainString();
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds unary minus for int, float and decimal constants. */
private BLangConstantValue calculateNegation(BLangConstantValue value) {
    Object result = null;
    int typeTag = this.currentConstSymbol.type.tag;
    if (typeTag == TypeTags.INT) {
        result = -1 * ((Long) (value.value));
    } else if (typeTag == TypeTags.FLOAT) {
        result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value)));
    } else if (typeTag == TypeTags.DECIMAL) {
        BigDecimal operand = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128);
        BigDecimal minusOne = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128);
        result = operand.multiply(minusOne, MathContext.DECIMAL128).toPlainString();
    }
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds {@code ~value}; only defined for int constants, otherwise null. */
private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) {
    boolean isInt = this.currentConstSymbol.type.tag == TypeTags.INT;
    Object result = isInt ? ~((Long) (value.value)) : null;
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/** Folds {@code !value}; only defined for boolean constants, otherwise null. */
private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) {
    boolean isBoolean = this.currentConstSymbol.type.tag == TypeTags.BOOLEAN;
    Object result = isBoolean ? !((Boolean) (value.value)) : null;
    return new BLangConstantValue(result, currentConstSymbol.type);
}
/**
 * Evaluates the given expression to a constant value, saving and restoring the
 * in-progress result/position so that nested evaluations do not clobber each
 * other. Returns {@code null} for expression kinds that cannot be folded or
 * for expressions that have not passed type checking.
 */
private BLangConstantValue visitExpr(BLangExpression node) {
    if (!node.typeChecked) {
        return null;
    }
    switch (node.getKind()) {
        case LITERAL:
        case NUMERIC_LITERAL:
        case RECORD_LITERAL_EXPR:
        case SIMPLE_VARIABLE_REF:
        case BINARY_EXPR:
        case GROUP_EXPR:
        case UNARY_EXPR:
            // Save state, evaluate the node, then restore the previous state.
            BLangConstantValue prevResult = this.result;
            Location prevPos = this.currentPos;
            this.currentPos = node.pos;
            this.result = null;
            node.accept(this);
            BLangConstantValue newResult = this.result;
            this.result = prevResult;
            this.currentPos = prevPos;
            return newResult;
        default:
            // Any other expression kind is not foldable.
            return null;
    }
}
}
|
Could we risk leaving a package out of the upgrade?
|
/**
 * package-2 is reported as not installed (exit code 1), so a single yum
 * install command covering both packages is expected.
 */
public void testInstall() {
    TaskContext context = mock(TaskContext.class);
    TestCommandSupplier supplier = new TestCommandSupplier(context);
    supplier.expectCommand("yum list installed package-1", 0, "");
    supplier.expectCommand("yum list installed package-2", 1, "");
    supplier.expectCommand(
            "yum install --assumeyes --enablerepo=repo-name package-1 package-2",
            0,
            "");
    Yum yum = new Yum(context, supplier);
    yum.install("package-1", "package-2")
            .enableRepo("repo-name")
            .converge();
    supplier.verifyInvocations();
}
|
"yum install --assumeyes --enablerepo=repo-name package-1 package-2",
|
/**
 * package-2 is reported as not installed (exit code 1), so a single yum
 * install command covering both packages is expected.
 */
public void testInstall() {
    TaskContext context = mock(TaskContext.class);
    TestCommandSupplier supplier = new TestCommandSupplier(context);
    supplier.expectCommand("yum list installed package-1", 0, "");
    supplier.expectCommand("yum list installed package-2", 1, "");
    supplier.expectCommand(
            "yum install --assumeyes --enablerepo=repo-name package-1 package-2",
            0,
            "");
    Yum yum = new Yum(context, supplier);
    yum.install("package-1", "package-2")
            .enableRepo("repo-name")
            .converge();
    supplier.verifyInvocations();
}
|
/** Tests for {@link Yum} install convergence. */
class YumTest {

    /** Both packages are already installed: no install command is issued. */
    @Test
    public void testAlreadyInstalled() {
        TaskContext taskContext = mock(TaskContext.class);
        TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
        commandSupplier.expectCommand("yum list installed package-1", 0, "");
        commandSupplier.expectCommand("yum list installed package-2", 0, "");
        Yum yum = new Yum(taskContext, commandSupplier);
        yum.install("package-1", "package-2")
                .enableRepo("repo-name")
                .converge();
        commandSupplier.verifyInvocations();
    }

    // Fix: removed a duplicated bare @Test annotation — @Test is not a
    // repeatable annotation, so annotating the method twice was a compile error.
    /** A failing yum install (exit code 1) must surface as a CommandException. */
    @Test(expected = CommandException.class)
    public void testFailedInstall() {
        TaskContext taskContext = mock(TaskContext.class);
        TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
        commandSupplier.expectCommand("yum list installed package-1", 0, "");
        commandSupplier.expectCommand("yum list installed package-2", 1, "");
        commandSupplier.expectCommand(
                "yum install --assumeyes --enablerepo=repo-name package-1 package-2",
                1,
                "error");
        Yum yum = new Yum(taskContext, commandSupplier);
        yum.install("package-1", "package-2")
                .enableRepo("repo-name")
                .converge();
        fail();
    }
}
|
/** Tests for {@link Yum} install convergence. */
class YumTest {

    /** Both packages are already installed: no install command is issued. */
    @Test
    public void testAlreadyInstalled() {
        TaskContext taskContext = mock(TaskContext.class);
        TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
        commandSupplier.expectCommand("yum list installed package-1", 0, "");
        commandSupplier.expectCommand("yum list installed package-2", 0, "");
        Yum yum = new Yum(taskContext, commandSupplier);
        yum.install("package-1", "package-2")
                .enableRepo("repo-name")
                .converge();
        commandSupplier.verifyInvocations();
    }

    // Fix: removed a duplicated bare @Test annotation — @Test is not a
    // repeatable annotation, so annotating the method twice was a compile error.
    /** A failing yum install (exit code 1) must surface as a CommandException. */
    @Test(expected = CommandException.class)
    public void testFailedInstall() {
        TaskContext taskContext = mock(TaskContext.class);
        TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
        commandSupplier.expectCommand("yum list installed package-1", 0, "");
        commandSupplier.expectCommand("yum list installed package-2", 1, "");
        commandSupplier.expectCommand(
                "yum install --assumeyes --enablerepo=repo-name package-1 package-2",
                1,
                "error");
        Yum yum = new Yum(taskContext, commandSupplier);
        yum.install("package-1", "package-2")
                .enableRepo("repo-name")
                .converge();
        fail();
    }
}
|
Maybe it would be a good idea to log at `info` level when we fall back to another key and `fallbackKey.isDeprecated()` is false. That way, the user knows which option the value was actually read from.
|
/**
 * Checks whether the configuration contains a value for the given option,
 * under either its primary key or one of its fallback keys. Accessing a
 * value through a deprecated fallback key logs a warning.
 */
public boolean contains(ConfigOption<?> configOption) {
    synchronized (this.confData) {
        // Direct hit on the option's primary key.
        if (this.confData.containsKey(configOption.key())) {
            return true;
        }
        // Otherwise probe the fallback keys, warning on deprecated ones.
        if (configOption.hasFallbackKeys()) {
            for (FallbackKey fallbackKey : configOption.fallbackKeys()) {
                if (!this.confData.containsKey(fallbackKey.getKey())) {
                    continue;
                }
                if (fallbackKey.isDeprecated()) {
                    LOG.warn("Config uses deprecated configuration key '{}' instead of proper key '{}'",
                            fallbackKey.getKey(), configOption.key());
                }
                return true;
            }
        }
        return false;
    }
}
|
if (fallbackKey.isDeprecated()) {
|
/**
 * Checks whether the configuration contains a value for the given option,
 * under either its primary key or one of its fallback keys. Fallback hits are
 * logged via {@code loggingFallback}.
 */
public boolean contains(ConfigOption<?> configOption) {
    synchronized (this.confData) {
        // Direct hit on the option's primary key.
        if (this.confData.containsKey(configOption.key())) {
            return true;
        }
        if (!configOption.hasFallbackKeys()) {
            return false;
        }
        // Probe the fallback keys; log how the value was found on a hit.
        for (FallbackKey fallbackKey : configOption.fallbackKeys()) {
            if (this.confData.containsKey(fallbackKey.getKey())) {
                loggingFallback(fallbackKey, configOption);
                return true;
            }
        }
        return false;
    }
}
|
class loader on the caller.
*
* @param key The key of the pair to be added
* @param klazz The value of the pair to be added
* @see
*/
public void setClass(String key, Class<?> klazz) {
    // Store only the fully-qualified class name; the class itself is
    // re-resolved by the reader.
    String className = klazz.getName();
    setValueInternal(key, className);
}
|
class loader on the caller.
*
* @param key The key of the pair to be added
* @param klazz The value of the pair to be added
* @see
*/
public void setClass(String key, Class<?> klazz) {
    // Store only the fully-qualified class name; the class itself is
    // re-resolved by the reader.
    String className = klazz.getName();
    setValueInternal(key, className);
}
|
If the project went through the `package` phase, then this file will be the JAR.
|
/**
 * Builds the Quarkus application artifact for the current project.
 *
 * <p>POM-packaged projects are skipped, as is the whole goal when
 * {@code quarkus.build.skip} is set. The {@code uberJar} parameter may force
 * uber-jar packaging via the corresponding system property.
 *
 * @throws MojoExecutionException if the project has not been compiled yet,
 *         the output directory cannot be created, or the build itself fails
 */
public void execute() throws MojoExecutionException {
    if (project.getPackaging().equals("pom")) {
        getLog().info("Type of the artifact is POM, skipping build goal");
        return;
    }
    if (skip) {
        getLog().info("Skipping Quarkus build");
        return;
    }
    // True when we set the uber-jar system property ourselves and therefore
    // must clear it again in the finally block.
    boolean clear = false;
    try {
        // Forward only the quarkus.* properties of the project to the build.
        final Properties projectProperties = project.getProperties();
        final Properties realProperties = new Properties();
        for (String name : projectProperties.stringPropertyNames()) {
            if (name.startsWith("quarkus.")) {
                realProperties.setProperty(name, projectProperties.getProperty(name));
            }
        }
        if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) {
            System.setProperty(QUARKUS_PACKAGE_UBER_JAR, "true");
            clear = true;
        }
        realProperties.putIfAbsent("quarkus.application.name", project.getArtifactId());
        realProperties.putIfAbsent("quarkus.application.version", project.getVersion());
        MavenArtifactResolver resolver = MavenArtifactResolver.builder()
                .setWorkspaceDiscovery(false)
                .setRepositorySystem(repoSystem)
                .setRepositorySystemSession(repoSession)
                .setRemoteRepositories(repos)
                .build();
        final Artifact projectArtifact = project.getArtifact();
        final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
                projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(),
                projectArtifact.getVersion());
        // If the project went through the package phase this is the built JAR;
        // otherwise fall back to the classes directory, creating it when the
        // project has no sources or resources at all.
        File projectFile = projectArtifact.getFile();
        if (projectFile == null) {
            projectFile = new File(project.getBuild().getOutputDirectory());
            if (!projectFile.exists()) {
                if (hasSources(project)) {
                    throw new MojoExecutionException("Project " + project.getArtifact() + " has not been compiled yet");
                }
                if (!projectFile.mkdirs()) {
                    throw new MojoExecutionException("Failed to create the output dir " + projectFile);
                }
            }
        }
        appArtifact.setPaths(PathsCollection.of(projectFile.toPath()));
        QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder()
                .setAppArtifact(appArtifact)
                .setMavenArtifactResolver(resolver)
                .setBaseClassLoader(BuildMojo.class.getClassLoader())
                .setBuildSystemProperties(realProperties)
                .setLocalProjectDiscovery(false)
                .setProjectRoot(project.getBasedir().toPath())
                .setBaseName(finalName)
                .setTargetDirectory(buildDir.toPath());
        // Register every module of the reactor as a local artifact.
        for (MavenProject project : project.getCollectedProjects()) {
            builder.addLocalArtifact(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null,
                    project.getArtifact().getArtifactHandler().getExtension()));
        }
        try (CuratedApplication curatedApplication = builder
                .build().bootstrap()) {
            AugmentAction action = curatedApplication.createAugmentor();
            AugmentResult result = action.createProductionApplication();
            Artifact original = project.getArtifact();
            if (result.getJar() != null) {
                // For uber-jars, move the original (thin) jar aside so the
                // runner jar can take the original artifact's place.
                if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) {
                    final Path standardJar = curatedApplication.getAppModel().getAppArtifact().getPaths().getSinglePath();
                    if (Files.exists(standardJar)) {
                        try {
                            Files.deleteIfExists(result.getJar().getOriginalArtifact());
                            Files.move(standardJar, result.getJar().getOriginalArtifact());
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                        original.setFile(result.getJar().getOriginalArtifact().toFile());
                    }
                }
                if (result.getJar().isUberJar()) {
                    // Attach the runner jar under the "runner" classifier.
                    projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), "runner");
                }
            }
        }
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to build quarkus application", e);
    } finally {
        if (clear) {
            System.clearProperty(QUARKUS_PACKAGE_UBER_JAR);
        }
    }
}
|
File projectFile = projectArtifact.getFile();
|
/**
 * Builds the Quarkus application artifact for the current project.
 *
 * <p>POM-packaged projects are skipped, as is the whole goal when
 * {@code quarkus.build.skip} is set. The {@code uberJar} parameter may force
 * uber-jar packaging via the corresponding system property.
 *
 * @throws MojoExecutionException if the project has not been compiled yet,
 *         the output directory cannot be created, or the build itself fails
 */
public void execute() throws MojoExecutionException {
    if (project.getPackaging().equals("pom")) {
        getLog().info("Type of the artifact is POM, skipping build goal");
        return;
    }
    if (skip) {
        getLog().info("Skipping Quarkus build");
        return;
    }
    // True when we set the uber-jar system property ourselves and therefore
    // must clear it again in the finally block.
    boolean clear = false;
    try {
        // Forward only the quarkus.* properties of the project to the build.
        final Properties projectProperties = project.getProperties();
        final Properties realProperties = new Properties();
        for (String name : projectProperties.stringPropertyNames()) {
            if (name.startsWith("quarkus.")) {
                realProperties.setProperty(name, projectProperties.getProperty(name));
            }
        }
        if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) {
            System.setProperty(QUARKUS_PACKAGE_UBER_JAR, "true");
            clear = true;
        }
        realProperties.putIfAbsent("quarkus.application.name", project.getArtifactId());
        realProperties.putIfAbsent("quarkus.application.version", project.getVersion());
        MavenArtifactResolver resolver = MavenArtifactResolver.builder()
                .setWorkspaceDiscovery(false)
                .setRepositorySystem(repoSystem)
                .setRepositorySystemSession(repoSession)
                .setRemoteRepositories(repos)
                .build();
        final Artifact projectArtifact = project.getArtifact();
        final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
                projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(),
                projectArtifact.getVersion());
        // If the project went through the package phase this is the built JAR;
        // otherwise fall back to the classes directory, creating it when the
        // project has no sources or resources at all.
        File projectFile = projectArtifact.getFile();
        if (projectFile == null) {
            projectFile = new File(project.getBuild().getOutputDirectory());
            if (!projectFile.exists()) {
                if (hasSources(project)) {
                    throw new MojoExecutionException("Project " + project.getArtifact() + " has not been compiled yet");
                }
                if (!projectFile.mkdirs()) {
                    throw new MojoExecutionException("Failed to create the output dir " + projectFile);
                }
            }
        }
        appArtifact.setPaths(PathsCollection.of(projectFile.toPath()));
        QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder()
                .setAppArtifact(appArtifact)
                .setMavenArtifactResolver(resolver)
                .setBaseClassLoader(BuildMojo.class.getClassLoader())
                .setBuildSystemProperties(realProperties)
                .setLocalProjectDiscovery(false)
                .setProjectRoot(project.getBasedir().toPath())
                .setBaseName(finalName)
                .setTargetDirectory(buildDir.toPath());
        // Register every module of the reactor as a local artifact.
        for (MavenProject project : project.getCollectedProjects()) {
            builder.addLocalArtifact(new AppArtifactKey(project.getGroupId(), project.getArtifactId(), null,
                    project.getArtifact().getArtifactHandler().getExtension()));
        }
        try (CuratedApplication curatedApplication = builder
                .build().bootstrap()) {
            AugmentAction action = curatedApplication.createAugmentor();
            AugmentResult result = action.createProductionApplication();
            Artifact original = project.getArtifact();
            if (result.getJar() != null) {
                // For uber-jars, move the original (thin) jar aside so the
                // runner jar can take the original artifact's place.
                if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) {
                    final Path standardJar = curatedApplication.getAppModel().getAppArtifact().getPaths().getSinglePath();
                    if (Files.exists(standardJar)) {
                        try {
                            Files.deleteIfExists(result.getJar().getOriginalArtifact());
                            Files.move(standardJar, result.getJar().getOriginalArtifact());
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                        original.setFile(result.getJar().getOriginalArtifact().toFile());
                    }
                }
                if (result.getJar().isUberJar()) {
                    // Attach the runner jar under the "runner" classifier.
                    projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), "runner");
                }
            }
        }
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to build quarkus application", e);
    } finally {
        if (clear) {
            System.clearProperty(QUARKUS_PACKAGE_UBER_JAR);
        }
    }
}
|
/**
 * Maven mojo that builds a Quarkus application from the current project.
 */
class BuildMojo extends AbstractMojo {

    protected static final String QUARKUS_PACKAGE_UBER_JAR = "quarkus.package.uber-jar";

    /**
     * The entry point to Aether, i.e. the component doing all the work.
     *
     * @component
     */
    @Component
    private RepositorySystem repoSystem;

    @Component
    private MavenProjectHelper projectHelper;

    /**
     * The current repository/network configuration of Maven.
     *
     * @parameter default-value="${repositorySystemSession}"
     * @readonly
     */
    @Parameter(defaultValue = "${repositorySystemSession}", readonly = true)
    private RepositorySystemSession repoSession;

    /**
     * The project's remote repositories to use for the resolution of artifacts and their dependencies.
     *
     * @parameter default-value="${project.remoteProjectRepositories}"
     * @readonly
     */
    @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true)
    private List<RemoteRepository> repos;

    /**
     * The project's remote repositories to use for the resolution of plugins and their dependencies.
     *
     * @parameter default-value="${project.remotePluginRepositories}"
     * @readonly
     */
    @Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true, required = true)
    private List<RemoteRepository> pluginRepos;

    /**
     * The directory for compiled classes.
     */
    @Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}")
    @Deprecated
    private File outputDirectory;

    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    protected MavenProject project;

    /**
     * The directory for generated source files.
     */
    @Parameter(defaultValue = "${project.build.directory}/generated-sources")
    private File generatedSourcesDirectory;

    @Parameter(defaultValue = "${project.build.directory}")
    private File buildDir;

    @Parameter(defaultValue = "${project.build.finalName}")
    private String finalName;

    @Parameter(property = "uberJar", defaultValue = "false")
    private boolean uberJar;

    /**
     * When using the uberJar option, this array specifies entries that should
     * be excluded from the final jar. The entries are relative to the root of
     * the file. An example of this configuration could be:
     * <code><pre>
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * </pre></code>
     */
    @Parameter(property = "ignoredEntries")
    private String[] ignoredEntries;

    /** Skip the execution of this mojo */
    @Parameter(defaultValue = "false", property = "quarkus.build.skip")
    private boolean skip = false;

    public BuildMojo() {
        MojoLogger.logSupplier = this::getLog;
    }

    // Fix: removed a stray @Override that preceded this method — a private
    // static method cannot override anything, so the annotation was a compile error.
    /** Returns true if the project has any source or resource directory on disk. */
    private static boolean hasSources(MavenProject project) {
        if (new File(project.getBuild().getSourceDirectory()).exists()) {
            return true;
        }
        for (Resource r : project.getBuild().getResources()) {
            if (new File(r.getDirectory()).exists()) {
                return true;
            }
        }
        return false;
    }
}
|
/**
 * Maven mojo that builds a Quarkus application from the current project.
 */
class BuildMojo extends AbstractMojo {

    protected static final String QUARKUS_PACKAGE_UBER_JAR = "quarkus.package.uber-jar";

    /**
     * The entry point to Aether, i.e. the component doing all the work.
     *
     * @component
     */
    @Component
    private RepositorySystem repoSystem;

    @Component
    private MavenProjectHelper projectHelper;

    /**
     * The current repository/network configuration of Maven.
     *
     * @parameter default-value="${repositorySystemSession}"
     * @readonly
     */
    @Parameter(defaultValue = "${repositorySystemSession}", readonly = true)
    private RepositorySystemSession repoSession;

    /**
     * The project's remote repositories to use for the resolution of artifacts and their dependencies.
     *
     * @parameter default-value="${project.remoteProjectRepositories}"
     * @readonly
     */
    @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true)
    private List<RemoteRepository> repos;

    /**
     * The project's remote repositories to use for the resolution of plugins and their dependencies.
     *
     * @parameter default-value="${project.remotePluginRepositories}"
     * @readonly
     */
    @Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true, required = true)
    private List<RemoteRepository> pluginRepos;

    /**
     * The directory for compiled classes.
     */
    @Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}")
    @Deprecated
    private File outputDirectory;

    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    protected MavenProject project;

    /**
     * The directory for generated source files.
     */
    @Parameter(defaultValue = "${project.build.directory}/generated-sources")
    private File generatedSourcesDirectory;

    @Parameter(defaultValue = "${project.build.directory}")
    private File buildDir;

    @Parameter(defaultValue = "${project.build.finalName}")
    private String finalName;

    @Parameter(property = "uberJar", defaultValue = "false")
    private boolean uberJar;

    /**
     * When using the uberJar option, this array specifies entries that should
     * be excluded from the final jar. The entries are relative to the root of
     * the file. An example of this configuration could be:
     * <code><pre>
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * &
     * </pre></code>
     */
    @Parameter(property = "ignoredEntries")
    private String[] ignoredEntries;

    /** Skip the execution of this mojo */
    @Parameter(defaultValue = "false", property = "quarkus.build.skip")
    private boolean skip = false;

    public BuildMojo() {
        MojoLogger.logSupplier = this::getLog;
    }

    // Fix: removed a stray @Override that preceded this method — a private
    // static method cannot override anything, so the annotation was a compile error.
    /** Returns true if the project has any source or resource directory on disk. */
    private static boolean hasSources(MavenProject project) {
        if (new File(project.getBuild().getSourceDirectory()).exists()) {
            return true;
        }
        for (Resource r : project.getBuild().getResources()) {
            if (new File(r.getDirectory()).exists()) {
                return true;
            }
        }
        return false;
    }
}
|
This means that the aggregation functions only use one column.
|
/**
 * Checks whether this aggregate-over-repeat (grouping sets) pattern is
 * eligible for the push-down rewrite.
 */
public boolean check(OptExpression input, OptimizerContext context) {
    LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
    LogicalRepeatOperator repeatOperator = (LogicalRepeatOperator) input.inputAt(0).getOp();
    // Only a global aggregation over more than three grouping sets that has
    // not already been pushed down is eligible.
    if (aggregate.getType() != AggType.GLOBAL || repeatOperator.getRepeatColumnRef().size() <= 3
            || repeatOperator.hasPushDown()) {
        return false;
    }
    // Every aggregate must be a supported, non-distinct function over at most
    // one column.
    if (!aggregate.getAggregations().values().stream()
            .allMatch(agg -> SUPPORT_AGGREGATE_FUNCTIONS.contains(agg.getFnName()) &&
                    !agg.isDistinct() && agg.getUsedColumns().cardinality() <= 1)) {
        return false;
    }
    // The last grouping set is treated as the full column set; every other
    // grouping set must be a subset of it.
    List<ColumnRefOperator> allRepeatRefs = repeatOperator.getRepeatColumnRef()
            .get(repeatOperator.getRepeatColumnRef().size() - 1);
    for (List<ColumnRefOperator> refs : repeatOperator.getRepeatColumnRef()) {
        if (refs.stream().anyMatch(ref -> !allRepeatRefs.contains(ref))) {
            return false;
        }
    }
    return true;
}
|
return false;
|
/**
 * Checks whether this aggregate-over-repeat (grouping sets) pattern is
 * eligible for the push-down rewrite. Besides the structural requirements,
 * the rewrite must not be a no-op: at least one grouping key must lie outside
 * the columns shared with smaller grouping sets and the output grouping.
 */
public boolean check(OptExpression input, OptimizerContext context) {
    LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
    LogicalRepeatOperator repeatOperator = (LogicalRepeatOperator) input.inputAt(0).getOp();
    // Only a global aggregation over more than three grouping sets that has
    // not already been pushed down is eligible.
    if (aggregate.getType() != AggType.GLOBAL || repeatOperator.getRepeatColumnRef().size() <= 3
            || repeatOperator.hasPushDown()) {
        return false;
    }
    // Every aggregate must be a supported, non-distinct function over at most
    // one column.
    if (!aggregate.getAggregations().values().stream()
            .allMatch(agg -> SUPPORT_AGGREGATE_FUNCTIONS.contains(agg.getFnName()) &&
                    !agg.isDistinct() && agg.getUsedColumns().cardinality() <= 1)) {
        return false;
    }
    // The last grouping set is treated as the full column set; every other
    // grouping set must be a subset of it.
    List<ColumnRefOperator> allRepeatRefs = repeatOperator.getRepeatColumnRef()
            .get(repeatOperator.getRepeatColumnRef().size() - 1);
    Set<ColumnRefOperator> checkRefs = new HashSet<>(allRepeatRefs);
    for (int i = 0; i < repeatOperator.getRepeatColumnRef().size() - 1; i++) {
        List<ColumnRefOperator> refs = repeatOperator.getRepeatColumnRef().get(i);
        if (refs.stream().anyMatch(ref -> !allRepeatRefs.contains(ref))) {
            return false;
        }
        // Remove columns that appear in smaller grouping sets; what remains in
        // checkRefs are the columns unique to the full set.
        refs.forEach(checkRefs::remove);
    }
    checkRefs.addAll(repeatOperator.getOutputGrouping());
    // Reject when checkRefs already covers every grouping key, i.e. the
    // push-down would not change anything.
    return !checkRefs.containsAll(aggregate.getGroupingKeys());
}
|
class PushDownAggregateGroupingSetsRule extends TransformationRule {
private static final List<String> SUPPORT_AGGREGATE_FUNCTIONS = Lists.newArrayList(FunctionSet.MAX,
FunctionSet.MIN, FunctionSet.SUM);
// Matches a global aggregation sitting directly on top of a repeat
// (grouping sets) operator with any single child.
public PushDownAggregateGroupingSetsRule() {
    super(RuleType.TF_PUSHDOWN_AGG_GROUPING_SET,
            Pattern.create(OperatorType.LOGICAL_AGGR)
                    .addChildren(Pattern.create(OperatorType.LOGICAL_REPEAT, OperatorType.PATTERN_LEAF)));
}
/**
 * Rewrites the aggregation over grouping sets into a CTE: the widest grouping
 * set is aggregated once into a CTE producer, which is consumed twice (a
 * sub-repeat branch and a select branch) and recombined with a UNION ALL
 * hung under a CTE anchor.
 */
// Fix: removed a duplicated @Override annotation (the method was annotated
// twice, which is a compile error since @Override is not repeatable).
@Override
public List<OptExpression> transform(OptExpression input, OptimizerContext context) {
    LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
    LogicalRepeatOperator repeat = (LogicalRepeatOperator) input.inputAt(0).getOp();
    ColumnRefFactory factory = context.getColumnRefFactory();
    int cteId = context.getCteContext().getNextCteId();
    OptExpression cteProduce = buildCTEProduce(context, input, cteId);
    Map<ColumnRefOperator, ColumnRefOperator> consumeOutputs1 = Maps.newHashMap();
    OptExpression subRepeatConsume = buildSubRepeatConsume(factory, consumeOutputs1, aggregate, repeat, cteId);
    Map<ColumnRefOperator, ColumnRefOperator> consumeOutputs2 = Maps.newHashMap();
    OptExpression selectConsume = buildSelectConsume(factory, consumeOutputs2, aggregate, repeat, cteId);
    OptExpression union =
            buildUnionAll(aggregate, consumeOutputs1, subRepeatConsume, consumeOutputs2, selectConsume);
    return Lists.newArrayList(OptExpression.create(new LogicalCTEAnchorOperator(cteId), cteProduce, union));
}
/**
 * Combines the two CTE consumers with a UNION ALL whose output columns match
 * the original aggregation's grouping keys followed by its aggregate outputs.
 */
private OptExpression buildUnionAll(LogicalAggregationOperator aggregate,
                                    Map<ColumnRefOperator, ColumnRefOperator> inputs1, OptExpression repeatConsume,
                                    Map<ColumnRefOperator, ColumnRefOperator> inputs2,
                                    OptExpression selectConsume) {
    List<ColumnRefOperator> outputs = Lists.newArrayList();
    outputs.addAll(aggregate.getGroupingKeys());
    outputs.addAll(aggregate.getAggregations().keySet());
    // Map each union output column to the corresponding column of each child.
    List<List<ColumnRefOperator>> childOutputs = Lists.newArrayList();
    childOutputs.add(outputs.stream().map(inputs1::get).collect(Collectors.toList()));
    childOutputs.add(outputs.stream().map(inputs2::get).collect(Collectors.toList()));
    // The original aggregation's limit and predicate move onto the union.
    LogicalUnionOperator union = LogicalUnionOperator.builder()
            .setOutputColumnRefOp(outputs)
            .setChildOutputColumns(childOutputs)
            .setLimit(aggregate.getLimit())
            .setPredicate(aggregate.getPredicate())
            .build();
    return OptExpression.create(union, repeatConsume, selectConsume);
}
/**
 * Builds the CTE producer: a single aggregation, grouped by the last grouping-set
 * column list of the repeat, placed below a CTE-produce operator.
 */
private OptExpression buildCTEProduce(OptimizerContext context, OptExpression input, int cteId) {
OptExpression repeatInput = input.inputAt(0);
LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
LogicalRepeatOperator repeat = (LogicalRepeatOperator) repeatInput.getOp();
// Group-by keys: the last column list of the repeat's grouping sets.
// NOTE(review): this list may contain refs that are not aggregate grouping keys;
// consider restricting it via aggregate.getGroupingKeys() - confirm intended semantics.
List<ColumnRefOperator> allGroupByRefs = repeat.getRepeatColumnRef()
.get(repeat.getRepeatColumnRef().size() - 1);
List<ColumnRefOperator> partitionRefs = Collections.emptyList();
// Derive statistics on demand so a distribution column can be chosen below.
if (null == repeatInput.getStatistics()) {
Utils.calculateStatistics(input, context);
}
if (null != repeatInput.getStatistics()) {
Statistics statistics = repeatInput.getStatistics();
// Pick the single known-statistics column with the highest NDV as the shuffle
// key (descending sort by distinct-value count, keep one).
partitionRefs = allGroupByRefs.stream()
.filter(ref -> !statistics.getColumnStatistic(ref).isUnknown())
.sorted((o1, o2) -> Double.compare(statistics.getColumnStatistic(o2).getDistinctValuesCount(),
statistics.getColumnStatistic(o1).getDistinctValuesCount()))
.limit(1)
.collect(Collectors.toList());
}
// Fall back to partitioning by all group-by keys when reshuffle is disabled or no
// column had usable statistics.
if (!context.getSessionVariable().isCboPushDownGroupingSetReshuffle() || partitionRefs.isEmpty()) {
partitionRefs = allGroupByRefs;
}
LogicalAggregationOperator.Builder builder = LogicalAggregationOperator.builder();
builder.setType(AggType.GLOBAL)
.setGroupingKeys(allGroupByRefs)
.setAggregations(aggregate.getAggregations())
.setPredicate(aggregate.getPredicate())
.setPartitionByColumns(partitionRefs);
// Session variable "local" forces a one-stage, unsplit local aggregation instead.
if ("local".equals(context.getSessionVariable().getCboPushDownAggregate())) {
builder.setType(AggType.LOCAL);
builder.setSplit(false);
}
LogicalAggregationOperator allColumnRefsAggregate = builder.build();
LogicalCTEProduceOperator produce = new LogicalCTEProduceOperator(cteId);
return OptExpression.create(produce,
OptExpression.create(allColumnRefsAggregate, input.inputAt(0).getInputs()));
}
/*
 * select *, (grouping_id, grouping_set) from cte1
 */
/**
 * Builds the consumer branch for the last grouping set: reads the CTE output as-is and
 * projects the grouping-id output columns as constants taken from the last set.
 *
 * @param outputs out-parameter; filled with original-ref -> consumer-ref mappings
 */
private OptExpression buildSelectConsume(ColumnRefFactory factory,
Map<ColumnRefOperator, ColumnRefOperator> outputs,
LogicalAggregationOperator aggregate, LogicalRepeatOperator repeat,
int cteId) {
Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap();
Map<ColumnRefOperator, ColumnRefOperator> cteColumnRefs = Maps.newHashMap();
// Pass aggregate result columns straight through from the CTE.
for (ColumnRefOperator input : aggregate.getAggregations().keySet()) {
ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
cteColumnRefs.put(cteOutput, input);
outputs.put(input, cteOutput);
projectMap.put(cteOutput, cteOutput);
}
// Pass grouping keys through, except the repeat's generated grouping-output columns.
for (ColumnRefOperator input : aggregate.getGroupingKeys()) {
if (!repeat.getOutputGrouping().contains(input)) {
ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
cteColumnRefs.put(cteOutput, input);
outputs.put(input, cteOutput);
projectMap.put(cteOutput, cteOutput);
}
}
LogicalCTEConsumeOperator consume = new LogicalCTEConsumeOperator(cteId, cteColumnRefs);
// Grouping-id columns become constants: the id value of the last grouping set.
int lastGroups = repeat.getRepeatColumnRef().size() - 1;
for (int i = 0; i < repeat.getOutputGrouping().size(); i++) {
ColumnRefOperator input = repeat.getOutputGrouping().get(i);
ColumnRefOperator output = factory.create(input, input.getType(), input.isNullable());
outputs.put(input, output);
projectMap.put(output, ConstantOperator.createBigint(repeat.getGroupingIds().get(i).get(lastGroups)));
}
LogicalProjectOperator projectOperator = new LogicalProjectOperator(projectMap);
return OptExpression.create(projectOperator, OptExpression.create(consume));
}
/*
* select a, b, c, d, null, sum(x) x from t group by rollup(a, b, c, d)
*/
/**
 * Builds the consumer branch for all grouping sets except the last: consumes the CTE,
 * repeats over the remaining n-1 grouping sets, re-aggregates, and projects NULL for
 * columns that only appear in the last set.
 *
 * <p>Fix: columns of the remaining grouping sets that have no entry in {@code outputs}
 * (e.g. refs excluded above) previously produced {@code null} elements in the rebuilt
 * repeat column lists; they are now filtered out.
 *
 * @param outputs out-parameter; filled with original-ref -> consumer-ref mappings
 */
private OptExpression buildSubRepeatConsume(ColumnRefFactory factory,
        Map<ColumnRefOperator, ColumnRefOperator> outputs,
        LogicalAggregationOperator aggregate, LogicalRepeatOperator repeat,
        int cteId) {
    int subGroups = repeat.getRepeatColumnRef().size() - 1;
    // Columns only present in the last grouping set; they become NULL in this branch.
    List<ColumnRefOperator> nullRefs = Lists.newArrayList(repeat.getRepeatColumnRef().get(subGroups));
    repeat.getRepeatColumnRef().stream().limit(subGroups).forEach(nullRefs::removeAll);
    Map<ColumnRefOperator, ColumnRefOperator> cteColumnRefs = Maps.newHashMap();
    // Map aggregate result columns out of the CTE.
    for (ColumnRefOperator input : aggregate.getAggregations().keySet()) {
        ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
        cteColumnRefs.put(cteOutput, input);
        outputs.put(input, cteOutput);
    }
    // Map grouping keys, skipping generated grouping-output columns and NULL-only refs.
    for (ColumnRefOperator input : aggregate.getGroupingKeys()) {
        if (!repeat.getOutputGrouping().contains(input) && !nullRefs.contains(input)) {
            ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
            cteColumnRefs.put(cteOutput, input);
            outputs.put(input, cteOutput);
        }
    }
    LogicalCTEConsumeOperator consume = new LogicalCTEConsumeOperator(cteId, cteColumnRefs);
    // Fresh refs for the repeat's grouping-id output columns.
    List<ColumnRefOperator> outputGrouping = Lists.newArrayList();
    repeat.getOutputGrouping().forEach(k -> {
        ColumnRefOperator x = factory.create(k, k.getType(), k.isNullable());
        outputs.put(k, x);
        outputGrouping.add(x);
    });
    // Rebuild the first n-1 grouping sets on the consumer-side refs, dropping refs
    // that were never mapped (outputs.get(...) == null).
    List<List<ColumnRefOperator>> repeatRefs = repeat.getRepeatColumnRef().stream().limit(subGroups)
            .map(l -> l.stream().map(outputs::get).filter(c -> c != null).collect(Collectors.toList()))
            .collect(Collectors.toList());
    List<List<Long>> groupingIds = repeat.getGroupingIds().stream()
            .map(s -> s.subList(0, subGroups)).collect(Collectors.toList());
    LogicalRepeatOperator newRepeat = LogicalRepeatOperator.builder()
            .setOutputGrouping(outputGrouping)
            .setRepeatColumnRefList(repeatRefs)
            .setGroupingIds(groupingIds)
            .setHasPushDown(true)
            .build();
    // Re-aggregate the pre-aggregated CTE values; outputs.get(k) must be read before
    // the mapping is overwritten with the new result ref.
    Map<ColumnRefOperator, CallOperator> aggregations = Maps.newHashMap();
    aggregate.getAggregations().forEach((k, v) -> {
        ColumnRefOperator x = factory.create(k, k.getType(), k.isNullable());
        Function aggFunc = Expr.getBuiltinFunction(v.getFnName(), new Type[] {k.getType()},
                Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
        aggregations.put(x, new CallOperator(v.getFnName(), k.getType(), Lists.newArrayList(outputs.get(k)), aggFunc));
        outputs.put(k, x);
    });
    List<ColumnRefOperator> groupings = aggregate.getGroupingKeys().stream()
            .filter(c -> !nullRefs.contains(c)).map(outputs::get).collect(Collectors.toList());
    LogicalAggregationOperator newAggregate = LogicalAggregationOperator.builder()
            .setAggregations(aggregations)
            .setGroupingKeys(groupings)
            .setType(AggType.GLOBAL)
            .setPartitionByColumns(groupings)
            .build();
    // Final projection: pass results and keys through, emit NULL for last-set-only columns.
    Map<ColumnRefOperator, ScalarOperator> projection = Maps.newHashMap();
    aggregations.keySet().forEach(k -> projection.put(k, k));
    groupings.forEach(k -> projection.put(k, k));
    for (ColumnRefOperator nullRef : nullRefs) {
        ColumnRefOperator m = factory.create(nullRef, nullRef.getType(), true);
        projection.put(m, ConstantOperator.createNull(nullRef.getType()));
        outputs.put(nullRef, m);
    }
    LogicalProjectOperator projectOperator = new LogicalProjectOperator(projection);
    return OptExpression.create(projectOperator,
            OptExpression.create(newAggregate, OptExpression.create(newRepeat, OptExpression.create(consume))));
}
}
|
/**
 * Rewrites AGGREGATE->REPEAT (grouping sets / rollup) plans: the input is pre-aggregated
 * once on the last grouping-set column list into a CTE, and the original grouping sets are
 * rebuilt from two CTE consumers joined by a UNION ALL:
 * branch 1 repeats and re-aggregates the first n-1 grouping sets, branch 2 reads the CTE
 * directly for the last set, projecting its grouping ids as constants.
 *
 * <p>Fix: {@code @Override} was written twice on {@link #transform}, which is a
 * compile error for a non-repeatable annotation.
 */
class PushDownAggregateGroupingSetsRule extends TransformationRule {
    // NOTE(review): not referenced inside this class; presumably the whitelist is
    // enforced in a check() method elsewhere - confirm before removing.
    private static final List<String> SUPPORT_AGGREGATE_FUNCTIONS = Lists.newArrayList(FunctionSet.MAX,
            FunctionSet.MIN, FunctionSet.SUM);

    public PushDownAggregateGroupingSetsRule() {
        super(RuleType.TF_PUSHDOWN_AGG_GROUPING_SET,
                Pattern.create(OperatorType.LOGICAL_AGGR)
                        .addChildren(Pattern.create(OperatorType.LOGICAL_REPEAT, OperatorType.PATTERN_LEAF)));
    }

    @Override
    public List<OptExpression> transform(OptExpression input, OptimizerContext context) {
        LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
        LogicalRepeatOperator repeat = (LogicalRepeatOperator) input.inputAt(0).getOp();
        ColumnRefFactory factory = context.getColumnRefFactory();
        int cteId = context.getCteContext().getNextCteId();
        // Producer: aggregate once over the last grouping-set column list.
        OptExpression cteProduce = buildCTEProduce(context, input, cteId);
        // Consumer 1: repeat + re-aggregate for all grouping sets except the last.
        Map<ColumnRefOperator, ColumnRefOperator> consumeOutputs1 = Maps.newHashMap();
        OptExpression subRepeatConsume = buildSubRepeatConsume(factory, consumeOutputs1, aggregate, repeat, cteId);
        // Consumer 2: direct read for the last grouping set.
        Map<ColumnRefOperator, ColumnRefOperator> consumeOutputs2 = Maps.newHashMap();
        OptExpression selectConsume = buildSelectConsume(factory, consumeOutputs2, aggregate, repeat, cteId);
        OptExpression union =
                buildUnionAll(aggregate, consumeOutputs1, subRepeatConsume, consumeOutputs2, selectConsume);
        return Lists.newArrayList(OptExpression.create(new LogicalCTEAnchorOperator(cteId), cteProduce, union));
    }

    /**
     * Unions the two consumer branches with the aggregate's original output columns,
     * limit and predicate; child column lists are positionally aligned with the output.
     */
    private OptExpression buildUnionAll(LogicalAggregationOperator aggregate,
                                        Map<ColumnRefOperator, ColumnRefOperator> inputs1, OptExpression repeatConsume,
                                        Map<ColumnRefOperator, ColumnRefOperator> inputs2,
                                        OptExpression selectConsume) {
        // Output order: grouping keys first, then aggregate result refs.
        List<ColumnRefOperator> outputs = Lists.newArrayList();
        outputs.addAll(aggregate.getGroupingKeys());
        outputs.addAll(aggregate.getAggregations().keySet());
        List<List<ColumnRefOperator>> childOutputs = Lists.newArrayList();
        childOutputs.add(outputs.stream().map(inputs1::get).collect(Collectors.toList()));
        childOutputs.add(outputs.stream().map(inputs2::get).collect(Collectors.toList()));
        LogicalUnionOperator union = LogicalUnionOperator.builder()
                .setOutputColumnRefOp(outputs)
                .setChildOutputColumns(childOutputs)
                .setLimit(aggregate.getLimit())
                .setPredicate(aggregate.getPredicate())
                .build();
        return OptExpression.create(union, repeatConsume, selectConsume);
    }

    /**
     * Builds the CTE producer: one aggregation grouped by the last grouping-set column
     * list, restricted to the aggregate's actual grouping keys.
     */
    private OptExpression buildCTEProduce(OptimizerContext context, OptExpression input, int cteId) {
        OptExpression repeatInput = input.inputAt(0);
        LogicalAggregationOperator aggregate = (LogicalAggregationOperator) input.getOp();
        LogicalRepeatOperator repeat = (LogicalRepeatOperator) repeatInput.getOp();
        List<ColumnRefOperator> allGroupByRefs = repeat.getRepeatColumnRef()
                .get(repeat.getRepeatColumnRef().size() - 1);
        // Keep only columns that are real aggregate grouping keys.
        allGroupByRefs.retainAll(aggregate.getGroupingKeys());
        List<ColumnRefOperator> partitionRefs = Collections.emptyList();
        // Derive statistics on demand so a distribution column can be chosen.
        if (null == repeatInput.getStatistics()) {
            Utils.calculateStatistics(input, context);
        }
        if (null != repeatInput.getStatistics()) {
            Statistics statistics = repeatInput.getStatistics();
            // Pick the single known-statistics column with the highest NDV as shuffle key.
            partitionRefs = allGroupByRefs.stream()
                    .filter(ref -> !statistics.getColumnStatistic(ref).isUnknown())
                    .sorted((o1, o2) -> Double.compare(statistics.getColumnStatistic(o2).getDistinctValuesCount(),
                            statistics.getColumnStatistic(o1).getDistinctValuesCount()))
                    .limit(1)
                    .collect(Collectors.toList());
        }
        // Fall back to all group-by keys when reshuffle is disabled or no statistics exist.
        if (!context.getSessionVariable().isCboPushDownGroupingSetReshuffle() || partitionRefs.isEmpty()) {
            partitionRefs = allGroupByRefs;
        }
        LogicalAggregationOperator.Builder builder = LogicalAggregationOperator.builder();
        builder.setType(AggType.GLOBAL)
                .setGroupingKeys(allGroupByRefs)
                .setAggregations(aggregate.getAggregations())
                .setPredicate(aggregate.getPredicate())
                .setPartitionByColumns(partitionRefs);
        // Session variable "local" forces a one-stage, unsplit local aggregation instead.
        if ("local".equals(context.getSessionVariable().getCboPushDownAggregate())) {
            builder.setType(AggType.LOCAL);
            builder.setSplit(false);
        }
        LogicalAggregationOperator allColumnRefsAggregate = builder.build();
        LogicalCTEProduceOperator produce = new LogicalCTEProduceOperator(cteId);
        return OptExpression.create(produce,
                OptExpression.create(allColumnRefsAggregate, input.inputAt(0).getInputs()));
    }

    /*
     * select *, (grouping_id, grouping_set) from cte1
     */
    private OptExpression buildSelectConsume(ColumnRefFactory factory,
                                             Map<ColumnRefOperator, ColumnRefOperator> outputs,
                                             LogicalAggregationOperator aggregate, LogicalRepeatOperator repeat,
                                             int cteId) {
        Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap();
        Map<ColumnRefOperator, ColumnRefOperator> cteColumnRefs = Maps.newHashMap();
        // Pass aggregate result columns straight through from the CTE.
        for (ColumnRefOperator input : aggregate.getAggregations().keySet()) {
            ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
            cteColumnRefs.put(cteOutput, input);
            outputs.put(input, cteOutput);
            projectMap.put(cteOutput, cteOutput);
        }
        // Pass grouping keys through, except the repeat's generated grouping-output columns.
        for (ColumnRefOperator input : aggregate.getGroupingKeys()) {
            if (!repeat.getOutputGrouping().contains(input)) {
                ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
                cteColumnRefs.put(cteOutput, input);
                outputs.put(input, cteOutput);
                projectMap.put(cteOutput, cteOutput);
            }
        }
        LogicalCTEConsumeOperator consume = new LogicalCTEConsumeOperator(cteId, cteColumnRefs);
        // Grouping-id columns become constants taken from the last grouping set.
        int lastGroups = repeat.getRepeatColumnRef().size() - 1;
        for (int i = 0; i < repeat.getOutputGrouping().size(); i++) {
            ColumnRefOperator input = repeat.getOutputGrouping().get(i);
            ColumnRefOperator output = factory.create(input, input.getType(), input.isNullable());
            outputs.put(input, output);
            projectMap.put(output, ConstantOperator.createBigint(repeat.getGroupingIds().get(i).get(lastGroups)));
        }
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projectMap);
        return OptExpression.create(projectOperator, OptExpression.create(consume));
    }

    /*
     * select a, b, c, d, null, sum(x) x from t group by rollup(a, b, c, d)
     */
    private OptExpression buildSubRepeatConsume(ColumnRefFactory factory,
                                                Map<ColumnRefOperator, ColumnRefOperator> outputs,
                                                LogicalAggregationOperator aggregate, LogicalRepeatOperator repeat,
                                                int cteId) {
        int subGroups = repeat.getRepeatColumnRef().size() - 1;
        // Columns only present in the last grouping set; they become NULL in this branch.
        List<ColumnRefOperator> nullRefs = Lists.newArrayList(repeat.getRepeatColumnRef().get(subGroups));
        repeat.getRepeatColumnRef().stream().limit(subGroups).forEach(nullRefs::removeAll);
        Map<ColumnRefOperator, ColumnRefOperator> cteColumnRefs = Maps.newHashMap();
        for (ColumnRefOperator input : aggregate.getAggregations().keySet()) {
            ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
            cteColumnRefs.put(cteOutput, input);
            outputs.put(input, cteOutput);
        }
        // Map grouping keys, skipping generated grouping-output columns and NULL-only refs.
        for (ColumnRefOperator input : aggregate.getGroupingKeys()) {
            if (!repeat.getOutputGrouping().contains(input) && !nullRefs.contains(input)) {
                ColumnRefOperator cteOutput = factory.create(input, input.getType(), input.isNullable());
                cteColumnRefs.put(cteOutput, input);
                outputs.put(input, cteOutput);
            }
        }
        LogicalCTEConsumeOperator consume = new LogicalCTEConsumeOperator(cteId, cteColumnRefs);
        List<ColumnRefOperator> outputGrouping = Lists.newArrayList();
        repeat.getOutputGrouping().forEach(k -> {
            ColumnRefOperator x = factory.create(k, k.getType(), k.isNullable());
            outputs.put(k, x);
            outputGrouping.add(x);
        });
        // Rebuild the first n-1 grouping sets on consumer-side refs; unmapped refs are dropped.
        List<List<ColumnRefOperator>> repeatRefs = repeat.getRepeatColumnRef().stream().limit(subGroups)
                .map(l -> l.stream().map(outputs::get).filter(Objects::nonNull).collect(Collectors.toList()))
                .collect(Collectors.toList());
        List<List<Long>> groupingIds = repeat.getGroupingIds().stream()
                .map(s -> s.subList(0, subGroups)).collect(Collectors.toList());
        LogicalRepeatOperator newRepeat = LogicalRepeatOperator.builder()
                .setOutputGrouping(outputGrouping)
                .setRepeatColumnRefList(repeatRefs)
                .setGroupingIds(groupingIds)
                .setHasPushDown(true)
                .build();
        // Re-aggregate the pre-aggregated CTE values; outputs.get(k) is read before the
        // mapping is overwritten with the new result ref.
        Map<ColumnRefOperator, CallOperator> aggregations = Maps.newHashMap();
        aggregate.getAggregations().forEach((k, v) -> {
            ColumnRefOperator x = factory.create(k, k.getType(), k.isNullable());
            Function aggFunc = Expr.getBuiltinFunction(v.getFnName(), new Type[] {k.getType()},
                    Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
            aggregations.put(x, new CallOperator(v.getFnName(), k.getType(), Lists.newArrayList(outputs.get(k)), aggFunc));
            outputs.put(k, x);
        });
        List<ColumnRefOperator> groupings = aggregate.getGroupingKeys().stream()
                .filter(c -> !nullRefs.contains(c)).map(outputs::get).collect(Collectors.toList());
        LogicalAggregationOperator newAggregate = LogicalAggregationOperator.builder()
                .setAggregations(aggregations)
                .setGroupingKeys(groupings)
                .setType(AggType.GLOBAL)
                .setPartitionByColumns(groupings)
                .build();
        // Final projection: pass results and keys through, emit NULL for last-set-only columns.
        Map<ColumnRefOperator, ScalarOperator> projection = Maps.newHashMap();
        aggregations.keySet().forEach(k -> projection.put(k, k));
        groupings.forEach(k -> projection.put(k, k));
        for (ColumnRefOperator nullRef : nullRefs) {
            ColumnRefOperator m = factory.create(nullRef, nullRef.getType(), true);
            projection.put(m, ConstantOperator.createNull(nullRef.getType()));
            outputs.put(nullRef, m);
        }
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projection);
        return OptExpression.create(projectOperator,
                OptExpression.create(newAggregate, OptExpression.create(newRepeat, OptExpression.create(consume))));
    }
}
|
This commit does not yet add a `createAndSetUpSlotPool(Clock clock)` method to the `SlotPoolImpl`-related test cases. In follow-up commits, I replace all `createAndSetUpSlotPool` usages with `SlotPoolBuilder`.
|
/**
 * Tests that an idle slot is discarded even when freeing it on the TaskExecutor fails
 * (see FLINK-11059): after the failed release, the slot must not be handed out again.
 */
public void testDiscardIdleSlotIfReleasingFailed() throws Exception {
final ManualClock clock = new ManualClock();
try (TestingSlotPoolImpl slotPool = createSlotPoolImpl(clock)) {
setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor);
final AllocationID expiredAllocationId = new AllocationID();
final SlotOffer slotToExpire = new SlotOffer(expiredAllocationId, 0, ResourceProfile.ANY);
// Stub the free-slot RPC to fail; the latch records that it was attempted.
OneShotLatch freeSlotLatch = new OneShotLatch();
taskManagerGateway.setFreeSlotFunction((AllocationID allocationId, Throwable cause) -> {
freeSlotLatch.trigger();
return FutureUtils.completedExceptionally(new TimeoutException("Test failure"));
});
assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true));
assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true));
// Advance past the idle timeout and trigger the idle-slot check manually.
clock.advanceTime(timeout.toMilliseconds() + 1, TimeUnit.MILLISECONDS);
slotPool.triggerCheckIdleSlot();
freeSlotLatch.await();
// A new request must not be served by the (discarded) slot.
CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(
slotPool,
new SlotRequestId()
);
try {
future.get(10L, TimeUnit.MILLISECONDS);
fail("Expected to fail with a timeout.");
} catch (TimeoutException ignored) {
// Expected: the request is still pending and no slot is available.
assertEquals(0, slotPool.getAvailableSlots().size());
}
}
}
|
setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor);
|
/**
 * Tests that an idle slot is discarded even when freeing it on the TaskExecutor fails
 * (see FLINK-11059): after the failed release, the slot must not be handed out again.
 */
public void testDiscardIdleSlotIfReleasingFailed() throws Exception {
final ManualClock clock = new ManualClock();
try (TestingSlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, clock, TIMEOUT)) {
final AllocationID expiredAllocationId = new AllocationID();
final SlotOffer slotToExpire = new SlotOffer(expiredAllocationId, 0, ResourceProfile.ANY);
// Stub the free-slot RPC to fail; the latch records that it was attempted.
OneShotLatch freeSlotLatch = new OneShotLatch();
taskManagerGateway.setFreeSlotFunction((AllocationID allocationId, Throwable cause) -> {
freeSlotLatch.trigger();
return FutureUtils.completedExceptionally(new TimeoutException("Test failure"));
});
assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true));
assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true));
// Advance past the idle timeout and trigger the idle-slot check manually.
clock.advanceTime(TIMEOUT.toMilliseconds() + 1, TimeUnit.MILLISECONDS);
slotPool.triggerCheckIdleSlot();
freeSlotLatch.await();
// A new request must not be served by the (discarded) slot.
final CompletableFuture<PhysicalSlot> allocatedSlotFuture = requestNewAllocatedSlot(
slotPool,
new SlotRequestId());
try {
allocatedSlotFuture.get(10L, TimeUnit.MILLISECONDS);
fail("Expected to fail with a timeout.");
} catch (TimeoutException ignored) {
// Expected: the request is still pending and no slot is available.
assertEquals(0, slotPool.getAvailableSlots().size());
}
}
}
class SlotPoolImplTest extends TestLogger {
// Timeout used for test awaits; also passed to the slot pool in createSlotPoolImpl(ManualClock).
private final Time timeout = Time.seconds(10L);
// Per-test fixtures, recreated in setUp().
private JobID jobId;
private TaskManagerLocation taskManagerLocation;
private SimpleAckingTaskManagerGateway taskManagerGateway;
private TestingResourceManagerGateway resourceManagerGateway;
// Main-thread executor adapter; presumably runs actions directly on the calling thread - see forMainThread().
private ComponentMainThreadExecutor mainThreadExecutor =
ComponentMainThreadExecutorServiceAdapter.forMainThread();
/** Creates fresh job / TaskManager / ResourceManager fixtures before each test. */
@Before
public void setUp() throws Exception {
    // Use bare field assignments consistently (the original mixed "this.jobId" with bare form).
    jobId = new JobID();
    taskManagerLocation = new LocalTaskManagerLocation();
    taskManagerGateway = new SimpleAckingTaskManagerGateway();
    resourceManagerGateway = new TestingResourceManagerGateway();
}
/**
 * Tests the basic allocation round trip: a pending request is fulfilled once the
 * TaskManager offers a slot with the requested allocation id.
 */
@Test
public void testAllocateSimpleSlot() throws Exception {
CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
SlotRequestId requestId = new SlotRequestId();
CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(slotPool, requestId);
// No slot offered yet, so the request must still be pending.
assertFalse(future.isDone());
// Wait for the pool to forward the request to the ResourceManager.
final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
final SlotOffer slotOffer = new SlotOffer(
slotRequest.getAllocationId(),
0,
DEFAULT_TESTING_PROFILE);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
PhysicalSlot physicalSlot = future.get(1, TimeUnit.SECONDS);
assertTrue(future.isDone());
// The fulfilled slot carries the offering TaskManager's location and allocation id.
assertEquals(taskManagerLocation, physicalSlot.getTaskManagerLocation());
assertEquals(slotRequest.getAllocationId(), physicalSlot.getAllocationId());
}
}
/** Creates a testing slot pool for the test job using the pool's default timeouts. */
@Nonnull
private SlotPoolImpl createSlotPoolImpl() {
return new TestingSlotPoolImpl(jobId);
}
/**
 * Tests that releasing an allocated slot returns it to the pool and directly fulfills
 * the other pending request with the same physical slot.
 */
@Test
public void testAllocationFulfilledByReturnedSlot() throws Exception {
final ArrayBlockingQueue<SlotRequest> slotRequestQueue = new ArrayBlockingQueue<>(2);
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> {
// Capacity (2) matches the number of requests; spin until the offer is accepted.
while (!slotRequestQueue.offer(slotRequest)) {
}
});
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
SlotRequestId requestId1 = new SlotRequestId();
CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot(
slotPool,
requestId1
);
SlotRequestId requestId2 = new SlotRequestId();
CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot(
slotPool,
requestId2
);
assertFalse(future1.isDone());
assertFalse(future2.isDone());
final List<SlotRequest> slotRequests = new ArrayList<>(2);
for (int i = 0; i < 2; i++) {
slotRequests.add(slotRequestQueue.poll(timeout.toMilliseconds(), TimeUnit.MILLISECONDS));
}
// Offer only one physical slot, matching the first request's allocation id.
final SlotOffer slotOffer = new SlotOffer(
slotRequests.get(0).getAllocationId(),
0,
DEFAULT_TESTING_PROFILE);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS);
assertTrue(future1.isDone());
assertFalse(future2.isDone());
// Returning the slot should immediately fulfill the second pending request.
slotPool.releaseSlot(requestId1, null);
PhysicalSlot slot2 = future2.get(1, TimeUnit.SECONDS);
assertTrue(future2.isDone());
// Both requests were served by the same physical slot.
assertEquals(slot1, slot2);
}
}
/**
 * Tests that a released slot becomes available and can be re-allocated directly by its
 * allocation id via {@code allocateAvailableSlot}.
 */
@Test
public void testAllocateWithFreeSlot() throws Exception {
final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
SlotRequestId requestId1 = new SlotRequestId();
CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot(
slotPool,
requestId1
);
assertFalse(future1.isDone());
final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
final SlotOffer slotOffer = new SlotOffer(
slotRequest.getAllocationId(),
0,
DEFAULT_TESTING_PROFILE);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS);
assertTrue(future1.isDone());
// Releasing moves the slot from "allocated" to "available".
slotPool.releaseSlot(requestId1, null);
assertEquals(1, slotPool.getAvailableSlots().size());
assertEquals(0, slotPool.getAllocatedSlots().size());
// Re-allocate the same physical slot by allocation id.
Optional<PhysicalSlot> optional = slotPool.allocateAvailableSlot(
new SlotRequestId(),
slotRequest.getAllocationId()
);
assertTrue(optional.isPresent());
PhysicalSlot slot2 = optional.get();
assertEquals(slot1, slot2);
}
}
/**
 * Tests the acceptance rules of {@code offerSlot}: offers from unregistered TaskManagers
 * are rejected, non-requested offers are accepted and can fulfill pending requests,
 * duplicate offers are idempotent, and conflicting offers (same allocation id with a
 * different slot index or TaskManager) are rejected.
 */
@Test
public void testOfferSlot() throws Exception {
final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
SlotRequestId requestId = new SlotRequestId();
CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(
slotPool,
requestId
);
assertFalse(future.isDone());
final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
final SlotOffer slotOffer = new SlotOffer(
slotRequest.getAllocationId(),
0,
DEFAULT_TESTING_PROFILE);
// Offer from a TaskManager that never registered: rejected.
final TaskManagerLocation invalidTaskManagerLocation = new LocalTaskManagerLocation();
assertFalse(slotPool.offerSlot(invalidTaskManagerLocation, taskManagerGateway, slotOffer));
// A slot that was never requested is still accepted...
final SlotOffer nonRequestedSlotOffer = new SlotOffer(
new AllocationID(),
0,
DEFAULT_TESTING_PROFILE);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, nonRequestedSlotOffer));
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
PhysicalSlot slot = future.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
assertEquals(1, slotPool.getAvailableSlots().size());
assertEquals(1, slotPool.getAllocatedSlots().size());
assertEquals(taskManagerLocation, slot.getTaskManagerLocation());
// ...and it is the one that fulfilled the pending request (it arrived first).
assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId());
// Re-offering the same slot is an idempotent accept.
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
assertEquals(1, slotPool.getAllocatedSlots().size());
assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId());
// Same allocation id but different slot index: rejected.
final SlotOffer anotherSlotOfferWithSameAllocationId = new SlotOffer(
slotRequest.getAllocationId(),
1,
DEFAULT_TESTING_PROFILE);
assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId));
// Same allocation id from a different TaskManager: rejected.
TaskManagerLocation anotherTaskManagerLocation = new LocalTaskManagerLocation();
assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer));
// The same rules hold after the slot was released back to the pool.
slotPool.releaseSlot(requestId, null);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId));
assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer));
}
}
/**
 * Tests that releasing a TaskManager releases its allocated slots (failing the slot's
 * payload) while pending requests for other slots stay pending.
 */
@Test
public void testReleaseResource() throws Exception {
final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
SlotRequestId requestId1 = new SlotRequestId();
CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot(
slotPool,
requestId1
);
final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
// Second request remains unfulfilled for the whole test.
CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot(
slotPool,
new SlotRequestId()
);
final SlotOffer slotOffer = new SlotOffer(
slotRequest.getAllocationId(),
0,
DEFAULT_TESTING_PROFILE);
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS);
assertTrue(future1.isDone());
assertFalse(future2.isDone());
// Attach a payload whose release completes this future.
final CompletableFuture<?> releaseFuture = new CompletableFuture<>();
SingleLogicalSlot logicalSlot = SingleLogicalSlot.allocateFromPhysicalSlot(
requestId1,
slot1,
Locality.UNKNOWN,
new DummySlotOwner(),
true
);
logicalSlot.tryAssignPayload(new DummyPayload(releaseFuture));
// Releasing the TaskManager must release its slot and thus the payload.
slotPool.releaseTaskManager(taskManagerLocation.getResourceID(), null);
releaseFuture.get(1, TimeUnit.SECONDS);
assertFalse(logicalSlot.isAlive());
// Best-effort check that the second request was NOT completed as a side effect.
Thread.sleep(10);
assertFalse(future2.isDone());
}
}
/**
* Tests that unused offered slots are directly used to fulfill pending slot
* requests.
*
* <p>Moreover it tests that the old slot request is canceled
*
* <p>See FLINK-8089, FLINK-8934
*/
@Test
public void testFulfillingSlotRequestsWithUnusedOfferedSlots() throws Exception {
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
// Record requested and canceled allocation ids from the ResourceManager side.
final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(2);
resourceManagerGateway.setRequestSlotConsumer(
(SlotRequest slotRequest) -> allocationIds.offer(slotRequest.getAllocationId()));
final ArrayBlockingQueue<AllocationID> canceledAllocations = new ArrayBlockingQueue<>(2);
resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::offer);
final SlotRequestId slotRequestId1 = new SlotRequestId();
final SlotRequestId slotRequestId2 = new SlotRequestId();
CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot(
slotPool,
slotRequestId1
);
final AllocationID allocationId1 = allocationIds.take();
CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot(
slotPool,
slotRequestId2
);
final AllocationID allocationId2 = allocationIds.take();
// Cancel request 1; its future fails and its allocation is canceled at the RM.
slotPool.releaseSlot(slotRequestId1, null);
try {
slotFuture1.get();
fail("The first slot future should have failed because it was cancelled.");
} catch (ExecutionException ee) {
assertTrue(ExceptionUtils.stripExecutionException(ee) instanceof FlinkException);
}
assertEquals(allocationId1, canceledAllocations.take());
// The late offer for the canceled allocation is accepted and reused to
// fulfill request 2; request 2's own allocation is canceled in turn.
final SlotOffer slotOffer = new SlotOffer(allocationId1, 0, ResourceProfile.ANY);
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
assertEquals(allocationId1, slotFuture2.get().getAllocationId());
assertEquals(allocationId2, canceledAllocations.take());
}
}
/**
* Tests that a SlotPoolImpl shutdown releases all registered slots.
*/
@Test
public void testShutdownReleasesAllSlots() throws Exception {
try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
final int numSlotOffers = 2;
final Collection<SlotOffer> slotOffers = new ArrayList<>(numSlotOffers);
for (int i = 0; i < numSlotOffers; i++) {
slotOffers.add(
new SlotOffer(
new AllocationID(),
i,
ResourceProfile.ANY));
}
// Record every allocation id that the pool frees on the TaskManager.
final ArrayBlockingQueue<AllocationID> freedSlotQueue = new ArrayBlockingQueue<>(numSlotOffers);
taskManagerGateway.setFreeSlotFunction(
(AllocationID allocationID, Throwable cause) -> {
try {
freedSlotQueue.put(allocationID);
return CompletableFuture.completedFuture(Acknowledge.get());
} catch (InterruptedException e) {
return FutureUtils.completedExceptionally(e);
}
});
final Collection<SlotOffer> acceptedSlotOffers = slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
assertThat(acceptedSlotOffers, Matchers.equalTo(slotOffers));
// Closing the pool must free every registered slot.
slotPool.close();
ArrayList<AllocationID> freedSlots = new ArrayList<>(numSlotOffers);
while (freedSlots.size() < numSlotOffers) {
freedSlotQueue.drainTo(freedSlots);
}
assertThat(freedSlots, Matchers.containsInAnyOrder(slotOffers.stream().map(SlotOffer::getAllocationId).toArray()));
}
}
/**
 * Tests that the idle-slot check frees only slots that have been idle longer than the
 * timeout: with a manual clock, the older offer expires while the fresher one survives.
 */
@Test
public void testCheckIdleSlot() throws Exception {
final ManualClock clock = new ManualClock();
try (TestingSlotPoolImpl slotPool = createSlotPoolImpl(clock)) {
final BlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1);
taskManagerGateway.setFreeSlotFunction(
(AllocationID allocationId, Throwable cause) -> {
try {
freedSlots.put(allocationId);
return CompletableFuture.completedFuture(Acknowledge.get());
} catch (InterruptedException e) {
return FutureUtils.completedExceptionally(e);
}
});
setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor);
final AllocationID expiredSlotID = new AllocationID();
final AllocationID freshSlotID = new AllocationID();
final SlotOffer slotToExpire = new SlotOffer(expiredSlotID, 0, ResourceProfile.ANY);
final SlotOffer slotToNotExpire = new SlotOffer(freshSlotID, 1, ResourceProfile.ANY);
assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()),
Matchers.is(true));
// First offer, then advance the clock by exactly the timeout before the second
// offer, so only the first slot exceeds the idle timeout after one more tick.
assertThat(
slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire),
Matchers.is(true));
clock.advanceTime(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToNotExpire),
Matchers.is(true));
clock.advanceTime(1L, TimeUnit.MILLISECONDS);
slotPool.triggerCheckIdleSlot();
final AllocationID freedSlot = freedSlots.poll(timeout.toMilliseconds(), TimeUnit.MILLISECONDS);
// Only the expired slot may have been freed.
assertThat(freedSlot, Matchers.is(expiredSlotID));
assertThat(freedSlots.isEmpty(), Matchers.is(true));
}
}
/**
 * Creates a testing slot pool driven by the given manual clock. Only {@code timeout} is
 * finite - presumably the idle-slot timeout, matching the expiry behavior exercised in
 * testCheckIdleSlot (confirm against the TestingSlotPoolImpl constructor).
 */
@Nonnull
private TestingSlotPoolImpl createSlotPoolImpl(ManualClock clock) {
return new TestingSlotPoolImpl(
jobId,
clock,
TestingUtils.infiniteTime(),
timeout,
TestingUtils.infiniteTime());
}
// NOTE(review): a leftover "@Test" annotation (together with the Javadoc of a
// removed FLINK-11059 test) preceded this method, producing a duplicate
// annotation on the same method, which does not compile. It has been removed.
/**
 * Tests that failed slots are freed on the {@link TaskExecutor} and that
 * failing the last slot of a TaskExecutor reports the now-empty TaskExecutor.
 */
@Test
public void testFreeFailedSlots() throws Exception {
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
        final int parallelism = 5;
        final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(parallelism);
        resourceManagerGateway.setRequestSlotConsumer(
            slotRequest -> allocationIds.offer(slotRequest.getAllocationId()));
        final Map<SlotRequestId, CompletableFuture<PhysicalSlot>> slotRequestFutures = new HashMap<>(parallelism);
        for (int i = 0; i < parallelism; i++) {
            final SlotRequestId slotRequestId = new SlotRequestId();
            slotRequestFutures.put(slotRequestId, requestNewAllocatedSlot(slotPool, slotRequestId));
        }
        final List<SlotOffer> slotOffers = new ArrayList<>(parallelism);
        for (int i = 0; i < parallelism; i++) {
            slotOffers.add(new SlotOffer(allocationIds.take(), i, ResourceProfile.ANY));
        }
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
        // Wait until every request has been fulfilled by the offered slots.
        FutureUtils.waitForAll(slotRequestFutures.values()).get();
        final ArrayBlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1);
        taskManagerGateway.setFreeSlotFunction(
            (allocationID, throwable) -> {
                freedSlots.offer(allocationID);
                return CompletableFuture.completedFuture(Acknowledge.get());
            });
        final FlinkException failException = new FlinkException("Test fail exception");
        // Fail all but the last slot: the TaskExecutor still hosts a slot, so
        // no "empty TaskExecutor" resource id may be reported yet.
        for (int i = 0; i < parallelism - 1; i++) {
            final SlotOffer slotOffer = slotOffers.get(i);
            Optional<ResourceID> emptyTaskExecutorFuture =
                slotPool.failAllocation(slotOffer.getAllocationId(), failException);
            assertThat(emptyTaskExecutorFuture.isPresent(), is(false));
            assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId())));
        }
        // Failing the last slot empties the TaskExecutor, which must be reported.
        final SlotOffer slotOffer = slotOffers.get(parallelism - 1);
        final Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation(
            slotOffer.getAllocationId(),
            failException);
        assertTrue(emptyTaskExecutorFuture.isPresent());
        assertThat(emptyTaskExecutorFuture.get(), is(equalTo(taskManagerLocation.getResourceID())));
        assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId())));
    }
}
/**
 * Tests that create report of allocated slots on a {@link TaskExecutor}.
 */
@Test
public void testCreateAllocatedSlotReport() throws Exception {
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
        final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(1);
        resourceManagerGateway.setRequestSlotConsumer(
            slotRequest -> allocationIds.offer(slotRequest.getAllocationId()));
        final SlotRequestId slotRequestId = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> slotRequestFuture = requestNewAllocatedSlot(
            slotPool,
            slotRequestId
        );
        final List<AllocatedSlotInfo> allocatedSlotInfos = new ArrayList<>(2);
        final List<SlotOffer> slotOffers = new ArrayList<>(2);
        // One slot that fulfils the pending request ...
        final AllocationID allocatedId = allocationIds.take();
        slotOffers.add(new SlotOffer(allocatedId, 0, ResourceProfile.ANY));
        allocatedSlotInfos.add(new AllocatedSlotInfo(0, allocatedId));
        // ... and one extra slot that stays available; both must appear in the report.
        final AllocationID availableId = new AllocationID();
        slotOffers.add(new SlotOffer(availableId, 1, ResourceProfile.ANY));
        allocatedSlotInfos.add(new AllocatedSlotInfo(1, availableId));
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
        slotRequestFuture.get(1, TimeUnit.SECONDS);
        final AllocatedSlotReport slotReport = slotPool.createAllocatedSlotReport(taskManagerLocation.getResourceID());
        assertThat(jobId, is(slotReport.getJobId()));
        assertThat(slotReport.getAllocatedSlotInfos(), containsInAnyOrder(isEachEqual(allocatedSlotInfos)));
    }
}
/**
 * Tests that the task executor utilization reported alongside each available
 * slot matches the fraction of allocated slots per TaskExecutor.
 */
@Test
public void testCalculationOfTaskExecutorUtilization() throws Exception {
    try (final SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
        final TaskManagerLocation firstTaskManagerLocation = new LocalTaskManagerLocation();
        final TaskManagerLocation secondTaskManagerLocation = new LocalTaskManagerLocation();
        final List<AllocationID> firstTaskManagersSlots = registerAndOfferSlots(firstTaskManagerLocation, slotPool, 4);
        final List<AllocationID> secondTaskManagersSlots = registerAndOfferSlots(secondTaskManagerLocation, slotPool, 4);
        // Allocate 2 of 4 slots on the first TM and 1 of 4 on the second.
        slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(0));
        slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(1));
        slotPool.allocateAvailableSlot(new SlotRequestId(), secondTaskManagersSlots.get(3));
        final Collection<SlotInfoWithUtilization> availableSlotsInformation = slotPool.getAvailableSlotsInformation();
        // Expected utilization = allocated / total slots per TaskExecutor.
        final Map<TaskManagerLocation, Double> utilizationPerTaskExecutor = ImmutableMap.of(
            firstTaskManagerLocation, 2.0 / 4,
            secondTaskManagerLocation, 1.0 / 4);
        for (SlotInfoWithUtilization slotInfoWithUtilization : availableSlotsInformation) {
            final double expectedTaskExecutorUtilization = utilizationPerTaskExecutor.get(slotInfoWithUtilization.getTaskManagerLocation());
            assertThat(slotInfoWithUtilization.getTaskExecutorUtilization(), is(closeTo(expectedTaskExecutorUtilization, 0.1)));
        }
    }
}
/**
 * Tests that when a pending request is fulfilled by a different allocation,
 * the orphaned allocation id is remapped onto the remaining pending request
 * instead of being cancelled at the resource manager.
 */
@Test
public void testOrphanedAllocationCanBeRemapped() throws Exception {
    final List<AllocationID> allocationIds = new ArrayList<>();
    resourceManagerGateway.setRequestSlotConsumer(
        slotRequest -> allocationIds.add(slotRequest.getAllocationId()));
    final List<AllocationID> canceledAllocations = new ArrayList<>();
    resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
        final SlotRequestId slotRequestId1 = new SlotRequestId();
        final SlotRequestId slotRequestId2 = new SlotRequestId();
        requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2);
        final AllocationID allocationId1 = allocationIds.get(0);
        final AllocationID allocationId2 = allocationIds.get(1);
        // Offer the slot of the *second* allocation; per the assertions below it
        // fulfils one request and leaves allocationId1 orphaned.
        offerSlot(slotPool, allocationId2);
        // The remaining request must now be keyed by the orphaned allocation,
        // and nothing must have been cancelled at the resource manager.
        assertThat(slotPool.getPendingRequests().values(), hasSize(1));
        assertThat(slotPool.getPendingRequests().containsKeyA(slotRequestId2), is(true));
        assertThat(slotPool.getPendingRequests().containsKeyB(allocationId1), is(true));
        assertThat(canceledAllocations, hasSize(0));
    }
}
/**
 * Tests that an orphaned allocation is cancelled at the resource manager when
 * it cannot be remapped onto another pending request.
 */
@Test
public void testOrphanedAllocationIsCanceledIfNotRemapped() throws Exception {
    final List<AllocationID> allocationIds = new ArrayList<>();
    resourceManagerGateway.setRequestSlotConsumer(
        slotRequest -> allocationIds.add(slotRequest.getAllocationId()));
    final List<AllocationID> canceledAllocations = new ArrayList<>();
    resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) {
        final SlotRequestId slotRequestId1 = new SlotRequestId();
        final SlotRequestId slotRequestId2 = new SlotRequestId();
        requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2);
        final AllocationID allocationId1 = allocationIds.get(0);
        final AllocationID allocationId2 = allocationIds.get(1);
        // Make sure the offered allocation id matches neither pending request.
        AllocationID randomAllocationId;
        do {
            randomAllocationId = new AllocationID();
        } while (randomAllocationId.equals(allocationId1) || randomAllocationId.equals(allocationId2));
        offerSlot(slotPool, randomAllocationId);
        // One request is fulfilled by the unknown slot; the orphaned allocation
        // of the fulfilled request must be cancelled at the resource manager.
        assertThat(slotPool.getPendingRequests().values(), hasSize(1));
        assertThat(canceledAllocations, contains(allocationId1));
    }
}
/**
 * In this case a slot is offered to the SlotPoolImpl before the ResourceManager is connected.
 * It can happen in production if a TaskExecutor is reconnected to a restarted JobMaster.
 */
@Test
public void testSlotsOfferedWithoutResourceManagerConnected() throws Exception {
    try (SlotPoolImpl slotPool = createSlotPoolImpl()) {
        slotPool.start(JobMasterId.generate(), "mock-address", mainThreadExecutor);
        final SlotRequestId slotRequestId = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, slotRequestId);
        // Without an RM connection the request is parked, not forwarded.
        assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(1));
        final AllocationID allocationId = new AllocationID();
        offerSlot(slotPool, allocationId);
        // The offered slot fulfils the parked request directly.
        assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(0));
        assertThat(slotFuture.isDone(), is(true));
        assertThat(slotFuture.isCompletedExceptionally(), is(false));
        assertThat(slotFuture.getNow(null).getAllocationId(), is(allocationId));
    }
}
/**
 * Issues one new-slot request per given request id. The returned futures are
 * intentionally discarded; callers only need the requests to be pending.
 */
private void requestNewAllocatedSlots(final SlotPool slotPool, final SlotRequestId... slotRequestIds) {
    for (final SlotRequestId requestId : slotRequestIds) {
        requestNewAllocatedSlot(slotPool, requestId);
    }
}
/**
 * Requests a new slot with an unknown resource profile, using the test-wide
 * {@code timeout}.
 */
private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot(final SlotPool slotPool, final SlotRequestId slotRequestId) {
    return slotPool.requestNewAllocatedSlot(slotRequestId, ResourceProfile.UNKNOWN, timeout);
}
/**
 * Registers the shared TaskExecutor with the pool and offers it one slot
 * (index 0) carrying the given allocation id.
 */
private void offerSlot(final SlotPoolImpl slotPool, final AllocationID allocationId) {
    final SlotOffer singleSlotOffer = new SlotOffer(allocationId, 0, ResourceProfile.ANY);
    slotPool.registerTaskManager(taskManagerLocation.getResourceID());
    slotPool.offerSlot(taskManagerLocation, taskManagerGateway, singleSlotOffer);
}
/**
 * Registers the given TaskExecutor and offers it {@code numberOfSlotsToRegister}
 * slots with fresh allocation ids (slot index == position in the result).
 *
 * @return the allocation ids of the offered slots, in slot-index order
 */
private List<AllocationID> registerAndOfferSlots(TaskManagerLocation taskManagerLocation, SlotPoolImpl slotPool, int numberOfSlotsToRegister) {
    slotPool.registerTaskManager(taskManagerLocation.getResourceID());
    final List<AllocationID> allocationIds = new ArrayList<>(numberOfSlotsToRegister);
    final List<SlotOffer> slotOffers = new ArrayList<>(numberOfSlotsToRegister);
    // Build ids and offers in a single pass; index i becomes slot index i.
    for (int slotIndex = 0; slotIndex < numberOfSlotsToRegister; slotIndex++) {
        final AllocationID allocationId = new AllocationID();
        allocationIds.add(allocationId);
        slotOffers.add(new SlotOffer(allocationId, slotIndex, ResourceProfile.ANY));
    }
    slotPool.offerSlots(taskManagerLocation, new SimpleAckingTaskManagerGateway(), slotOffers);
    return allocationIds;
}
/**
 * Builds one equality matcher per expected {@link AllocatedSlotInfo}, suitable
 * for Hamcrest's {@code containsInAnyOrder}.
 */
private static Collection<Matcher<? super AllocatedSlotInfo>> isEachEqual(Collection<AllocatedSlotInfo> allocatedSlotInfos) {
    final Collection<Matcher<? super AllocatedSlotInfo>> matchers = new ArrayList<>(allocatedSlotInfos.size());
    for (final AllocatedSlotInfo allocatedSlotInfo : allocatedSlotInfos) {
        matchers.add(isEqualAllocatedSlotInfo(allocatedSlotInfo));
    }
    return matchers;
}
/**
 * Creates a matcher that matches an {@link AllocatedSlotInfo} having the same
 * allocation id and slot index as {@code expectedAllocatedSlotInfo}.
 */
private static Matcher<AllocatedSlotInfo> isEqualAllocatedSlotInfo(AllocatedSlotInfo expectedAllocatedSlotInfo) {
    return new TypeSafeDiagnosingMatcher<AllocatedSlotInfo>() {
        @Override
        public void describeTo(Description description) {
            description.appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo));
        }

        private String describeAllocatedSlotInformation(AllocatedSlotInfo expectedAllocatedSlotInformation) {
            return expectedAllocatedSlotInformation.toString();
        }

        @Override
        protected boolean matchesSafely(AllocatedSlotInfo item, Description mismatchDescription) {
            // Equality here is defined by allocation id and slot index only.
            final boolean matches = item.getAllocationId().equals(expectedAllocatedSlotInfo.getAllocationId()) &&
                item.getSlotIndex() == expectedAllocatedSlotInfo.getSlotIndex();
            if (!matches) {
                mismatchDescription
                    .appendText("Actual value ")
                    .appendText(describeAllocatedSlotInformation(item))
                    .appendText(" differs from expected value ")
                    .appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo));
            }
            return matches;
        }
    };
}
/**
 * Convenience: builds a slot pool, starts it on the test main-thread executor
 * and connects it to the testing resource manager gateway.
 */
private SlotPoolImpl createAndSetUpSlotPool() throws Exception {
    final SlotPoolImpl newSlotPool = createSlotPoolImpl();
    setupSlotPool(newSlotPool, resourceManagerGateway, mainThreadExecutor);
    return newSlotPool;
}
/**
 * Starts the given pool with a fresh fencing token and connects it to the
 * provided resource manager gateway.
 */
private static void setupSlotPool(
        SlotPoolImpl slotPool,
        ResourceManagerGateway resourceManagerGateway,
        ComponentMainThreadExecutor mainThreadExecutable) throws Exception {
    // The JobManager address is arbitrary for tests.
    slotPool.start(JobMasterId.generate(), "foobar", mainThreadExecutable);
    slotPool.connectToResourceManager(resourceManagerGateway);
}
}
|
class SlotPoolImplTest extends TestLogger {
private static final Time TIMEOUT = SlotPoolUtils.TIMEOUT;
private TaskManagerLocation taskManagerLocation;
private SimpleAckingTaskManagerGateway taskManagerGateway;
private TestingResourceManagerGateway resourceManagerGateway;
private static final ComponentMainThreadExecutor mainThreadExecutor =
ComponentMainThreadExecutorServiceAdapter.forMainThread();
/** Creates fresh task-manager / resource-manager test doubles for every test. */
@Before
public void setUp() throws Exception {
    resourceManagerGateway = new TestingResourceManagerGateway();
    taskManagerGateway = new SimpleAckingTaskManagerGateway();
    taskManagerLocation = new LocalTaskManagerLocation();
}
/**
 * Tests that a slot request is fulfilled once the requested allocation is
 * offered by a registered TaskExecutor.
 */
@Test
public void testAllocateSimpleSlot() throws Exception {
    CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
    resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final SlotRequestId requestId = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(slotPool, requestId);
        // Not fulfilled until a matching slot is offered.
        assertFalse(future.isDone());
        final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequest.getAllocationId()));
        final PhysicalSlot physicalSlot = future.get(1, TimeUnit.SECONDS);
        assertTrue(future.isDone());
        assertEquals(taskManagerLocation, physicalSlot.getTaskManagerLocation());
        assertEquals(slotRequest.getAllocationId(), physicalSlot.getAllocationId());
    }
}
/**
 * Tests that a slot released by one request is re-used to fulfil a second,
 * still pending request.
 */
@Test
public void testAllocationFulfilledByReturnedSlot() throws Exception {
    final ArrayBlockingQueue<SlotRequest> slotRequestQueue = new ArrayBlockingQueue<>(2);
    resourceManagerGateway.setRequestSlotConsumer(slotRequest -> {
        // Spin until the request fits; capacity 2 covers the two requests below.
        while (!slotRequestQueue.offer(slotRequest)) {
        }
    });
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final SlotRequestId requestId1 = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot(
            slotPool,
            requestId1);
        final SlotRequestId requestId2 = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot(
            slotPool,
            requestId2);
        assertFalse(future1.isDone());
        assertFalse(future2.isDone());
        final List<SlotRequest> slotRequests = new ArrayList<>(2);
        for (int i = 0; i < 2; i++) {
            slotRequests.add(slotRequestQueue.poll(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS));
        }
        // Only one physical slot is offered; it fulfils the first request.
        assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequests.get(0).getAllocationId()));
        final PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS);
        assertTrue(future1.isDone());
        assertFalse(future2.isDone());
        // Releasing the first request hands the same slot to the second one.
        slotPool.releaseSlot(requestId1, null);
        final PhysicalSlot slot2 = future2.get(1, TimeUnit.SECONDS);
        assertTrue(future2.isDone());
        assertEquals(slot1, slot2);
    }
}
/**
 * Tests that an already available slot can be allocated directly by its
 * allocation id, moving it from the available to the allocated set.
 */
@Test
public void testAllocateWithFreeSlot() throws Exception {
    // NOTE(review): the original registered an unused request-slot consumer /
    // CompletableFuture here; this test never issues a resource-manager slot
    // request, so that dead setup has been removed.
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final AllocationID allocationId = new AllocationID();
        assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, allocationId));
        // The offered slot is available and not yet allocated.
        assertEquals(1, slotPool.getAvailableSlots().size());
        assertEquals(0, slotPool.getAllocatedSlots().size());
        Optional<PhysicalSlot> physicalSlot = slotPool.allocateAvailableSlot(
            new SlotRequestId(),
            allocationId);
        assertTrue(physicalSlot.isPresent());
        // Allocation moves the slot from the available to the allocated set.
        assertEquals(0, slotPool.getAvailableSlots().size());
        assertEquals(1, slotPool.getAllocatedSlots().size());
    }
}
/**
 * Tests the accept/reject behavior of slot offers: offers from an unknown
 * TaskManagerLocation, unsolicited allocation ids, duplicate offers, and
 * conflicting offers reusing a known allocation id.
 */
@Test
public void testOfferSlot() throws Exception {
    final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
    resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        final SlotRequestId requestId = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(
            slotPool,
            requestId);
        assertFalse(future.isDone());
        final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        final SlotOffer slotOffer = new SlotOffer(
            slotRequest.getAllocationId(),
            0,
            ResourceProfile.ANY);
        // Offers from an unregistered TaskManager must be rejected.
        final TaskManagerLocation invalidTaskManagerLocation = new LocalTaskManagerLocation();
        assertFalse(slotPool.offerSlot(invalidTaskManagerLocation, taskManagerGateway, slotOffer));
        // An unsolicited allocation id is accepted and fulfils the pending request.
        final SlotOffer nonRequestedSlotOffer = new SlotOffer(
            new AllocationID(),
            0,
            ResourceProfile.ANY);
        assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, nonRequestedSlotOffer));
        assertEquals(1, slotPool.getAllocatedSlots().size());
        final PhysicalSlot slot = future.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        assertEquals(taskManagerLocation, slot.getTaskManagerLocation());
        assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId());
        // The originally requested allocation now ends up as an available slot.
        assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
        assertEquals(1, slotPool.getAvailableSlots().size());
        // Duplicate offers are accepted idempotently.
        assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
        assertEquals(1, slotPool.getAvailableSlots().size());
        assertEquals(1, slotPool.getAllocatedSlots().size());
        // Same allocation id but a different slot index must be rejected.
        final SlotOffer anotherSlotOfferWithSameAllocationId = new SlotOffer(
            slotRequest.getAllocationId(),
            1,
            ResourceProfile.ANY);
        assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId));
        // Same allocation id offered from another TaskManager must be rejected.
        TaskManagerLocation anotherTaskManagerLocation = new LocalTaskManagerLocation();
        assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer));
        // The same rules hold after the slot has been released back to the pool.
        slotPool.releaseSlot(requestId, null);
        assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer));
        assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId));
        assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer));
    }
}
/**
 * Tests that releasing a TaskManager releases the logical slot living on it
 * while leaving an unrelated pending request unfulfilled.
 */
@Test
public void testReleaseResource() throws Exception {
    final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>();
    resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final SlotRequestId requestId1 = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot(
            slotPool,
            requestId1);
        final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        final CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot(
            slotPool,
            new SlotRequestId());
        assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequest.getAllocationId()));
        final PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS);
        assertTrue(future1.isDone());
        assertFalse(future2.isDone());
        // Attach a payload so the release of the logical slot can be observed.
        final CompletableFuture<?> releaseFuture = new CompletableFuture<>();
        final SingleLogicalSlot logicalSlot = SingleLogicalSlot.allocateFromPhysicalSlot(
            requestId1,
            slot1,
            Locality.UNKNOWN,
            new DummySlotOwner(),
            true);
        logicalSlot.tryAssignPayload(new DummyPayload(releaseFuture));
        slotPool.releaseTaskManager(taskManagerLocation.getResourceID(), null);
        releaseFuture.get();
        assertFalse(logicalSlot.isAlive());
        // NOTE(review): fixed sleep is only a best-effort check that the second
        // request stayed pending; it cannot prove the absence of completion.
        Thread.sleep(10);
        assertFalse(future2.isDone());
    }
}
/**
 * Tests that unused offered slots are directly used to fulfill pending slot
 * requests.
 *
 * <p>Moreover it tests that the old slot request is canceled
 *
 * <p>See FLINK-8089, FLINK-8934
 */
@Test
public void testFulfillingSlotRequestsWithUnusedOfferedSlots() throws Exception {
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(2);
        resourceManagerGateway.setRequestSlotConsumer(
            (SlotRequest slotRequest) -> allocationIds.offer(slotRequest.getAllocationId()));
        final ArrayBlockingQueue<AllocationID> canceledAllocations = new ArrayBlockingQueue<>(2);
        resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::offer);
        final SlotRequestId slotRequestId1 = new SlotRequestId();
        final SlotRequestId slotRequestId2 = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot(
            slotPool,
            slotRequestId1);
        final AllocationID allocationId1 = allocationIds.take();
        final CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot(
            slotPool,
            slotRequestId2);
        final AllocationID allocationId2 = allocationIds.take();
        // Releasing the first request fails its future and cancels its allocation.
        slotPool.releaseSlot(slotRequestId1, null);
        try {
            slotFuture1.get();
            fail("The first slot future should have failed because it was cancelled.");
        } catch (ExecutionException ee) {
            assertTrue(ExceptionUtils.stripExecutionException(ee) instanceof FlinkException);
        }
        assertEquals(allocationId1, canceledAllocations.take());
        // Offering the now-unused first allocation fulfils the second request ...
        assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, allocationId1));
        assertEquals(allocationId1, slotFuture2.get().getAllocationId());
        // ... and the second request's original allocation gets cancelled.
        assertEquals(allocationId2, canceledAllocations.take());
    }
}
/**
 * Tests that a SlotPoolImpl shutdown releases all registered slots.
 */
@Test
public void testShutdownReleasesAllSlots() throws Exception {
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        final int numSlotOffers = 2;
        final Collection<SlotOffer> slotOffers = new ArrayList<>(numSlotOffers);
        for (int i = 0; i < numSlotOffers; i++) {
            slotOffers.add(
                new SlotOffer(
                    new AllocationID(),
                    i,
                    ResourceProfile.ANY));
        }
        // Records which slots the pool frees on the TaskExecutor.
        final ArrayBlockingQueue<AllocationID> freedSlotQueue = new ArrayBlockingQueue<>(numSlotOffers);
        taskManagerGateway.setFreeSlotFunction(
            (AllocationID allocationID, Throwable cause) -> {
                try {
                    freedSlotQueue.put(allocationID);
                    return CompletableFuture.completedFuture(Acknowledge.get());
                } catch (InterruptedException e) {
                    return FutureUtils.completedExceptionally(e);
                }
            });
        final Collection<SlotOffer> acceptedSlotOffers = slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
        assertThat(acceptedSlotOffers, Matchers.equalTo(slotOffers));
        // Closing the pool must free every slot that was offered.
        slotPool.close();
        ArrayList<AllocationID> freedSlots = new ArrayList<>(numSlotOffers);
        while (freedSlots.size() < numSlotOffers) {
            freedSlotQueue.drainTo(freedSlots);
        }
        assertThat(freedSlots, Matchers.containsInAnyOrder(slotOffers.stream().map(SlotOffer::getAllocationId).toArray()));
    }
}
/**
 * Tests that {@code triggerCheckIdleSlot} frees only slots whose idle time,
 * driven deterministically by a {@link ManualClock}, has exceeded the timeout.
 */
@Test
public void testCheckIdleSlot() throws Exception {
    final ManualClock clock = new ManualClock();
    try (TestingSlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, clock, TIMEOUT)) {
        // Records the allocation ids the pool asks the TaskExecutor to free.
        final BlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1);
        taskManagerGateway.setFreeSlotFunction(
            (AllocationID allocationId, Throwable cause) -> {
                try {
                    freedSlots.put(allocationId);
                    return CompletableFuture.completedFuture(Acknowledge.get());
                } catch (InterruptedException e) {
                    return FutureUtils.completedExceptionally(e);
                }
            });
        final AllocationID expiredSlotID = new AllocationID();
        final AllocationID freshSlotID = new AllocationID();
        final SlotOffer slotToExpire = new SlotOffer(expiredSlotID, 0, ResourceProfile.ANY);
        final SlotOffer slotToNotExpire = new SlotOffer(freshSlotID, 1, ResourceProfile.ANY);
        assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()),
            Matchers.is(true));
        // The first slot idles for the full timeout ...
        assertThat(
            slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire),
            Matchers.is(true));
        clock.advanceTime(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        // ... the second one only for a single millisecond before the check.
        assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToNotExpire),
            Matchers.is(true));
        clock.advanceTime(1L, TimeUnit.MILLISECONDS);
        slotPool.triggerCheckIdleSlot();
        final AllocationID freedSlot = freedSlots.poll(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
        // Only the slot whose idle time exceeded the timeout may be freed.
        assertThat(freedSlot, Matchers.is(expiredSlotID));
        assertThat(freedSlots.isEmpty(), Matchers.is(true));
    }
}
// NOTE(review): a leftover "@Test" annotation (together with the Javadoc of a
// removed FLINK-11059 test) preceded this method, producing a duplicate
// annotation on the same method, which does not compile. It has been removed.
/**
 * Tests that failed slots are freed on the {@link TaskExecutor} and that
 * failing the last slot of a TaskExecutor reports the now-empty TaskExecutor.
 */
@Test
public void testFreeFailedSlots() throws Exception {
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final int parallelism = 5;
        final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(parallelism);
        resourceManagerGateway.setRequestSlotConsumer(
            slotRequest -> allocationIds.offer(slotRequest.getAllocationId()));
        final Map<SlotRequestId, CompletableFuture<PhysicalSlot>> slotRequestFutures = new HashMap<>(parallelism);
        for (int i = 0; i < parallelism; i++) {
            final SlotRequestId slotRequestId = new SlotRequestId();
            slotRequestFutures.put(slotRequestId, requestNewAllocatedSlot(slotPool, slotRequestId));
        }
        final List<SlotOffer> slotOffers = new ArrayList<>(parallelism);
        for (int i = 0; i < parallelism; i++) {
            slotOffers.add(new SlotOffer(allocationIds.take(), i, ResourceProfile.ANY));
        }
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
        // Wait until every request has been fulfilled by the offered slots.
        FutureUtils.waitForAll(slotRequestFutures.values()).get();
        final ArrayBlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1);
        taskManagerGateway.setFreeSlotFunction(
            (allocationID, throwable) -> {
                freedSlots.offer(allocationID);
                return CompletableFuture.completedFuture(Acknowledge.get());
            });
        final FlinkException failException = new FlinkException("Test fail exception");
        // Fail all but the last slot: the TaskExecutor still hosts a slot, so
        // no "empty TaskExecutor" resource id may be reported yet.
        for (int i = 0; i < parallelism - 1; i++) {
            final SlotOffer slotOffer = slotOffers.get(i);
            Optional<ResourceID> emptyTaskExecutorFuture =
                slotPool.failAllocation(slotOffer.getAllocationId(), failException);
            assertThat(emptyTaskExecutorFuture.isPresent(), is(false));
            assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId())));
        }
        // Failing the last slot empties the TaskExecutor, which must be reported.
        final SlotOffer slotOffer = slotOffers.get(parallelism - 1);
        final Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation(
            slotOffer.getAllocationId(),
            failException);
        // Guard the Optional before get() (previously an unchecked get()).
        assertTrue(emptyTaskExecutorFuture.isPresent());
        assertThat(emptyTaskExecutorFuture.get(), is(equalTo(taskManagerLocation.getResourceID())));
        assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId())));
    }
}
/**
 * Tests that create report of allocated slots on a {@link TaskExecutor}.
 */
@Test
public void testCreateAllocatedSlotReport() throws Exception {
    final JobID jobId = new JobID();
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, jobId)) {
        final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(1);
        resourceManagerGateway.setRequestSlotConsumer(
            slotRequest -> allocationIds.offer(slotRequest.getAllocationId()));
        final CompletableFuture<PhysicalSlot> slotRequestFuture = requestNewAllocatedSlot(
            slotPool,
            new SlotRequestId());
        final List<AllocatedSlotInfo> allocatedSlotInfos = new ArrayList<>(2);
        final List<SlotOffer> slotOffers = new ArrayList<>(2);
        // One slot that fulfils the pending request ...
        final AllocationID allocatedId = allocationIds.take();
        slotOffers.add(new SlotOffer(allocatedId, 0, ResourceProfile.ANY));
        allocatedSlotInfos.add(new AllocatedSlotInfo(0, allocatedId));
        // ... and one extra slot that stays available; both must appear in the report.
        final AllocationID availableId = new AllocationID();
        slotOffers.add(new SlotOffer(availableId, 1, ResourceProfile.ANY));
        allocatedSlotInfos.add(new AllocatedSlotInfo(1, availableId));
        slotPool.registerTaskManager(taskManagerLocation.getResourceID());
        slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers);
        slotRequestFuture.get();
        final AllocatedSlotReport slotReport = slotPool.createAllocatedSlotReport(taskManagerLocation.getResourceID());
        assertThat(jobId, is(slotReport.getJobId()));
        assertThat(slotReport.getAllocatedSlotInfos(), containsInAnyOrder(isEachEqual(allocatedSlotInfos)));
    }
}
/**
 * Tests that the task executor utilization reported alongside each available
 * slot matches the fraction of allocated slots per TaskExecutor.
 */
@Test
public void testCalculationOfTaskExecutorUtilization() throws Exception {
    try (final SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final TaskManagerLocation firstTaskManagerLocation = new LocalTaskManagerLocation();
        final TaskManagerLocation secondTaskManagerLocation = new LocalTaskManagerLocation();
        final List<AllocationID> firstTaskManagersSlots = registerAndOfferSlots(firstTaskManagerLocation, slotPool, 4);
        final List<AllocationID> secondTaskManagersSlots = registerAndOfferSlots(secondTaskManagerLocation, slotPool, 4);
        // Allocate 2 of 4 slots on the first TM and 1 of 4 on the second.
        slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(0));
        slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(1));
        slotPool.allocateAvailableSlot(new SlotRequestId(), secondTaskManagersSlots.get(3));
        final Collection<SlotInfoWithUtilization> availableSlotsInformation = slotPool.getAvailableSlotsInformation();
        // Expected utilization = allocated / total slots per TaskExecutor.
        final Map<TaskManagerLocation, Double> utilizationPerTaskExecutor = ImmutableMap.of(
            firstTaskManagerLocation, 2.0 / 4,
            secondTaskManagerLocation, 1.0 / 4);
        for (SlotInfoWithUtilization slotInfoWithUtilization : availableSlotsInformation) {
            final double expectedTaskExecutorUtilization = utilizationPerTaskExecutor.get(slotInfoWithUtilization.getTaskManagerLocation());
            assertThat(slotInfoWithUtilization.getTaskExecutorUtilization(), is(closeTo(expectedTaskExecutorUtilization, 0.1)));
        }
    }
}
/**
 * Tests that when a pending request is fulfilled by a different allocation,
 * the orphaned allocation id is remapped onto the remaining pending request
 * instead of being cancelled at the resource manager.
 */
@Test
public void testOrphanedAllocationCanBeRemapped() throws Exception {
    final List<AllocationID> allocationIds = new ArrayList<>();
    resourceManagerGateway.setRequestSlotConsumer(
        slotRequest -> allocationIds.add(slotRequest.getAllocationId()));
    final List<AllocationID> canceledAllocations = new ArrayList<>();
    resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add);
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final SlotRequestId slotRequestId1 = new SlotRequestId();
        final SlotRequestId slotRequestId2 = new SlotRequestId();
        requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2);
        final AllocationID allocationId1 = allocationIds.get(0);
        final AllocationID allocationId2 = allocationIds.get(1);
        // Offer the slot of the *second* allocation; per the assertions below it
        // fulfils one request and leaves allocationId1 orphaned.
        registerAndOfferSlot(taskManagerLocation, slotPool, allocationId2);
        // The remaining request must now be keyed by the orphaned allocation,
        // and nothing must have been cancelled at the resource manager.
        assertThat(slotPool.getPendingRequests().values(), hasSize(1));
        assertThat(slotPool.getPendingRequests().containsKeyA(slotRequestId2), is(true));
        assertThat(slotPool.getPendingRequests().containsKeyB(allocationId1), is(true));
        assertThat(canceledAllocations, hasSize(0));
    }
}
/**
 * Tests that an orphaned allocation is cancelled at the resource manager when
 * it cannot be remapped onto another pending request.
 */
@Test
public void testOrphanedAllocationIsCanceledIfNotRemapped() throws Exception {
    final List<AllocationID> allocationIds = new ArrayList<>();
    resourceManagerGateway.setRequestSlotConsumer(
        slotRequest -> allocationIds.add(slotRequest.getAllocationId()));
    final List<AllocationID> canceledAllocations = new ArrayList<>();
    resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add);
    // Consistency fix: use this class' createAndSetUpSlotPool helper like every
    // sibling test does, instead of going through SlotPoolUtils.
    try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) {
        final SlotRequestId slotRequestId1 = new SlotRequestId();
        final SlotRequestId slotRequestId2 = new SlotRequestId();
        requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2);
        final AllocationID allocationId1 = allocationIds.get(0);
        final AllocationID allocationId2 = allocationIds.get(1);
        // Make sure the offered allocation id matches neither pending request.
        AllocationID randomAllocationId;
        do {
            randomAllocationId = new AllocationID();
        } while (randomAllocationId.equals(allocationId1) || randomAllocationId.equals(allocationId2));
        registerAndOfferSlot(taskManagerLocation, slotPool, randomAllocationId);
        // One request is fulfilled by the unknown slot; the orphaned allocation
        // of the fulfilled request must be cancelled at the resource manager.
        assertThat(slotPool.getPendingRequests().values(), hasSize(1));
        assertThat(canceledAllocations, contains(allocationId1));
    }
}
/**
 * In this case a slot is offered to the SlotPoolImpl before the ResourceManager is connected.
 * It can happen in production if a TaskExecutor is reconnected to a restarted JobMaster.
 */
@Test
public void testSlotsOfferedWithoutResourceManagerConnected() throws Exception {
    try (SlotPoolImpl slotPool = new TestingSlotPoolImpl(new JobID())) {
        slotPool.start(JobMasterId.generate(), "mock-address", mainThreadExecutor);
        final SlotRequestId slotRequestId = new SlotRequestId();
        final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, slotRequestId);
        // Without an RM connection the request is parked, not forwarded.
        assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(1));
        final AllocationID allocationId = new AllocationID();
        registerAndOfferSlot(taskManagerLocation, slotPool, allocationId);
        // The offered slot fulfils the parked request directly.
        assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(0));
        assertThat(slotFuture.isDone(), is(true));
        assertThat(slotFuture.isCompletedExceptionally(), is(false));
        assertThat(slotFuture.getNow(null).getAllocationId(), is(allocationId));
    }
}
// Creates a slot pool wired to the given ResourceManager gateway, running on the
// shared main-thread executor.
private static TestingSlotPoolImpl createAndSetUpSlotPool(
final ResourceManagerGateway resourceManagerGateway) throws Exception {
final SlotPoolBuilder builder = new SlotPoolBuilder(mainThreadExecutor);
builder.setResourceManagerGateway(resourceManagerGateway);
return builder.build();
}
// Creates a slot pool for the given job, wired to the given ResourceManager gateway.
private static TestingSlotPoolImpl createAndSetUpSlotPool(
final ResourceManagerGateway resourceManagerGateway,
final JobID jobId) throws Exception {
final SlotPoolBuilder builder = new SlotPoolBuilder(mainThreadExecutor);
builder.setResourceManagerGateway(resourceManagerGateway);
builder.setJobId(jobId);
return builder.build();
}
// Creates a slot pool with an injectable clock and idle-slot timeout, for tests
// that exercise time-based slot release.
private static TestingSlotPoolImpl createAndSetUpSlotPool(
final ResourceManagerGateway resourceManagerGateway,
final Clock clock,
final Time idleSlotTimeout) throws Exception {
final SlotPoolBuilder builder = new SlotPoolBuilder(mainThreadExecutor);
builder.setResourceManagerGateway(resourceManagerGateway);
builder.setClock(clock);
builder.setIdleSlotTimeout(idleSlotTimeout);
return builder.build();
}
// Registers the task manager with the pool and offers it a single slot (index 0,
// ANY profile) under the given allocation id. Returns whether the offer was accepted.
private boolean registerAndOfferSlot(
final TaskManagerLocation taskManagerLocation,
final SlotPoolImpl slotPool,
final AllocationID allocationId) {
// The task manager must be known to the pool before any of its slots can be offered.
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
return slotPool.offerSlot(
taskManagerLocation,
taskManagerGateway,
new SlotOffer(allocationId, 0, ResourceProfile.ANY));
}
// Registers the task manager and offers it the requested number of slots, one per
// slot index, each under a fresh allocation id. Returns the generated allocation ids
// in slot-index order. Note: uses a throwaway SimpleAckingTaskManagerGateway, as the
// original did, so slot offers are always acknowledged.
private List<AllocationID> registerAndOfferSlots(TaskManagerLocation taskManagerLocation, SlotPoolImpl slotPool, int numberOfSlotsToRegister) {
slotPool.registerTaskManager(taskManagerLocation.getResourceID());
final List<AllocationID> allocationIds = new ArrayList<>(numberOfSlotsToRegister);
final List<SlotOffer> slotOffers = new ArrayList<>(numberOfSlotsToRegister);
for (int slotIndex = 0; slotIndex < numberOfSlotsToRegister; slotIndex++) {
final AllocationID allocationId = new AllocationID();
allocationIds.add(allocationId);
slotOffers.add(new SlotOffer(allocationId, slotIndex, ResourceProfile.ANY));
}
slotPool.offerSlots(
taskManagerLocation,
new SimpleAckingTaskManagerGateway(),
slotOffers);
return allocationIds;
}
// Maps each expected AllocatedSlotInfo to its equality matcher, preserving order.
private static Collection<Matcher<? super AllocatedSlotInfo>> isEachEqual(Collection<AllocatedSlotInfo> allocatedSlotInfos) {
final Collection<Matcher<? super AllocatedSlotInfo>> matchers = new ArrayList<>(allocatedSlotInfos.size());
for (AllocatedSlotInfo allocatedSlotInfo : allocatedSlotInfos) {
matchers.add(isEqualAllocatedSlotInfo(allocatedSlotInfo));
}
return matchers;
}
// Builds a Hamcrest matcher that compares an AllocatedSlotInfo by allocation id and
// slot index only; no other fields participate in the comparison.
private static Matcher<AllocatedSlotInfo> isEqualAllocatedSlotInfo(AllocatedSlotInfo expectedAllocatedSlotInfo) {
return new TypeSafeDiagnosingMatcher<AllocatedSlotInfo>() {
@Override
public void describeTo(Description description) {
description.appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo));
}
private String describeAllocatedSlotInformation(AllocatedSlotInfo expectedAllocatedSlotInformation) {
return expectedAllocatedSlotInformation.toString();
}
@Override
protected boolean matchesSafely(AllocatedSlotInfo item, Description mismatchDescription) {
final boolean matches = item.getAllocationId().equals(expectedAllocatedSlotInfo.getAllocationId()) &&
item.getSlotIndex() == expectedAllocatedSlotInfo.getSlotIndex();
if (!matches) {
// Record both sides so assertion failures show a readable diff.
mismatchDescription
.appendText("Actual value ")
.appendText(describeAllocatedSlotInformation(item))
.appendText(" differs from expected value ")
.appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo));
}
return matches;
}
};
}
}
|
OK — it's a matter of one's design thinking; you can keep your style.
|
/**
 * Converts order-by item segments into Calcite {@code SqlNode}s.
 *
 * <p>Only column order-by items are currently supported; expression, index and text
 * order-by items are rejected explicitly. The branch order below is preserved on
 * purpose: the column check must run before the text check in case the segment types
 * are related by inheritance.
 *
 * @param orderByItems order-by item segments to convert
 * @return the converted {@code SqlNode}s, in iteration order of the input
 * @throws UnsupportedOperationException if an unsupported segment type is encountered
 */
public static List<SqlNode> convertOrderByItems(final Collection<OrderByItemSegment> orderByItems) {
List<SqlNode> sqlNodes = Lists.newArrayList();
for (OrderByItemSegment orderByItemSegment : orderByItems) {
Optional<SqlNode> optional = Optional.empty();
if (orderByItemSegment instanceof ColumnOrderByItemSegment) {
optional = new ColumnOrderByItemSqlNodeConverter().convert((ColumnOrderByItemSegment) orderByItemSegment);
} else if (orderByItemSegment instanceof ExpressionOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported ExpressionOrderByItemSegment");
} else if (orderByItemSegment instanceof IndexOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported IndexOrderByItemSegment");
} else if (orderByItemSegment instanceof TextOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported TextOrderByItemSegment");
}
// Idiomatic replacement for the isPresent()/get() pair.
optional.ifPresent(sqlNodes::add);
}
return sqlNodes;
}
|
List<SqlNode> sqlNodes = Lists.newArrayList();
|
/**
 * Converts order-by item segments into Calcite {@code SqlNode}s. Column items are
 * converted; expression, index and text items are rejected. The original branch order
 * (column check first) is kept unchanged.
 *
 * @param orderByItems order-by item segments to convert
 * @return the converted {@code SqlNode}s, in input iteration order
 * @throws UnsupportedOperationException on an unsupported segment type
 */
public static List<SqlNode> convertOrderByItems(final Collection<OrderByItemSegment> orderByItems) {
List<SqlNode> result = Lists.newArrayList();
for (OrderByItemSegment each : orderByItems) {
Optional<SqlNode> converted = Optional.empty();
if (each instanceof ColumnOrderByItemSegment) {
converted = new ColumnOrderByItemSqlNodeConverter().convert((ColumnOrderByItemSegment) each);
} else if (each instanceof ExpressionOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported ExpressionOrderByItemSegment");
} else if (each instanceof IndexOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported IndexOrderByItemSegment");
} else if (each instanceof TextOrderByItemSegment) {
throw new UnsupportedOperationException("unsupported TextOrderByItemSegment");
}
converted.ifPresent(result::add);
}
return result;
}
|
// Utility holder for SQL-node conversion helpers.
// NOTE(review): the Javadoc below documents convertOrderByItems, whose body appears
// to have been stripped from this fragment by the extraction pipeline — confirm
// against the full source file.
class SqlNodeConverterUtil {
/**
 * Converts order-by items into {@code SqlNode}s.
 *
 * @param orderByItems order-by item list
 * @return a collection of order-by item {@code SqlNode}s
 */
}
|
// Utility holder for SQL-node conversion helpers.
// NOTE(review): the Javadoc below documents convertOrderByItems, whose body appears
// to have been stripped from this fragment by the extraction pipeline — confirm
// against the full source file.
class SqlNodeConverterUtil {
/**
 * Converts order-by items into {@code SqlNode}s.
 *
 * @param orderByItems order-by item list
 * @return a collection of order-by item {@code SqlNode}s
 */
}
|
Nope. The mailbox executes the writer.
|
// Serializes the record, routes it to a topic, and sends it asynchronously.
// For AT_LEAST_ONCE delivery, the semaphore bounds the number of in-flight sends.
public void write(IN element, Context context) throws IOException, InterruptedException {
sinkContextAdapter.updateTimestamp(context);
RawMessage<byte[]> message = serializationSchema.serialize(element, sinkContextAdapter);
List<String> availableTopics = metadataListener.availableTopics();
String topic = topicRouter.route(element, message, availableTopics, sinkContextAdapter);
TypedMessageBuilder<?> builder = createMessageBuilder(topic, deliveryGuarantee);
if (sinkConfiguration.isEnableSchemaEvolution()) {
// Schema evolution enabled: hand the raw element to Pulsar's schema machinery.
((TypedMessageBuilder<IN>) builder).value(element);
} else {
// Otherwise send the bytes produced by the serialization schema.
((TypedMessageBuilder<byte[]>) builder).value(message.getValue());
}
message.supplement(builder);
// NOTE(review): if deliveryGuarantee == EXACTLY_ONCE, neither branch below runs, so
// the message is built (with a transaction, per createMessageBuilder) but never sent
// here — confirm whether the exactly-once send happens elsewhere.
if (deliveryGuarantee == DeliveryGuarantee.NONE) {
// Fire and forget: no permit accounting, send failures are not surfaced.
builder.sendAsync();
} else if (deliveryGuarantee == DeliveryGuarantee.AT_LEAST_ONCE) {
// Acquire a permit first; it is released in the completion callback below.
pendingMessages.acquire();
CompletableFuture<MessageId> sender = builder.sendAsync();
sender.whenComplete(
(id, ex) -> {
pendingMessages.release();
if (ex != null) {
// Re-throw on the mailbox thread so the failure surfaces inside the
// task, not on the Pulsar client's callback thread.
mailboxExecutor.execute(
() -> {
throw new FlinkRuntimeException(
"Failed to send data to Pulsar " + topic, ex);
},
"Failed to send message to Pulsar");
} else {
LOG.debug("Sent message to Pulsar {} with message id {}", topic, id);
}
});
}
}
|
}
|
// Serializes the record, routes it to a topic, applies an optional per-record delivery
// delay, and hands the send over to the mailbox thread (unless fire-and-forget).
public void write(IN element, Context context) throws IOException, InterruptedException {
PulsarMessage<?> message = serializationSchema.serialize(element, sinkContext);
String key = message.getKey();
List<String> availableTopics = metadataListener.availableTopics();
String topic = topicRouter.route(element, key, availableTopics, sinkContext);
TypedMessageBuilder<?> builder = createMessageBuilder(topic, context, message);
long deliverAt = messageDelayer.deliverAt(element, sinkContext);
if (deliverAt > 0) {
// Positive timestamp means the delayer requested deferred delivery.
builder.deliverAt(deliverAt);
}
if (deliveryGuarantee == DeliveryGuarantee.NONE) {
// Fire and forget: no permit accounting.
builder.sendAsync();
} else {
// Throttle in-flight sends via permits, then perform the send on the mailbox
// thread so failures are raised inside the task.
requirePermits();
mailboxExecutor.execute(
() -> enqueueMessageSending(topic, builder),
"Failed to send message to Pulsar");
}
}
|
// Sink writer that routes records to Pulsar topics, tracking in-flight sends with a
// semaphore and supporting NONE / AT_LEAST_ONCE / EXACTLY_ONCE delivery guarantees.
// NOTE(review): this fragment appears to have the write(...) method stripped by the
// extraction pipeline — see the orphaned @Override before createMessageBuilder below.
class PulsarWriter<IN> implements SinkWriter<IN, PulsarCommittable, Void> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarWriter.class);
private final SinkConfiguration sinkConfiguration;
private final DeliveryGuarantee deliveryGuarantee;
private final PulsarSerializationSchema<IN> serializationSchema;
private final TopicRouter<IN> topicRouter;
private final PulsarSinkContextAdapter sinkContextAdapter;
private final TopicMetadataListener metadataListener;
private final MailboxExecutor mailboxExecutor;
private final TopicProducerRegister producerRegister;
// Bounds the number of in-flight asynchronous sends.
private final Semaphore pendingMessages;
/**
 * Constructor creating a Pulsar writer.
 *
 * <p>It will throw a {@link RuntimeException} if opening the {@link
 * PulsarSerializationSchema} fails.
 *
 * @param sinkConfiguration the configuration to configure the Pulsar producer.
 * @param deliveryGuarantee the Sink's delivery guarantee.
 * @param serializationSchema serializer to transform the incoming records to {@link RawMessage}.
 * @param metadataListener the listener for querying topic metadata.
 * @param topicRouterProvider create related topic router to choose topic by incoming records.
 * @param initContext context to provide information about the runtime environment.
 */
public PulsarWriter(
SinkConfiguration sinkConfiguration,
DeliveryGuarantee deliveryGuarantee,
PulsarSerializationSchema<IN> serializationSchema,
TopicMetadataListener metadataListener,
SerializableFunction<SinkConfiguration, TopicRouter<IN>> topicRouterProvider,
InitContext initContext) {
this.sinkConfiguration = sinkConfiguration;
this.deliveryGuarantee = deliveryGuarantee;
this.serializationSchema = serializationSchema;
this.topicRouter = topicRouterProvider.apply(sinkConfiguration);
this.sinkContextAdapter = new PulsarSinkContextAdapter(initContext, sinkConfiguration);
this.metadataListener = metadataListener;
this.mailboxExecutor = initContext.getMailboxExecutor();
LOG.debug("Initialize topic metadata after creating Pulsar writer.");
ProcessingTimeService timeService = initContext.getProcessingTimeService();
metadataListener.open(sinkConfiguration, timeService);
topicRouter.open(sinkConfiguration);
PulsarSerializationSchemaInitializationContext initializationContext =
new PulsarSerializationSchemaInitializationContext(initContext);
try {
serializationSchema.open(initializationContext, sinkContextAdapter, sinkConfiguration);
} catch (Exception e) {
throw new FlinkRuntimeException("Cannot initialize schema.", e);
}
this.producerRegister = new TopicProducerRegister(sinkConfiguration, serializationSchema);
this.pendingMessages = new Semaphore(sinkConfiguration.getMaxPendingMessages());
}
// NOTE(review): @Override on a private method is a compile error — this annotation
// most likely belonged to the stripped write(...) method; confirm against full source.
@Override
@SuppressWarnings("unchecked")
// Creates a message builder for the topic; EXACTLY_ONCE sends are bound to a
// per-topic transaction from the producer register.
private TypedMessageBuilder<?> createMessageBuilder(
String topic, DeliveryGuarantee deliveryGuarantee) {
Producer<?> producer = producerRegister.getOrCreateProducer(topic);
if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {
Transaction transaction = producerRegister.getOrCreateTransaction(topic);
return producer.newMessage(transaction);
} else {
return producer.newMessage();
}
}
@Override
// Drains in-flight sends (busy-flush until all permits are back), then, for
// exactly-once delivery, collects the transactions to commit.
public List<PulsarCommittable> prepareCommit(boolean flush) throws IOException {
if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE || flush) {
while (pendingMessages.availablePermits() < sinkConfiguration.getMaxPendingMessages()) {
producerRegister.flush();
}
}
if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {
return producerRegister.prepareCommit();
} else {
return emptyList();
}
}
@Override
// No writer state is kept; only drains sends and resets transactions for exactly-once.
public List<Void> snapshotState(long checkpointId) throws IOException {
if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {
while (pendingMessages.availablePermits() < sinkConfiguration.getMaxPendingMessages()) {
producerRegister.flush();
}
producerRegister.clearTransactions();
}
return emptyList();
}
@Override
public void close() throws Exception {
closeAll(metadataListener, producerRegister);
}
}
|
// Sink writer (Sink V2 pre-committing variant) that routes records to Pulsar topics.
// In-flight sends are counted with a plain long because all mutation happens on the
// mailbox thread. NOTE(review): this fragment appears to have the write(...) method
// stripped by the extraction pipeline — see the orphaned @Override below.
class PulsarWriter<IN> implements PrecommittingSinkWriter<IN, PulsarCommittable> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarWriter.class);
private final SinkConfiguration sinkConfiguration;
private final PulsarSerializationSchema<IN> serializationSchema;
private final TopicMetadataListener metadataListener;
private final TopicRouter<IN> topicRouter;
private final MessageDelayer<IN> messageDelayer;
private final DeliveryGuarantee deliveryGuarantee;
private final PulsarSinkContext sinkContext;
private final MailboxExecutor mailboxExecutor;
private final TopicProducerRegister producerRegister;
// Number of in-flight sends; mutated only from the mailbox thread (see
// requirePermits/releasePermits), hence no atomic type.
private long pendingMessages = 0;
/**
 * Constructor creating a Pulsar writer.
 *
 * <p>It will throw a {@link RuntimeException} if opening the {@link
 * PulsarSerializationSchema} fails.
 *
 * @param sinkConfiguration The configuration to configure the Pulsar producer.
 * @param serializationSchema Transform the incoming records into different message properties.
 * @param metadataListener The listener for querying topic metadata.
 * @param topicRouter Topic router to choose topic by incoming records.
 * @param initContext Context to provide information about the runtime environment.
 */
public PulsarWriter(
SinkConfiguration sinkConfiguration,
PulsarSerializationSchema<IN> serializationSchema,
TopicMetadataListener metadataListener,
TopicRouter<IN> topicRouter,
MessageDelayer<IN> messageDelayer,
InitContext initContext) {
this.sinkConfiguration = checkNotNull(sinkConfiguration);
this.serializationSchema = checkNotNull(serializationSchema);
this.metadataListener = checkNotNull(metadataListener);
this.topicRouter = checkNotNull(topicRouter);
this.messageDelayer = checkNotNull(messageDelayer);
checkNotNull(initContext);
this.deliveryGuarantee = sinkConfiguration.getDeliveryGuarantee();
this.sinkContext = new PulsarSinkContextImpl(initContext, sinkConfiguration);
this.mailboxExecutor = initContext.getMailboxExecutor();
LOG.debug("Initialize topic metadata after creating Pulsar writer.");
ProcessingTimeService timeService = initContext.getProcessingTimeService();
this.metadataListener.open(sinkConfiguration, timeService);
this.topicRouter.open(sinkConfiguration);
try {
InitializationContext initializationContext =
initContext.asSerializationSchemaInitializationContext();
this.serializationSchema.open(initializationContext, sinkContext, sinkConfiguration);
} catch (Exception e) {
throw new FlinkRuntimeException("Cannot initialize schema.", e);
}
this.producerRegister = new TopicProducerRegister(sinkConfiguration);
}
// NOTE(review): @Override on a private method is a compile error — this annotation
// most likely belonged to the stripped write(...) method; confirm against full source.
@Override
// Sends on the mailbox thread and blocks (get()) until the send completes; the
// completion callback releases the permit and rethrows failures in the task.
private void enqueueMessageSending(String topic, TypedMessageBuilder<?> builder)
throws ExecutionException, InterruptedException {
builder.sendAsync()
.whenComplete(
(id, ex) -> {
this.releasePermits();
if (ex != null) {
throw new FlinkRuntimeException(
"Failed to send data to Pulsar " + topic, ex);
} else {
LOG.debug(
"Sent message to Pulsar {} with message id {}", topic, id);
}
})
.get();
}
// Yields to the mailbox until a permit is available, then takes one.
private void requirePermits() throws InterruptedException {
while (pendingMessages >= sinkConfiguration.getMaxPendingMessages()) {
LOG.info("Waiting for the available permits.");
mailboxExecutor.yield();
}
pendingMessages++;
}
private void releasePermits() {
this.pendingMessages -= 1;
}
@SuppressWarnings("rawtypes")
// Builds a TypedMessageBuilder for the routed topic, copying every populated
// property from the PulsarMessage; falls back to the sink Context timestamp when
// the message carries no event time.
private TypedMessageBuilder<?> createMessageBuilder(
String topic, Context context, PulsarMessage<?> message) {
Schema<?> schema = message.getSchema();
TypedMessageBuilder<?> builder = producerRegister.createMessageBuilder(topic, schema);
byte[] orderingKey = message.getOrderingKey();
if (orderingKey != null && orderingKey.length > 0) {
builder.orderingKey(orderingKey);
}
String key = message.getKey();
if (!Strings.isNullOrEmpty(key)) {
builder.key(key);
}
long eventTime = message.getEventTime();
if (eventTime > 0) {
builder.eventTime(eventTime);
} else {
Long timestamp = context.timestamp();
if (timestamp != null) {
builder.eventTime(timestamp);
}
}
((TypedMessageBuilder) builder).value(message.getValue());
Map<String, String> properties = message.getProperties();
if (properties != null && !properties.isEmpty()) {
builder.properties(properties);
}
Long sequenceId = message.getSequenceId();
if (sequenceId != null) {
builder.sequenceId(sequenceId);
}
List<String> clusters = message.getReplicationClusters();
if (clusters != null && !clusters.isEmpty()) {
builder.replicationClusters(clusters);
}
if (message.isDisableReplication()) {
builder.disableReplication();
}
return builder;
}
@Override
// Drains in-flight sends; on end-of-input a single producer flush is issued,
// otherwise the writer yields to the mailbox until all permits are returned.
public void flush(boolean endOfInput) throws IOException, InterruptedException {
if (endOfInput) {
producerRegister.flush();
} else {
while (pendingMessages != 0 && deliveryGuarantee != DeliveryGuarantee.NONE) {
producerRegister.flush();
LOG.info("Flush the pending messages to Pulsar.");
mailboxExecutor.yield();
}
}
}
@Override
public Collection<PulsarCommittable> prepareCommit() {
if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {
return producerRegister.prepareCommit();
} else {
return emptyList();
}
}
@Override
public void close() throws Exception {
closeAll(metadataListener, producerRegister);
}
}
|
Non-synchronized access to `childTokenSourceList` may cause a concurrent-access failure (e.g. `ConcurrentModificationException`), since `register(...)` is synchronized but this method is not.
|
/**
 * Cancels this token exactly once: closes every linked child token source and clears
 * the registration list.
 *
 * <p>Fix: the original iterated and cleared {@code childTokenSourceList} without
 * holding the monitor that {@code register(...)} (a {@code synchronized} method)
 * uses, so a concurrent registration could corrupt the iteration
 * ({@code ConcurrentModificationException}) or be silently lost. Synchronizing on
 * {@code this} makes cancel and register mutually exclusive. The CAS on
 * {@code cancellationRequested} still guarantees the close/clear runs at most once.
 */
public void cancel() {
synchronized (this) {
if (this.cancellationRequested.compareAndSet(false, true)) {
for (LinkedCancellationTokenSource childTokenSource : this.childTokenSourceList) {
childTokenSource.close();
}
childTokenSourceList.clear();
}
}
}
|
childTokenSourceList.clear();
|
/**
 * Closes all registered child token sources and forgets them; runs at most once.
 *
 * <p>Declared {@code synchronized}, which is equivalent to the original
 * {@code synchronized (this)} block wrapping the entire body, so it still excludes
 * concurrent {@code register(...)} calls.
 */
public synchronized void cancel() {
if (this.cancellationRequested.compareAndSet(false, true)) {
for (LinkedCancellationTokenSource child : this.childTokenSourceList) {
child.close();
}
this.childTokenSourceList.clear();
}
}
|
/**
 * A cancellation token that can have child token sources linked to it. Registration
 * is serialized on the token's monitor; the cancelled flag is a CAS-guarded boolean.
 */
class LinkedCancellationToken {
private final List<LinkedCancellationTokenSource> childTokenSourceList = new ArrayList<>();
private final LinkedCancellationTokenSource tokenSource;
private final AtomicBoolean cancellationRequested = new AtomicBoolean(false);
public LinkedCancellationToken(LinkedCancellationTokenSource tokenSource) {
this.tokenSource = tokenSource;
}
/** Links a child token source to this token. */
public synchronized void register(LinkedCancellationTokenSource childTokenSource) {
this.childTokenSourceList.add(childTokenSource);
}
/** True once cancellation was requested locally or the owning source was closed. */
public boolean isCancellationRequested() {
if (this.cancellationRequested.get()) {
return true;
}
return this.tokenSource.isClosed();
}
}
|
// A cancellation token that can have child token sources linked to it.
// Locking protocol: all reads/writes of childTokenSourceList happen under the
// token's monitor (synchronized (this)), so registration cannot race with a
// concurrent cancel that iterates and clears the list.
class LinkedCancellationToken {
private final List<LinkedCancellationTokenSource> childTokenSourceList;
private final LinkedCancellationTokenSource tokenSource;
private final AtomicBoolean cancellationRequested;
public LinkedCancellationToken(LinkedCancellationTokenSource tokenSource) {
this.childTokenSourceList = new ArrayList<>();
this.tokenSource = tokenSource;
this.cancellationRequested = new AtomicBoolean();
}
// Links a child token source; rejected once cancellation has been requested so a
// late registration can never be left unclosed.
public void register(LinkedCancellationTokenSource childTokenSource) {
synchronized (this) {
if (this.cancellationRequested.get()) {
throw new IllegalStateException("The cancellation token has been cancelled");
}
this.childTokenSourceList.add(childTokenSource);
}
}
// True once cancellation was requested locally or the owning source was closed.
public boolean isCancellationRequested() {
return this.cancellationRequested.get()
|| this.tokenSource.isClosed();
}
}
|
Can't we just use something like `container.beanManager().getEvent().select(new TypeLiteral<AtomicReference<String>>(){}).fire(msg);` instead?
|
// Verifies request-context behavior around observer notification: firing with no
// active request context produces a RequestFoo that is destroyed afterwards, while
// firing with an active context reuses the existing RequestFoo instance.
public void testObserverNotification() {
ArcContainer container = Arc.container();
ReferenceWrapper msg = new ReferenceWrapper();
RequestFoo.DESTROYED.set(false);
// No request context is active here; the observer still receives a RequestFoo id...
container.beanManager().getEvent().select(ReferenceWrapper.class).fire(msg);
String fooId1 = msg.getReference().get();
assertNotNull(fooId1);
// ...and that RequestFoo is destroyed once notification finishes.
assertTrue(RequestFoo.DESTROYED.get());
RequestFoo.DESTROYED.set(false);
msg.getReference().set(null);
ManagedContext requestContext = container.requestContext();
assertFalse(requestContext.isActive());
try {
requestContext.activate();
String fooId2 = container.instance(RequestFoo.class).get().getId();
assertNotEquals(fooId1, fooId2);
// With the context already active, the observer sees the same RequestFoo instance.
container.beanManager().getEvent().select(ReferenceWrapper.class).fire(msg);
assertEquals(fooId2, msg.getReference().get());
} finally {
requestContext.terminate();
}
assertTrue(RequestFoo.DESTROYED.get());
}
|
container.beanManager().getEvent().select(ReferenceWrapper.class).fire(msg);
|
// Same scenario as the ReferenceWrapper-based variant, but using a plain
// AtomicReference<String> payload selected via a TypeLiteral (so the generic type is
// preserved for observer resolution).
public void testObserverNotification() {
ArcContainer container = Arc.container();
AtomicReference<String> msg = new AtomicReference<>();
RequestFoo.DESTROYED.set(false);
// No request context active: the observer still receives a RequestFoo id...
container.beanManager().getEvent().select(new TypeLiteral<AtomicReference<String>>() {
}).fire(msg);
String fooId1 = msg.get();
assertNotNull(fooId1);
// ...and that RequestFoo is destroyed once notification finishes.
assertTrue(RequestFoo.DESTROYED.get());
RequestFoo.DESTROYED.set(false);
msg.set(null);
ManagedContext requestContext = container.requestContext();
assertFalse(requestContext.isActive());
try {
requestContext.activate();
String fooId2 = container.instance(RequestFoo.class).get().getId();
assertNotEquals(fooId1, fooId2);
// With the context already active, the observer sees the same RequestFoo instance.
container.beanManager().getEvent().select(new TypeLiteral<AtomicReference<String>>() {
}).fire(msg);
assertEquals(fooId2, msg.get());
} finally {
requestContext.terminate();
}
assertTrue(RequestFoo.DESTROYED.get());
}
|
// Arc container test fixture: an observer that injects a @RequestScoped bean while
// handling an event carrying a mutable reference payload.
// NOTE(review): the @Test annotation below is immediately followed by a static class,
// suggesting the actual test method was stripped from this fragment by the extraction
// pipeline — confirm against the full source.
class RequestInObserverNotificationTest {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(RequestFoo.class, MyObserver.class);
@Test
@Singleton
static class MyObserver {
@SuppressWarnings({ "rawtypes", "unchecked" })
// Copies the current RequestFoo's id into the event payload so the test can
// observe which request-scoped instance the observer saw.
void observeString(@Observes ReferenceWrapper value, RequestFoo foo) {
value.getReference().set(foo.getId());
}
}
// Event payload: a mutable holder the observer writes into.
static class ReferenceWrapper {
AtomicReference<String> reference;
public ReferenceWrapper() {
this.reference = new AtomicReference<>();
}
public AtomicReference<String> getReference() {
return reference;
}
}
@RequestScoped
static class RequestFoo {
// Set to true from @PreDestroy so tests can assert the bean was destroyed.
static final AtomicBoolean DESTROYED = new AtomicBoolean();
private String id;
@PostConstruct
void init() {
id = UUID.randomUUID().toString();
}
public String getId() {
return id;
}
@PreDestroy
void destroy() {
DESTROYED.set(true);
}
}
}
|
// Arc container test fixture, AtomicReference-payload variant: the observer writes
// the current RequestFoo id straight into the raw AtomicReference event payload.
// NOTE(review): the @Test annotation below is immediately followed by a static class,
// suggesting the actual test method was stripped from this fragment by the extraction
// pipeline — confirm against the full source.
class RequestInObserverNotificationTest {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(RequestFoo.class, MyObserver.class);
@Test
@Singleton
static class MyObserver {
@SuppressWarnings({ "rawtypes", "unchecked" })
void observeString(@Observes AtomicReference value, RequestFoo foo) {
value.set(foo.getId());
}
}
@RequestScoped
static class RequestFoo {
// Set to true from @PreDestroy so tests can assert the bean was destroyed.
static final AtomicBoolean DESTROYED = new AtomicBoolean();
private String id;
@PostConstruct
void init() {
id = UUID.randomUUID().toString();
}
public String getId() {
return id;
}
@PreDestroy
void destroy() {
DESTROYED.set(true);
}
}
}
|
How is atomicity ensured here? Wouldn't you need to hold at least a read lock to read `this.latestDatabaseAccount` and the `locationCache` atomically?
|
// Returns the user-configured preferred regions if any were set; otherwise falls back
// to the effective preferred locations derived from the last database account read.
private List<String> getEffectivePreferredRegions() {
if (this.connectionPolicy.getPreferredRegions() != null && !this.connectionPolicy.getPreferredRegions().isEmpty()) {
return this.connectionPolicy.getPreferredRegions();
}
// NOTE(review): the null check on latestDatabaseAccount and the subsequent
// locationCache read are not performed atomically (no common lock), so a concurrent
// account refresh can interleave between them — confirm whether a read lock is
// required here, as in the locked variant of this method.
if (this.latestDatabaseAccount == null) {
return Collections.emptyList();
}
return this.locationCache.getEffectivePreferredLocations();
}
|
// Returns the user-configured preferred regions if any were set; otherwise falls back
// to the effective preferred locations derived from the last database account read.
private List<String> getEffectivePreferredRegions() {
if (this.connectionPolicy.getPreferredRegions() != null && !this.connectionPolicy.getPreferredRegions().isEmpty()) {
return this.connectionPolicy.getPreferredRegions();
}
// Take the read lock so the null check on latestDatabaseAccount and the
// locationCache read observe a consistent snapshot with respect to writers that
// update both under the corresponding write lock.
this.databaseAccountReadLock.lock();
try {
if (this.latestDatabaseAccount == null) {
return Collections.emptyList();
}
return this.locationCache.getEffectivePreferredLocations();
} finally {
// Always release, even if the cache read throws.
this.databaseAccountReadLock.unlock();
}
}
|
class GlobalEndpointManager implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class);
private static final CosmosDaemonThreadFactory theadFactory = new CosmosDaemonThreadFactory("cosmos-global-endpoint-mgr");
private final int backgroundRefreshLocationTimeIntervalInMS;
private final LocationCache locationCache;
private final URI defaultEndpoint;
private final ConnectionPolicy connectionPolicy;
private final Duration maxInitializationTime;
private final DatabaseAccountManagerInternal owner;
private final AtomicBoolean isRefreshing;
private final AtomicBoolean refreshInBackground;
private final Scheduler scheduler = Schedulers.newSingle(theadFactory);
private volatile boolean isClosed;
private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true);
private volatile DatabaseAccount latestDatabaseAccount;
private volatile Throwable latestDatabaseRefreshError;
public void setLatestDatabaseRefreshError(Throwable latestDatabaseRefreshError) {
this.latestDatabaseRefreshError = latestDatabaseRefreshError;
}
public Throwable getLatestDatabaseRefreshError() {
return latestDatabaseRefreshError;
}
public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) {
this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000;
this.maxInitializationTime = Duration.ofSeconds(configs.getGlobalEndpointManagerMaxInitializationTimeInSeconds());
try {
this.locationCache = new LocationCache(
connectionPolicy,
owner.getServiceEndpoint(),
configs);
this.owner = owner;
this.defaultEndpoint = owner.getServiceEndpoint();
this.connectionPolicy = connectionPolicy;
this.isRefreshing = new AtomicBoolean(false);
this.refreshInBackground = new AtomicBoolean(false);
this.isClosed = false;
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
public void init() {
startRefreshLocationTimerAsync(true).block(maxInitializationTime);
}
public UnmodifiableList<URI> getReadEndpoints() {
return this.locationCache.getReadEndpoints();
}
public UnmodifiableList<URI> getWriteEndpoints() {
return this.locationCache.getWriteEndpoints();
}
public UnmodifiableList<URI> getApplicableReadEndpoints(RxDocumentServiceRequest request) {
return this.locationCache.getApplicableReadEndpoints(request);
}
public UnmodifiableList<URI> getApplicableWriteEndpoints(RxDocumentServiceRequest request) {
return this.locationCache.getApplicableWriteEndpoints(request);
}
public UnmodifiableList<URI> getApplicableReadEndpoints(List<String> excludedRegions) {
return this.locationCache.getApplicableReadEndpoints(excludedRegions, Collections.emptyList());
}
public UnmodifiableList<URI> getApplicableWriteEndpoints(List<String> excludedRegions) {
return this.locationCache.getApplicableWriteEndpoints(excludedRegions, Collections.emptyList());
}
public List<URI> getAvailableReadEndpoints() {
return this.locationCache.getAvailableReadEndpoints();
}
public List<URI> getAvailableWriteEndpoints() {
return this.locationCache.getAvailableWriteEndpoints();
}
public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync(
URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) {
return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume(
e -> {
logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage());
if (locations.isEmpty()) {
return Mono.error(e);
}
Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size())
.map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux());
Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single();
return res.doOnError(
innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage()));
});
}
public URI resolveServiceEndpoint(RxDocumentServiceRequest request) {
URI serviceEndpoint = this.locationCache.resolveServiceEndpoint(request);
if (request.faultInjectionRequestContext != null) {
request.faultInjectionRequestContext.setLocationEndpointToRoute(serviceEndpoint);
}
return serviceEndpoint;
}
public URI resolveFaultInjectionServiceEndpoint(String region, boolean writeOnly) {
return this.locationCache.resolveFaultInjectionEndpoint(region, writeOnly);
}
public URI getDefaultEndpoint() {
return this.locationCache.getDefaultEndpoint();
}
public void markEndpointUnavailableForRead(URI endpoint) {
logger.debug("Marking endpoint {} unavailable for read",endpoint);
this.locationCache.markEndpointUnavailableForRead(endpoint);;
}
public void markEndpointUnavailableForWrite(URI endpoint) {
logger.debug("Marking endpoint {} unavailable for Write",endpoint);
this.locationCache.markEndpointUnavailableForWrite(endpoint);
}
public boolean canUseMultipleWriteLocations() {
return this.locationCache.canUseMultipleWriteLocations();
}
public boolean canUseMultipleWriteLocations(RxDocumentServiceRequest request) {
return this.locationCache.canUseMultipleWriteLocations(request);
}
public void close() {
this.isClosed = true;
this.scheduler.dispose();
logger.debug("GlobalEndpointManager closed.");
}
public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) {
return Mono.defer(() -> {
logger.debug("refreshLocationAsync() invoked");
if (forceRefresh) {
Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
this.defaultEndpoint,
new ArrayList<>(this.getEffectivePreferredRegions()),
this::getDatabaseAccountAsync);
return databaseAccountObs.map(dbAccount -> {
this.locationCache.onDatabaseAccountRead(dbAccount);
return dbAccount;
}).flatMap(dbAccount -> {
return Mono.empty();
});
}
if (!isRefreshing.compareAndSet(false, true)) {
logger.debug("in the middle of another refresh. Not invoking a new refresh.");
return Mono.empty();
}
logger.debug("will refresh");
return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false));
});
}
/**
* This will provide the latest databaseAccount.
* If due to some reason last databaseAccount update was null,
* this method will return previous valid value
* @return DatabaseAccount
*/
public DatabaseAccount getLatestDatabaseAccount() {
return this.latestDatabaseAccount;
}
public int getPreferredLocationCount() {
List<String> effectivePreferredRegions = this.getEffectivePreferredRegions();
return effectivePreferredRegions != null ? effectivePreferredRegions.size() : 0;
}
private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) {
return Mono.defer(() -> {
logger.debug("refreshLocationPrivateAsync() refreshing locations");
if (databaseAccount != null) {
this.locationCache.onDatabaseAccountRead(databaseAccount);
}
Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>();
if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) {
logger.debug("shouldRefreshEndpoints: true");
if (databaseAccount == null && !canRefreshInBackground.v) {
logger.debug("shouldRefreshEndpoints: can't be done in background");
Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
this.defaultEndpoint,
new ArrayList<>(this.getEffectivePreferredRegions()),
this::getDatabaseAccountAsync);
return databaseAccountObs.map(dbAccount -> {
this.locationCache.onDatabaseAccountRead(dbAccount);
this.isRefreshing.set(false);
return dbAccount;
}).flatMap(dbAccount -> {
if (!this.refreshInBackground.get()) {
this.startRefreshLocationTimerAsync();
}
return Mono.empty();
});
}
if (!this.refreshInBackground.get()) {
this.startRefreshLocationTimerAsync();
}
this.isRefreshing.set(false);
return Mono.empty();
} else {
logger.debug("shouldRefreshEndpoints: false, nothing to do.");
this.isRefreshing.set(false);
return Mono.empty();
}
});
}
private void startRefreshLocationTimerAsync() {
startRefreshLocationTimerAsync(false).subscribe();
}
/**
 * Schedules a database-account refresh after the configured background interval (or
 * immediately during initialization) on the dedicated single-threaded scheduler. On
 * failure the error is recorded and another attempt is rescheduled.
 *
 * @param initialization when {@code true} the refresh runs without delay
 * @return a Mono that completes when the scheduled refresh (and any follow-up work) is done
 */
private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) {
    if (this.isClosed) {
        logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed");
        return Mono.empty();
    }
    logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS);
    LocalDateTime now = LocalDateTime.now();
    int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS;
    this.refreshInBackground.set(true);
    return Mono.delay(Duration.ofMillis(delayInMillis), CosmosSchedulers.COSMOS_PARALLEL)
        .flatMap(
            t -> {
                // The client may have been closed while the delay was pending.
                if (this.isClosed) {
                    logger.info("client already closed");
                    return Mono.empty();
                }
                logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now);
                Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.getEffectivePreferredRegions()),
                    this::getDatabaseAccountAsync);
                return databaseAccountObs.flatMap(dbAccount -> {
                    logger.info("db account retrieved {}", dbAccount);
                    this.refreshInBackground.set(false);
                    return this.refreshLocationPrivateAsync(dbAccount);
                });
            }).onErrorResume(ex -> {
                // Record the failure and reschedule another background attempt.
                logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex);
                this.setLatestDatabaseRefreshError(ex);
                this.startRefreshLocationTimerAsync();
                return Mono.empty();
            }).subscribeOn(scheduler);
}
/**
 * Retrieves the database account from the given service endpoint, recording a non-null
 * result as the latest snapshot and clearing any previously recorded refresh error.
 */
private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) {
    return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint)
        .doOnNext(account -> {
            if (account != null) {
                this.latestDatabaseAccount = account;
                this.setLatestDatabaseRefreshError(null);
            }
            logger.debug("account retrieved: {}", account);
        })
        .single();
}
/** @return whether this manager has been closed */
public boolean isClosed() {
    return isClosed;
}
/** Resolves the region name the given endpoint maps to for the given operation type. */
public String getRegionName(URI locationEndpoint, OperationType operationType) {
    final String regionName = this.locationCache.getRegionName(locationEndpoint, operationType);
    return regionName;
}
/** @return the connection policy this manager was constructed with */
public ConnectionPolicy getConnectionPolicy() {
    return connectionPolicy;
}
}
|
/**
 * Tracks the account's regional endpoints via a {@link LocationCache}, periodically refreshes
 * them in the background on a dedicated single-threaded scheduler, and resolves the service
 * endpoint to route each request to.
 *
 * Thread-safety: updates of the cached {@link DatabaseAccount} and the location cache are
 * guarded by a read/write lock; refresh scheduling state is tracked with atomics and a
 * volatile closed flag.
 */
class GlobalEndpointManager implements AutoCloseable {
    private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class);
    // Renamed from "theadFactory" (typo); field is private so the rename is safe.
    private static final CosmosDaemonThreadFactory threadFactory = new CosmosDaemonThreadFactory("cosmos-global-endpoint-mgr");
    private final int backgroundRefreshLocationTimeIntervalInMS;
    private final LocationCache locationCache;
    private final URI defaultEndpoint;
    private final ConnectionPolicy connectionPolicy;
    private final Duration maxInitializationTime;
    private final DatabaseAccountManagerInternal owner;
    private final AtomicBoolean isRefreshing;
    private final AtomicBoolean refreshInBackground;
    private final Scheduler scheduler = Schedulers.newSingle(threadFactory);
    private volatile boolean isClosed;
    // NOTE(review): only assigned at declaration and never read within this class —
    // made final; confirm there is no external (e.g. reflective) use before removing.
    private final AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true);
    private volatile DatabaseAccount latestDatabaseAccount;
    private final ReentrantReadWriteLock.WriteLock databaseAccountWriteLock;
    private final ReentrantReadWriteLock.ReadLock databaseAccountReadLock;
    private volatile Throwable latestDatabaseRefreshError;
    public void setLatestDatabaseRefreshError(Throwable latestDatabaseRefreshError) {
        this.latestDatabaseRefreshError = latestDatabaseRefreshError;
    }
    public Throwable getLatestDatabaseRefreshError() {
        return latestDatabaseRefreshError;
    }
    public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) {
        this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000;
        this.maxInitializationTime = Duration.ofSeconds(configs.getGlobalEndpointManagerMaxInitializationTimeInSeconds());
        try {
            this.locationCache = new LocationCache(
                connectionPolicy,
                owner.getServiceEndpoint(),
                configs);
            this.owner = owner;
            this.defaultEndpoint = owner.getServiceEndpoint();
            this.connectionPolicy = connectionPolicy;
            this.isRefreshing = new AtomicBoolean(false);
            this.refreshInBackground = new AtomicBoolean(false);
            this.isClosed = false;
            ReentrantReadWriteLock reentrantReadWriteLock = new ReentrantReadWriteLock();
            this.databaseAccountWriteLock = reentrantReadWriteLock.writeLock();
            this.databaseAccountReadLock = reentrantReadWriteLock.readLock();
        } catch (Exception e) {
            throw new IllegalArgumentException(e);
        }
    }
    /** Performs the first (immediate) refresh, blocking up to {@code maxInitializationTime}. */
    public void init() {
        startRefreshLocationTimerAsync(true).block(maxInitializationTime);
    }
    public UnmodifiableList<URI> getReadEndpoints() {
        return this.locationCache.getReadEndpoints();
    }
    public UnmodifiableList<URI> getWriteEndpoints() {
        return this.locationCache.getWriteEndpoints();
    }
    public UnmodifiableList<URI> getApplicableReadEndpoints(RxDocumentServiceRequest request) {
        return this.locationCache.getApplicableReadEndpoints(request);
    }
    public UnmodifiableList<URI> getApplicableWriteEndpoints(RxDocumentServiceRequest request) {
        return this.locationCache.getApplicableWriteEndpoints(request);
    }
    public UnmodifiableList<URI> getApplicableReadEndpoints(List<String> excludedRegions) {
        return this.locationCache.getApplicableReadEndpoints(excludedRegions, Collections.emptyList());
    }
    public UnmodifiableList<URI> getApplicableWriteEndpoints(List<String> excludedRegions) {
        return this.locationCache.getApplicableWriteEndpoints(excludedRegions, Collections.emptyList());
    }
    public List<URI> getAvailableReadEndpoints() {
        return this.locationCache.getAvailableReadEndpoints();
    }
    public List<URI> getAvailableWriteEndpoints() {
        return this.locationCache.getAvailableWriteEndpoints();
    }
    /**
     * Fetches the database account from the default (global) endpoint, falling back to each
     * of the given regional endpoints in order if the global endpoint is unreachable.
     */
    public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync(
        URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) {
        return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume(
            e -> {
                logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage());
                if (locations.isEmpty()) {
                    return Mono.error(e);
                }
                Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size())
                    .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux());
                // Take the first regional endpoint that answers, delaying errors until all fail.
                Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single();
                return res.doOnError(
                    innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage()));
            });
    }
    public URI resolveServiceEndpoint(RxDocumentServiceRequest request) {
        URI serviceEndpoint = this.locationCache.resolveServiceEndpoint(request);
        if (request.faultInjectionRequestContext != null) {
            request.faultInjectionRequestContext.setLocationEndpointToRoute(serviceEndpoint);
        }
        return serviceEndpoint;
    }
    public URI resolveFaultInjectionServiceEndpoint(String region, boolean writeOnly) {
        return this.locationCache.resolveFaultInjectionEndpoint(region, writeOnly);
    }
    public URI getDefaultEndpoint() {
        return this.locationCache.getDefaultEndpoint();
    }
    public void markEndpointUnavailableForRead(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for read",endpoint);
        // fixed: stray duplicate semicolon removed
        this.locationCache.markEndpointUnavailableForRead(endpoint);
    }
    public void markEndpointUnavailableForWrite(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for Write",endpoint);
        this.locationCache.markEndpointUnavailableForWrite(endpoint);
    }
    public boolean canUseMultipleWriteLocations() {
        return this.locationCache.canUseMultipleWriteLocations();
    }
    public boolean canUseMultipleWriteLocations(RxDocumentServiceRequest request) {
        return this.locationCache.canUseMultipleWriteLocations(request);
    }
    public void close() {
        this.isClosed = true;
        this.scheduler.dispose();
        logger.debug("GlobalEndpointManager closed.");
    }
    /**
     * Refreshes the location cache. When {@code forceRefresh} is set the account is always
     * re-fetched; otherwise a refresh only runs when no other refresh is in flight.
     */
    public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationAsync() invoked");
            if (forceRefresh) {
                Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                    this.defaultEndpoint,
                    new ArrayList<>(this.getEffectivePreferredRegions()),
                    this::getDatabaseAccountAsync);
                return databaseAccountObs.map(dbAccount -> {
                    this.databaseAccountWriteLock.lock();
                    try {
                        this.locationCache.onDatabaseAccountRead(dbAccount);
                    } finally {
                        this.databaseAccountWriteLock.unlock();
                    }
                    return dbAccount;
                }).flatMap(dbAccount -> {
                    return Mono.empty();
                });
            }
            if (!isRefreshing.compareAndSet(false, true)) {
                logger.debug("in the middle of another refresh. Not invoking a new refresh.");
                return Mono.empty();
            }
            logger.debug("will refresh");
            return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false));
        });
    }
    /**
     * This will provide the latest databaseAccount.
     * If due to some reason last databaseAccount update was null,
     * this method will return previous valid value
     * @return DatabaseAccount
     */
    public DatabaseAccount getLatestDatabaseAccount() {
        return this.latestDatabaseAccount;
    }
    public int getPreferredLocationCount() {
        List<String> effectivePreferredRegions = this.getEffectivePreferredRegions();
        return effectivePreferredRegions != null ? effectivePreferredRegions.size() : 0;
    }
    /**
     * Feeds the supplied snapshot into the location cache (under the write lock) and, when
     * the cache reports the endpoints are stale, refreshes inline or in the background.
     * Clears {@code isRefreshing} on every completion path.
     */
    private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationPrivateAsync() refreshing locations");
            if (databaseAccount != null) {
                this.databaseAccountWriteLock.lock();
                try {
                    this.locationCache.onDatabaseAccountRead(databaseAccount);
                } finally {
                    this.databaseAccountWriteLock.unlock();
                }
            }
            Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>();
            if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) {
                logger.debug("shouldRefreshEndpoints: true");
                if (databaseAccount == null && !canRefreshInBackground.v) {
                    logger.debug("shouldRefreshEndpoints: can't be done in background");
                    // No snapshot and no background refresh allowed: fetch inline.
                    Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                        this.defaultEndpoint,
                        new ArrayList<>(this.getEffectivePreferredRegions()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.map(dbAccount -> {
                        this.databaseAccountWriteLock.lock();
                        try {
                            this.locationCache.onDatabaseAccountRead(dbAccount);
                        } finally {
                            this.databaseAccountWriteLock.unlock();
                        }
                        this.isRefreshing.set(false);
                        return dbAccount;
                    }).flatMap(dbAccount -> {
                        if (!this.refreshInBackground.get()) {
                            this.startRefreshLocationTimerAsync();
                        }
                        return Mono.empty();
                    });
                }
                if (!this.refreshInBackground.get()) {
                    this.startRefreshLocationTimerAsync();
                }
                this.isRefreshing.set(false);
                return Mono.empty();
            } else {
                logger.debug("shouldRefreshEndpoints: false, nothing to do.");
                this.isRefreshing.set(false);
                return Mono.empty();
            }
        });
    }
    /** Fire-and-forget variant: schedules a delayed background refresh. */
    private void startRefreshLocationTimerAsync() {
        startRefreshLocationTimerAsync(false).subscribe();
    }
    /**
     * Schedules an account refresh after the background interval (immediately during
     * initialization). Errors are recorded and a new attempt is rescheduled.
     */
    private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) {
        if (this.isClosed) {
            logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed");
            return Mono.empty();
        }
        logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS);
        LocalDateTime now = LocalDateTime.now();
        int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS;
        this.refreshInBackground.set(true);
        return Mono.delay(Duration.ofMillis(delayInMillis), CosmosSchedulers.COSMOS_PARALLEL)
            .flatMap(
                t -> {
                    // The client may have been closed while the delay was pending.
                    if (this.isClosed) {
                        logger.info("client already closed");
                        return Mono.empty();
                    }
                    logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now);
                    Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.getEffectivePreferredRegions()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.flatMap(dbAccount -> {
                        logger.info("db account retrieved {}", dbAccount);
                        this.refreshInBackground.set(false);
                        return this.refreshLocationPrivateAsync(dbAccount);
                    });
                }).onErrorResume(ex -> {
                    logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex);
                    this.setLatestDatabaseRefreshError(ex);
                    this.startRefreshLocationTimerAsync();
                    return Mono.empty();
                }).subscribeOn(scheduler);
    }
    /**
     * Retrieves the database account from the given endpoint, recording a non-null result
     * (under the write lock) as the latest snapshot and clearing any prior refresh error.
     */
    private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) {
        return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint)
            .doOnNext(databaseAccount -> {
                if(databaseAccount != null) {
                    this.databaseAccountWriteLock.lock();
                    try {
                        this.latestDatabaseAccount = databaseAccount;
                        this.setLatestDatabaseRefreshError(null);
                    } finally {
                        this.databaseAccountWriteLock.unlock();
                    }
                }
                logger.debug("account retrieved: {}", databaseAccount);
            }).single();
    }
    public boolean isClosed() {
        return this.isClosed;
    }
    public String getRegionName(URI locationEndpoint, OperationType operationType) {
        return this.locationCache.getRegionName(locationEndpoint, operationType);
    }
    public ConnectionPolicy getConnectionPolicy() {
        return this.connectionPolicy;
    }
}
|
|
We can initialize `grouped` directly at its declaration here, instead of declaring it in one statement and assigning it in a separate one.
|
/**
 * Verifies that state-cleanup callbacks are scheduled at window expiration and that running
 * each callback clears only the state belonging to the expired window.
 */
public void windowCleanupScheduled() throws Exception {
    final String stateId = "my-state-id";
    PCollection<KV<String, Integer>> input =
        pipeline
            .apply(Create.of(KV.of("hello", 1), KV.of("hello", 2)))
            .apply(Window.into(FixedWindows.of(Duration.millis(10))));
    TupleTag<Integer> mainOutput = new TupleTag<>();
    // Direct assignment: declare and initialize in one statement instead of a split
    // declaration followed by an assignment.
    final ParDoMultiOverrideFactory.GbkThenStatefulParDo<String, Integer, Integer> gbkThenStatefulParDo =
        new ParDoMultiOverrideFactory.GbkThenStatefulParDo<>(
            new DoFn<KV<String, Integer>, Integer>() {
                @StateId(stateId)
                private final StateSpec<ValueState<String>> spec =
                    StateSpecs.value(StringUtf8Coder.of());
                @ProcessElement
                public void process(ProcessContext c) {}
            },
            mainOutput,
            TupleTagList.empty(),
            Collections.emptyList(),
            DoFnSchemaInformation.create(),
            Collections.emptyMap());
    final PCollection<KeyedWorkItem<String, KV<String, Integer>>> grouped =
        gbkThenStatefulParDo.groupToKeyedWorkItem(input);
    PCollection<Integer> produced =
        gbkThenStatefulParDo.applyStatefulParDo(grouped).get(mainOutput).setCoder(VarIntCoder.of());
    StatefulParDoEvaluatorFactory<String, Integer, Integer> factory =
        new StatefulParDoEvaluatorFactory<>(mockEvaluationContext, options);
    AppliedPTransform<
            PCollection<? extends KeyedWorkItem<String, KV<String, Integer>>>,
            PCollectionTuple,
            StatefulParDo<String, Integer, Integer>>
        producingTransform = (AppliedPTransform) DirectGraphs.getProducer(produced);
    when(mockEvaluationContext.getExecutionContext(
            eq(producingTransform), Mockito.<StructuralKey>any()))
        .thenReturn(mockExecutionContext);
    when(mockExecutionContext.getStepContext(any())).thenReturn(mockStepContext);
    IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(9));
    IntervalWindow secondWindow = new IntervalWindow(new Instant(10), new Instant(19));
    StateNamespace firstWindowNamespace =
        StateNamespaces.window(IntervalWindow.getCoder(), firstWindow);
    StateNamespace secondWindowNamespace =
        StateNamespaces.window(IntervalWindow.getCoder(), secondWindow);
    StateTag<ValueState<String>> tag =
        StateTags.tagForSpec(stateId, StateSpecs.value(StringUtf8Coder.of()));
    // Seed per-window state so cleanup has something to clear.
    stateInternals.state(firstWindowNamespace, tag).write("first");
    stateInternals.state(secondWindowNamespace, tag).write("second");
    CommittedBundle<KeyedWorkItem<String, KV<String, Integer>>> inputBundle =
        BUNDLE_FACTORY
            .createBundle(grouped)
            .add(
                WindowedValue.of(
                    KeyedWorkItems.<String, KV<String, Integer>>elementsWorkItem(
                        "hello",
                        Collections.singleton(
                            WindowedValue.of(
                                KV.of("hello", 1),
                                new Instant(3),
                                firstWindow,
                                PaneInfo.NO_FIRING))),
                    new Instant(3),
                    firstWindow,
                    PaneInfo.NO_FIRING))
            .add(
                WindowedValue.of(
                    KeyedWorkItems.<String, KV<String, Integer>>elementsWorkItem(
                        "hello",
                        Collections.singleton(
                            WindowedValue.of(
                                KV.of("hello", 2),
                                new Instant(11),
                                secondWindow,
                                PaneInfo.NO_FIRING))),
                    new Instant(11),
                    secondWindow,
                    PaneInfo.NO_FIRING))
            .commit(Instant.now());
    factory.forApplication(producingTransform, inputBundle);
    ArgumentCaptor<Runnable> argumentCaptor = ArgumentCaptor.forClass(Runnable.class);
    // Running the first window's cleanup clears only that window's state.
    verify(mockEvaluationContext)
        .scheduleAfterWindowExpiration(
            eq(producingTransform), eq(firstWindow), Mockito.any(), argumentCaptor.capture());
    argumentCaptor.getValue().run();
    assertThat(stateInternals.state(firstWindowNamespace, tag).read(), nullValue());
    assertThat(stateInternals.state(secondWindowNamespace, tag).read(), equalTo("second"));
    verify(mockEvaluationContext)
        .scheduleAfterWindowExpiration(
            eq(producingTransform), eq(secondWindow), Mockito.any(), argumentCaptor.capture());
    argumentCaptor.getValue().run();
    assertThat(stateInternals.state(secondWindowNamespace, tag).read(), nullValue());
}
|
@StateId(stateId)
|
/**
 * Verifies that state-cleanup callbacks are scheduled at window expiration and that running
 * each callback clears only the state belonging to the expired window.
 */
public void windowCleanupScheduled() throws Exception {
    final String stateId = "my-state-id";
    PCollection<KV<String, Integer>> input =
        pipeline
            .apply(Create.of(KV.of("hello", 1), KV.of("hello", 2)))
            .apply(Window.into(FixedWindows.of(Duration.millis(10))));
    TupleTag<Integer> mainOutput = new TupleTag<>();
    // Consistency: use direct assignment here too, matching how `grouped` is initialized,
    // instead of a split declaration followed by an assignment.
    final ParDoMultiOverrideFactory.GbkThenStatefulParDo<String, Integer, Integer> gbkThenStatefulParDo =
        new ParDoMultiOverrideFactory.GbkThenStatefulParDo<>(
            new DoFn<KV<String, Integer>, Integer>() {
                @StateId(stateId)
                private final StateSpec<ValueState<String>> spec =
                    StateSpecs.value(StringUtf8Coder.of());
                @ProcessElement
                public void process(ProcessContext c) {}
            },
            mainOutput,
            TupleTagList.empty(),
            Collections.emptyList(),
            DoFnSchemaInformation.create(),
            Collections.emptyMap());
    final PCollection<KeyedWorkItem<String, KV<String, Integer>>> grouped =
        gbkThenStatefulParDo.groupToKeyedWorkItem(input);
    PCollection<Integer> produced =
        gbkThenStatefulParDo.applyStatefulParDo(grouped).get(mainOutput).setCoder(VarIntCoder.of());
    StatefulParDoEvaluatorFactory<String, Integer, Integer> factory =
        new StatefulParDoEvaluatorFactory<>(mockEvaluationContext, options);
    AppliedPTransform<
            PCollection<? extends KeyedWorkItem<String, KV<String, Integer>>>,
            PCollectionTuple,
            StatefulParDo<String, Integer, Integer>>
        producingTransform = (AppliedPTransform) DirectGraphs.getProducer(produced);
    when(mockEvaluationContext.getExecutionContext(
            eq(producingTransform), Mockito.<StructuralKey>any()))
        .thenReturn(mockExecutionContext);
    when(mockExecutionContext.getStepContext(any())).thenReturn(mockStepContext);
    IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(9));
    IntervalWindow secondWindow = new IntervalWindow(new Instant(10), new Instant(19));
    StateNamespace firstWindowNamespace =
        StateNamespaces.window(IntervalWindow.getCoder(), firstWindow);
    StateNamespace secondWindowNamespace =
        StateNamespaces.window(IntervalWindow.getCoder(), secondWindow);
    StateTag<ValueState<String>> tag =
        StateTags.tagForSpec(stateId, StateSpecs.value(StringUtf8Coder.of()));
    // Seed per-window state so cleanup has something to clear.
    stateInternals.state(firstWindowNamespace, tag).write("first");
    stateInternals.state(secondWindowNamespace, tag).write("second");
    CommittedBundle<KeyedWorkItem<String, KV<String, Integer>>> inputBundle =
        BUNDLE_FACTORY
            .createBundle(grouped)
            .add(
                WindowedValue.of(
                    KeyedWorkItems.<String, KV<String, Integer>>elementsWorkItem(
                        "hello",
                        Collections.singleton(
                            WindowedValue.of(
                                KV.of("hello", 1),
                                new Instant(3),
                                firstWindow,
                                PaneInfo.NO_FIRING))),
                    new Instant(3),
                    firstWindow,
                    PaneInfo.NO_FIRING))
            .add(
                WindowedValue.of(
                    KeyedWorkItems.<String, KV<String, Integer>>elementsWorkItem(
                        "hello",
                        Collections.singleton(
                            WindowedValue.of(
                                KV.of("hello", 2),
                                new Instant(11),
                                secondWindow,
                                PaneInfo.NO_FIRING))),
                    new Instant(11),
                    secondWindow,
                    PaneInfo.NO_FIRING))
            .commit(Instant.now());
    factory.forApplication(producingTransform, inputBundle);
    ArgumentCaptor<Runnable> argumentCaptor = ArgumentCaptor.forClass(Runnable.class);
    // Running the first window's cleanup clears only that window's state.
    verify(mockEvaluationContext)
        .scheduleAfterWindowExpiration(
            eq(producingTransform), eq(firstWindow), Mockito.any(), argumentCaptor.capture());
    argumentCaptor.getValue().run();
    assertThat(stateInternals.state(firstWindowNamespace, tag).read(), nullValue());
    assertThat(stateInternals.state(secondWindowNamespace, tag).read(), equalTo("second"));
    verify(mockEvaluationContext)
        .scheduleAfterWindowExpiration(
            eq(producingTransform), eq(secondWindow), Mockito.any(), argumentCaptor.capture());
    argumentCaptor.getValue().run();
    assertThat(stateInternals.state(secondWindowNamespace, tag).read(), nullValue());
}
|
/** Tests for {@code StatefulParDoEvaluatorFactory}: pushed-back elements when a side input
 * is not ready, and time-sorted stateful input. */
class StatefulParDoEvaluatorFactoryTest implements Serializable {
    @Mock private transient EvaluationContext mockEvaluationContext;
    @Mock private transient DirectExecutionContext mockExecutionContext;
    @Mock private transient DirectExecutionContext.DirectStepContext mockStepContext;
    @Mock private transient ReadyCheckingSideInputReader mockSideInputReader;
    @Mock private transient UncommittedBundle<Integer> mockUncommittedBundle;
    private static final String KEY = "any-key";
    private final transient PipelineOptions options = PipelineOptionsFactory.create();
    private final transient StateInternals stateInternals =
        CopyOnAccessInMemoryStateInternals.<Object>withUnderlying(KEY, null);
    private final transient DirectTimerInternals timerInternals =
        DirectTimerInternals.create(
            MockClock.fromInstant(Instant.now()),
            Mockito.mock(TransformWatermarks.class),
            TimerUpdate.builder(StructuralKey.of(KEY, StringUtf8Coder.of())));
    private static final BundleFactory BUNDLE_FACTORY = ImmutableListBundleFactory.create();
    @Rule
    public transient TestPipeline pipeline =
        TestPipeline.create().enableAbandonedNodeEnforcement(false);
    @SuppressWarnings("unchecked")
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        when((StateInternals) mockStepContext.stateInternals()).thenReturn(stateInternals);
        when(mockStepContext.timerInternals()).thenReturn(timerInternals);
        when(mockEvaluationContext.createSideInputReader(anyList()))
            .thenReturn(
                SideInputContainer.create(mockEvaluationContext, Collections.emptyList())
                    .createReaderForViews(Collections.emptyList()));
    }
    /**
     * A test that explicitly delays a side input so that the main input will have to be reprocessed,
     * testing that {@code finishBundle()} re-assembles the GBK outputs correctly.
     */
    // fixed: the method was annotated with @Test twice (with the Javadoc between the two
    // annotations), which does not compile since @Test is not repeatable.
    @Test
    public void testUnprocessedElements() throws Exception {
        final String stateId = "my-state-id";
        PCollection<KV<String, Integer>> mainInput =
            pipeline
                .apply(Create.of(KV.of("hello", 1), KV.of("hello", 2)))
                .apply(Window.into(FixedWindows.of(Duration.millis(10))));
        final PCollectionView<List<Integer>> sideInput =
            pipeline
                .apply("Create side input", Create.of(42))
                .apply("Window side input", Window.into(FixedWindows.of(Duration.millis(10))))
                .apply("View side input", View.asList());
        TupleTag<Integer> mainOutput = new TupleTag<>();
        PCollection<Integer> produced =
            mainInput
                .apply(
                    new ParDoMultiOverrideFactory.GbkThenStatefulParDo<>(
                        new DoFn<KV<String, Integer>, Integer>() {
                            @StateId(stateId)
                            private final StateSpec<ValueState<String>> spec =
                                StateSpecs.value(StringUtf8Coder.of());
                            @ProcessElement
                            public void process(ProcessContext c) {}
                        },
                        mainOutput,
                        TupleTagList.empty(),
                        Collections.singletonList(sideInput),
                        DoFnSchemaInformation.create(),
                        Collections.emptyMap()))
                .get(mainOutput)
                .setCoder(VarIntCoder.of());
        StatefulParDoEvaluatorFactory<String, Integer, Integer> factory =
            new StatefulParDoEvaluatorFactory<>(mockEvaluationContext, options);
        AppliedPTransform<
                PCollection<KeyedWorkItem<String, KV<String, Integer>>>,
                PCollectionTuple,
                StatefulParDo<String, Integer, Integer>>
            producingTransform = (AppliedPTransform) DirectGraphs.getProducer(produced);
        when(mockEvaluationContext.getExecutionContext(
                eq(producingTransform), Mockito.<StructuralKey>any()))
            .thenReturn(mockExecutionContext);
        when(mockExecutionContext.getStepContext(any())).thenReturn(mockStepContext);
        when(mockEvaluationContext.createBundle(Matchers.<PCollection<Integer>>any()))
            .thenReturn(mockUncommittedBundle);
        when(mockStepContext.getTimerUpdate()).thenReturn(TimerUpdate.empty());
        // Side input never becomes ready, so every element must be pushed back.
        when(mockEvaluationContext.createSideInputReader(anyList())).thenReturn(mockSideInputReader);
        when(mockSideInputReader.isReady(Matchers.any(), Matchers.any())).thenReturn(false);
        IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(9));
        String key = "hello";
        WindowedValue<KV<String, Integer>> firstKv =
            WindowedValue.of(KV.of(key, 1), new Instant(3), firstWindow, PaneInfo.NO_FIRING);
        WindowedValue<KeyedWorkItem<String, KV<String, Integer>>> gbkOutputElement =
            firstKv.withValue(
                KeyedWorkItems.elementsWorkItem(
                    "hello",
                    ImmutableList.of(
                        firstKv,
                        firstKv.withValue(KV.of(key, 13)),
                        firstKv.withValue(KV.of(key, 15)))));
        CommittedBundle<KeyedWorkItem<String, KV<String, Integer>>> inputBundle =
            BUNDLE_FACTORY
                .createBundle(
                    (PCollection<KeyedWorkItem<String, KV<String, Integer>>>)
                        Iterables.getOnlyElement(
                            TransformInputs.nonAdditionalInputs(producingTransform)))
                .add(gbkOutputElement)
                .commit(Instant.now());
        TransformEvaluator<KeyedWorkItem<String, KV<String, Integer>>> evaluator =
            factory.forApplication(producingTransform, inputBundle);
        evaluator.processElement(gbkOutputElement);
        TransformResult<KeyedWorkItem<String, KV<String, Integer>>> result = evaluator.finishBundle();
        List<Integer> pushedBackInts = new ArrayList<>();
        for (WindowedValue<? extends KeyedWorkItem<String, KV<String, Integer>>> unprocessedElement :
            result.getUnprocessedElements()) {
            assertThat(
                Iterables.getOnlyElement(unprocessedElement.getWindows()),
                equalTo((BoundedWindow) firstWindow));
            assertThat(unprocessedElement.getValue().key(), equalTo("hello"));
            for (WindowedValue<KV<String, Integer>> windowedKv :
                unprocessedElement.getValue().elementsIterable()) {
                pushedBackInts.add(windowedKv.getValue().getValue());
            }
        }
        assertThat(pushedBackInts, containsInAnyOrder(1, 13, 15));
    }
    @Test
    public void testRequiresTimeSortedInput() {
        Instant now = Instant.ofEpochMilli(0);
        // Elements are created with decreasing timestamps; time-sorted stateful processing
        // must still concatenate them in timestamp order.
        PCollection<KV<String, Integer>> input =
            pipeline.apply(
                Create.timestamped(
                    TimestampedValue.of(KV.of("", 1), now.plus(2)),
                    TimestampedValue.of(KV.of("", 2), now.plus(1)),
                    TimestampedValue.of(KV.of("", 3), now)));
        PCollection<String> result = input.apply(ParDo.of(statefulConcat()));
        PAssert.that(result).containsInAnyOrder("3", "3:2", "3:2:1");
        pipeline.run();
    }
    /** Stateful DoFn that appends each value to a ":"-joined running concatenation. */
    private static DoFn<KV<String, Integer>, String> statefulConcat() {
        final String stateId = "sum";
        return new DoFn<KV<String, Integer>, String>() {
            @StateId(stateId)
            final StateSpec<ValueState<String>> stateSpec = StateSpecs.value();
            @ProcessElement
            @RequiresTimeSortedInput
            public void processElement(
                ProcessContext context, @StateId(stateId) ValueState<String> state) {
                String current = MoreObjects.firstNonNull(state.read(), "");
                if (!current.isEmpty()) {
                    current += ":";
                }
                current += context.element().getValue();
                context.output(current);
                state.write(current);
            }
        };
    }
}
|
class StatefulParDoEvaluatorFactoryTest implements Serializable {
@Mock private transient EvaluationContext mockEvaluationContext;
@Mock private transient DirectExecutionContext mockExecutionContext;
@Mock private transient DirectExecutionContext.DirectStepContext mockStepContext;
@Mock private transient ReadyCheckingSideInputReader mockSideInputReader;
@Mock private transient UncommittedBundle<Integer> mockUncommittedBundle;
private static final String KEY = "any-key";
private final transient PipelineOptions options = PipelineOptionsFactory.create();
private final transient StateInternals stateInternals =
CopyOnAccessInMemoryStateInternals.<Object>withUnderlying(KEY, null);
private final transient DirectTimerInternals timerInternals =
DirectTimerInternals.create(
MockClock.fromInstant(Instant.now()),
Mockito.mock(TransformWatermarks.class),
TimerUpdate.builder(StructuralKey.of(KEY, StringUtf8Coder.of())));
private static final BundleFactory BUNDLE_FACTORY = ImmutableListBundleFactory.create();
@Rule
public transient TestPipeline pipeline =
TestPipeline.create().enableAbandonedNodeEnforcement(false);
@SuppressWarnings("unchecked")
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
when((StateInternals) mockStepContext.stateInternals()).thenReturn(stateInternals);
when(mockStepContext.timerInternals()).thenReturn(timerInternals);
when(mockEvaluationContext.createSideInputReader(anyList()))
.thenReturn(
SideInputContainer.create(mockEvaluationContext, Collections.emptyList())
.createReaderForViews(Collections.emptyList()));
}
@Test
/**
* A test that explicitly delays a side input so that the main input will have to be reprocessed,
* testing that {@code finishBundle()} re-assembles the GBK outputs correctly.
*/
@Test
public void testUnprocessedElements() throws Exception {
final String stateId = "my-state-id";
PCollection<KV<String, Integer>> mainInput =
pipeline
.apply(Create.of(KV.of("hello", 1), KV.of("hello", 2)))
.apply(Window.into(FixedWindows.of(Duration.millis(10))));
final PCollectionView<List<Integer>> sideInput =
pipeline
.apply("Create side input", Create.of(42))
.apply("Window side input", Window.into(FixedWindows.of(Duration.millis(10))))
.apply("View side input", View.asList());
TupleTag<Integer> mainOutput = new TupleTag<>();
PCollection<Integer> produced =
mainInput
.apply(
new ParDoMultiOverrideFactory.GbkThenStatefulParDo<>(
new DoFn<KV<String, Integer>, Integer>() {
@StateId(stateId)
private final StateSpec<ValueState<String>> spec =
StateSpecs.value(StringUtf8Coder.of());
@ProcessElement
public void process(ProcessContext c) {}
},
mainOutput,
TupleTagList.empty(),
Collections.singletonList(sideInput),
DoFnSchemaInformation.create(),
Collections.emptyMap()))
.get(mainOutput)
.setCoder(VarIntCoder.of());
StatefulParDoEvaluatorFactory<String, Integer, Integer> factory =
new StatefulParDoEvaluatorFactory<>(mockEvaluationContext, options);
AppliedPTransform<
PCollection<KeyedWorkItem<String, KV<String, Integer>>>,
PCollectionTuple,
StatefulParDo<String, Integer, Integer>>
producingTransform = (AppliedPTransform) DirectGraphs.getProducer(produced);
when(mockEvaluationContext.getExecutionContext(
eq(producingTransform), Mockito.<StructuralKey>any()))
.thenReturn(mockExecutionContext);
when(mockExecutionContext.getStepContext(any())).thenReturn(mockStepContext);
when(mockEvaluationContext.createBundle(Matchers.<PCollection<Integer>>any()))
.thenReturn(mockUncommittedBundle);
when(mockStepContext.getTimerUpdate()).thenReturn(TimerUpdate.empty());
when(mockEvaluationContext.createSideInputReader(anyList())).thenReturn(mockSideInputReader);
when(mockSideInputReader.isReady(Matchers.any(), Matchers.any())).thenReturn(false);
IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(9));
String key = "hello";
WindowedValue<KV<String, Integer>> firstKv =
WindowedValue.of(KV.of(key, 1), new Instant(3), firstWindow, PaneInfo.NO_FIRING);
WindowedValue<KeyedWorkItem<String, KV<String, Integer>>> gbkOutputElement =
firstKv.withValue(
KeyedWorkItems.elementsWorkItem(
"hello",
ImmutableList.of(
firstKv,
firstKv.withValue(KV.of(key, 13)),
firstKv.withValue(KV.of(key, 15)))));
CommittedBundle<KeyedWorkItem<String, KV<String, Integer>>> inputBundle =
BUNDLE_FACTORY
.createBundle(
(PCollection<KeyedWorkItem<String, KV<String, Integer>>>)
Iterables.getOnlyElement(
TransformInputs.nonAdditionalInputs(producingTransform)))
.add(gbkOutputElement)
.commit(Instant.now());
TransformEvaluator<KeyedWorkItem<String, KV<String, Integer>>> evaluator =
factory.forApplication(producingTransform, inputBundle);
evaluator.processElement(gbkOutputElement);
TransformResult<KeyedWorkItem<String, KV<String, Integer>>> result = evaluator.finishBundle();
List<Integer> pushedBackInts = new ArrayList<>();
for (WindowedValue<? extends KeyedWorkItem<String, KV<String, Integer>>> unprocessedElement :
result.getUnprocessedElements()) {
assertThat(
Iterables.getOnlyElement(unprocessedElement.getWindows()),
equalTo((BoundedWindow) firstWindow));
assertThat(unprocessedElement.getValue().key(), equalTo("hello"));
for (WindowedValue<KV<String, Integer>> windowedKv :
unprocessedElement.getValue().elementsIterable()) {
pushedBackInts.add(windowedKv.getValue().getValue());
}
}
assertThat(pushedBackInts, containsInAnyOrder(1, 13, 15));
}
@Test
public void testRequiresTimeSortedInput() {
Instant now = Instant.ofEpochMilli(0);
PCollection<KV<String, Integer>> input =
pipeline.apply(
Create.timestamped(
TimestampedValue.of(KV.of("", 1), now.plus(2)),
TimestampedValue.of(KV.of("", 2), now.plus(1)),
TimestampedValue.of(KV.of("", 3), now)));
PCollection<String> result = input.apply(ParDo.of(statefulConcat()));
PAssert.that(result).containsInAnyOrder("3", "3:2", "3:2:1");
pipeline.run();
}
@Test
public void testRequiresTimeSortedInputWithLateData() {
    // The element timestamped `now` is added only after the watermark has advanced
    // to now+1, making it late. With no allowed lateness configured, it is dropped,
    // so only the two on-time elements contribute to the output ("2", "2:1").
    Instant now = Instant.ofEpochMilli(0);
    PCollection<KV<String, Integer>> input =
        pipeline.apply(
            TestStream.create(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()))
                .addElements(TimestampedValue.of(KV.of("", 1), now.plus(2)))
                .addElements(TimestampedValue.of(KV.of("", 2), now.plus(1)))
                .advanceWatermarkTo(now.plus(1))
                .addElements(TimestampedValue.of(KV.of("", 3), now))
                .advanceWatermarkToInfinity());
    PCollection<String> result = input.apply(ParDo.of(statefulConcat()));
    PAssert.that(result).containsInAnyOrder("2", "2:1");
    pipeline.run();
}
@Test
public void testRequiresTimeSortedInputWithLateDataAndAllowedLateness() {
    // Same stream as the late-data test above, but the window is configured with
    // 2 ms of allowed lateness, so the element timestamped `now` (one behind the
    // watermark) is retained and all three accumulations are produced.
    Instant now = Instant.ofEpochMilli(0);
    PCollection<KV<String, Integer>> input =
        pipeline
            .apply(
                TestStream.create(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()))
                    .addElements(TimestampedValue.of(KV.of("", 1), now.plus(2)))
                    .addElements(TimestampedValue.of(KV.of("", 2), now.plus(1)))
                    .advanceWatermarkTo(now.plus(1))
                    .addElements(TimestampedValue.of(KV.of("", 3), now))
                    .advanceWatermarkToInfinity())
            .apply(
                Window.<KV<String, Integer>>into(new GlobalWindows())
                    .withAllowedLateness(Duration.millis(2)));
    PCollection<String> result = input.apply(ParDo.of(statefulConcat()));
    PAssert.that(result).containsInAnyOrder("3", "3:2", "3:2:1");
    pipeline.run();
}
/**
 * Returns a stateful {@code DoFn} that, per key, appends each incoming value to a
 * ":"-separated string kept in value state and emits the accumulated string after
 * every element. The {@code @RequiresTimeSortedInput} annotation obliges the runner
 * to deliver elements in event-time order, which the tests above rely on.
 */
private static DoFn<KV<String, Integer>, String> statefulConcat() {
    final String stateId = "sum";
    return new DoFn<KV<String, Integer>, String>() {

        // Per-key accumulator holding the concatenation built so far.
        @StateId(stateId)
        final StateSpec<ValueState<String>> stateSpec = StateSpecs.value();

        @ProcessElement
        @RequiresTimeSortedInput
        public void processElement(
                ProcessContext context, @StateId(stateId) ValueState<String> state) {
            // First element for a key starts from the empty string (state is unset).
            String current = MoreObjects.firstNonNull(state.read(), "");
            if (!current.isEmpty()) {
                current += ":";
            }
            current += context.element().getValue();
            context.output(current);
            state.write(current);
        }
    };
}
}
|
We'd better wrap the requiredParams and the SQL template string in a dedicated class rather than hard-code them in every single method.
|
/**
 * Builds the statistics SQL that selects the minimum value of a column.
 *
 * @param params placeholder values; must contain entries for "table" and "column"
 * @return the rendered SQL statement
 * @throws InvalidFormatException if a required parameter is missing
 */
public static String buildStatsMinValueSql(Map<String, String> params) throws InvalidFormatException {
    // NOTE(review): the required-parameter set is hard-coded here (and duplicated in
    // every build method); consider deriving it from the template's placeholders so
    // it cannot drift out of sync with the SQL text.
    Set<String> requiredParams = Sets.newHashSet("table", "column");
    if (checkParams(requiredParams, params)) {
        return processTemplate(MIN_VALUE_SQL, params);
    } else {
        throw new InvalidFormatException("Wrong parameter format. need params: " + requiredParams);
    }
}
|
Set<String> requiredParams = Sets.newHashSet("table", "column");
|
/**
 * Builds the statistics SQL that selects the minimum value of a column.
 *
 * @param params placeholder values; must cover every placeholder in {@code MIN_VALUE_SQL}
 * @return the rendered SQL statement
 * @throws InvalidFormatException if any required placeholder value is missing
 */
public static String buildStatsMinValueSql(Map<String, String> params) throws InvalidFormatException {
    // Required parameters are derived from the template itself.
    Set<String> requiredParams = getTemplateParams(MIN_VALUE_SQL);
    if (!checkParams(requiredParams, params)) {
        throw new InvalidFormatException("Wrong parameter format. need params: " + requiredParams);
    }
    return processTemplate(MIN_VALUE_SQL, params);
}
|
/**
 * SQL templates used to collect internal table/column statistics, plus the helpers
 * that render them. Placeholders use the form {@code ${name}}. Required parameters
 * are derived from each template's placeholders instead of being hard-coded per
 * method, so the parameter list can never drift out of sync with the SQL text.
 */
class InternalSqlTemplate {
    /** -------------------------- for statistics begin -------------------------- */
    public static final String MIN_VALUE_SQL = "SELECT MIN(${column}) AS min_value FROM ${table};";

    public static final String PARTITION_MIN_VALUE_SQL = "SELECT MIN(${column}) AS min_value"
            + " FROM ${table} PARTITION (${partition});";

    public static final String MAX_VALUE_SQL = "SELECT MAX(${column}) AS max_value FROM ${table};";

    public static final String PARTITION_MAX_VALUE_SQL = "SELECT MAX(${column}) AS max_value FROM"
            + " ${table} PARTITION (${partition});";

    public static final String NDV_VALUE_SQL = "SELECT NDV(${column}) AS ndv FROM ${table};";

    public static final String PARTITION_NDV_VALUE_SQL = "SELECT NDV(${column}) AS ndv FROM"
            + " ${table} PARTITION (${partition});";

    public static final String MIN_MAX_NDV_VALUE_SQL = "SELECT MIN(${column}) AS min_value, MAX(${column})"
            + " AS max_value, NDV(${column}) AS ndv FROM ${table};";

    public static final String PARTITION_MIN_MAX_NDV_VALUE_SQL = "SELECT MIN(${column}) AS min_value,"
            + " MAX(${column}) AS max_value, NDV(${column}) AS ndv FROM ${table} PARTITION (${partition});";

    public static final String ROW_COUNT_SQL = "SELECT COUNT(1) AS row_count FROM ${table};";

    public static final String PARTITION_ROW_COUNT_SQL = "SELECT COUNT(1) AS row_count FROM ${table} PARTITION"
            + " (${partition});";

    public static final String MAX_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size FROM ${table};";

    public static final String PARTITION_MAX_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size FROM"
            + " ${table} PARTITION (${partition});";

    public static final String AVG_SIZE_SQL = "SELECT AVG(LENGTH(${column})) AS avg_size FROM ${table};";

    public static final String PARTITION_AVG_SIZE_SQL = "SELECT AVG(LENGTH(${column})) AS avg_size"
            + " FROM ${table} PARTITION (${partition});";

    public static final String MAX_AVG_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size,"
            + " AVG(LENGTH(${column})) AS avg_size FROM ${table};";

    public static final String PARTITION_MAX_AVG_SIZE_SQL = "SELECT MAX(LENGTH(${column}))"
            + " AS max_size, AVG(LENGTH(${column})) AS avg_size FROM ${table} PARTITION (${partition});";

    public static final String NUM_NULLS_SQL = "SELECT COUNT(1) AS num_nulls FROM ${table}"
            + " WHERE ${column} IS NULL;";

    public static final String PARTITION_NUM_NULLS_SQL = "SELECT COUNT(1) AS num_nulls FROM"
            + " ${table} PARTITION (${partition}) WHERE ${column} IS NULL;";
    /** ---------------------------- for statistics end ---------------------------- */

    private static final Logger LOG = LogManager.getLogger(InternalSqlTemplate.class);

    // Matches one placeholder of the form ${name} in a SQL template.
    private static final Pattern PATTERN = Pattern.compile("\\$\\{\\w+\\}");

    /**
     * Concatenate SQL statements based on templates and parameters. e.g.:
     * template: 'SELECT ${col} FROM ${table} WHERE id = ${id};',
     * parameters: {col=colName, table=tableName, id=1}
     * result sql: 'SELECT colName FROM tableName WHERE id = 1;'
     *
     * @param template sql template
     * @param params k,v parameter, if without parameter, params should be null
     * @return SQL statement with parameters concatenated
     */
    public static String processTemplate(String template, Map<String, String> params) {
        Matcher matcher = PATTERN.matcher(template);
        StringBuffer sb = new StringBuffer();
        while (matcher.find()) {
            String param = matcher.group();
            // Strip the "${" prefix and "}" suffix to obtain the parameter name.
            String value = params.get(param.substring(2, param.length() - 1));
            matcher.appendReplacement(sb, value == null ? "" : value);
        }
        matcher.appendTail(sb);
        LOG.debug("Template:{}, params: {}, SQL: {}", template, params, sb.toString());
        return sb.toString();
    }

    /**
     * Extracts the set of placeholder names ({@code ${name}} -> {@code name})
     * appearing in the given template. Never returns null.
     */
    private static Set<String> getTemplateParams(String template) {
        Matcher matcher = PATTERN.matcher(template);
        Set<String> requiredParams = Sets.newHashSet();
        while (matcher.find()) {
            String param = matcher.group();
            requiredParams.add(param.substring(2, param.length() - 1));
        }
        return requiredParams;
    }

    /**
     * Returns true iff every required parameter has a value in {@code params}.
     * A null params map is acceptable only when no parameters are required.
     */
    private static boolean checkParams(Set<String> requiredParams, Map<String, String> params) {
        if (params != null) {
            return params.keySet().containsAll(requiredParams);
        }
        return requiredParams == null || requiredParams.isEmpty();
    }

    /**
     * Validates {@code params} against the placeholders of {@code template} and
     * renders the SQL. Shared by all build methods below.
     *
     * @throws InvalidFormatException if a required placeholder value is missing
     */
    private static String buildSql(String template, Map<String, String> params) throws InvalidFormatException {
        Set<String> requiredParams = getTemplateParams(template);
        if (!checkParams(requiredParams, params)) {
            throw new InvalidFormatException("Wrong parameter format. need params: " + requiredParams);
        }
        return processTemplate(template, params);
    }

    public static String buildStatsPartitionMinValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MIN_VALUE_SQL, params);
    }

    public static String buildStatsMaxValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_VALUE_SQL, params);
    }

    public static String buildStatsPartitionMaxValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_VALUE_SQL, params);
    }

    public static String buildStatsNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(NDV_VALUE_SQL, params);
    }

    public static String buildStatsPartitionNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_NDV_VALUE_SQL, params);
    }

    public static String buildStatsMinMaxNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MIN_MAX_NDV_VALUE_SQL, params);
    }

    public static String buildStatsPartitionMinMaxNdvValueSql(Map<String, String> params)
            throws InvalidFormatException {
        return buildSql(PARTITION_MIN_MAX_NDV_VALUE_SQL, params);
    }

    public static String buildStatsRowCountSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(ROW_COUNT_SQL, params);
    }

    public static String buildStatsPartitionRowCountSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_ROW_COUNT_SQL, params);
    }

    public static String buildStatsMaxSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_SIZE_SQL, params);
    }

    public static String buildStatsPartitionMaxSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_SIZE_SQL, params);
    }

    public static String buildStatsAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(AVG_SIZE_SQL, params);
    }

    public static String buildStatsPartitionAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_AVG_SIZE_SQL, params);
    }

    public static String buildStatsMaxAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_AVG_SIZE_SQL, params);
    }

    public static String buildStatsPartitionMaxAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_AVG_SIZE_SQL, params);
    }

    public static String buildStatsNumNullsSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(NUM_NULLS_SQL, params);
    }

    public static String buildStatsPartitionNumNullsSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_NUM_NULLS_SQL, params);
    }
}
|
/**
 * SQL templates used to collect internal table/column statistics, plus the helpers
 * that render them. Placeholders use the form {@code ${name}}; required parameters
 * are derived from each template via {@link #getTemplateParams}.
 *
 * Fixes over the previous revision: {@code checkParams} no longer rejects a
 * template with no placeholders when {@code params} is null or empty
 * ({@code getTemplateParams} never returns null, so comparing against null was
 * always false), and the per-method validate-then-render boilerplate is shared
 * through a private helper.
 */
class InternalSqlTemplate {
    /** -------------------------- for statistics begin -------------------------- */
    public static final String MIN_VALUE_SQL = "SELECT MIN(${column}) AS min_value FROM ${table};";

    public static final String PARTITION_MIN_VALUE_SQL = "SELECT MIN(${column}) AS min_value"
            + " FROM ${table} PARTITION (${partition});";

    public static final String MAX_VALUE_SQL = "SELECT MAX(${column}) AS max_value FROM ${table};";

    public static final String PARTITION_MAX_VALUE_SQL = "SELECT MAX(${column}) AS max_value FROM"
            + " ${table} PARTITION (${partition});";

    public static final String NDV_VALUE_SQL = "SELECT NDV(${column}) AS ndv FROM ${table};";

    public static final String PARTITION_NDV_VALUE_SQL = "SELECT NDV(${column}) AS ndv FROM"
            + " ${table} PARTITION (${partition});";

    public static final String MIN_MAX_NDV_VALUE_SQL = "SELECT MIN(${column}) AS min_value, MAX(${column})"
            + " AS max_value, NDV(${column}) AS ndv FROM ${table};";

    public static final String PARTITION_MIN_MAX_NDV_VALUE_SQL = "SELECT MIN(${column}) AS min_value,"
            + " MAX(${column}) AS max_value, NDV(${column}) AS ndv FROM ${table} PARTITION (${partition});";

    public static final String ROW_COUNT_SQL = "SELECT COUNT(1) AS row_count FROM ${table};";

    public static final String PARTITION_ROW_COUNT_SQL = "SELECT COUNT(1) AS row_count FROM ${table} PARTITION"
            + " (${partition});";

    public static final String MAX_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size FROM ${table};";

    public static final String PARTITION_MAX_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size FROM"
            + " ${table} PARTITION (${partition});";

    public static final String AVG_SIZE_SQL = "SELECT AVG(LENGTH(${column})) AS avg_size FROM ${table};";

    public static final String PARTITION_AVG_SIZE_SQL = "SELECT AVG(LENGTH(${column})) AS avg_size"
            + " FROM ${table} PARTITION (${partition});";

    public static final String MAX_AVG_SIZE_SQL = "SELECT MAX(LENGTH(${column})) AS max_size,"
            + " AVG(LENGTH(${column})) AS avg_size FROM ${table};";

    public static final String PARTITION_MAX_AVG_SIZE_SQL = "SELECT MAX(LENGTH(${column}))"
            + " AS max_size, AVG(LENGTH(${column})) AS avg_size FROM ${table} PARTITION (${partition});";

    public static final String NUM_NULLS_SQL = "SELECT COUNT(1) AS num_nulls FROM ${table}"
            + " WHERE ${column} IS NULL;";

    public static final String PARTITION_NUM_NULLS_SQL = "SELECT COUNT(1) AS num_nulls FROM"
            + " ${table} PARTITION (${partition}) WHERE ${column} IS NULL;";
    /** ---------------------------- for statistics end ---------------------------- */

    private static final Logger LOG = LogManager.getLogger(InternalSqlTemplate.class);

    // Matches one placeholder of the form ${name} in a SQL template.
    private static final Pattern PATTERN = Pattern.compile("\\$\\{\\w+\\}");

    /**
     * Concatenate SQL statements based on templates and parameters. e.g.:
     * template: 'SELECT ${col} FROM ${table} WHERE id = ${id};',
     * parameters: {col=colName, table=tableName, id=1}
     * result sql: 'SELECT colName FROM tableName WHERE id = 1;'
     *
     * @param template sql template
     * @param params k,v parameter, if without parameter, params should be null
     * @return SQL statement with parameters concatenated
     */
    public static String processTemplate(String template, Map<String, String> params) {
        Matcher matcher = PATTERN.matcher(template);
        StringBuffer sb = new StringBuffer();
        while (matcher.find()) {
            String param = matcher.group();
            // Strip the "${" prefix and "}" suffix to obtain the parameter name.
            String value = params.get(param.substring(2, param.length() - 1));
            matcher.appendReplacement(sb, value == null ? "" : value);
        }
        matcher.appendTail(sb);
        LOG.debug("Template:{}, params: {}, SQL: {}", template, params, sb.toString());
        return sb.toString();
    }

    /**
     * Extracts the set of placeholder names ({@code ${name}} -> {@code name})
     * appearing in the given template. Never returns null.
     */
    private static Set<String> getTemplateParams(String template) {
        Matcher matcher = PATTERN.matcher(template);
        Set<String> requiredParams = Sets.newHashSet();
        while (matcher.find()) {
            String param = matcher.group();
            String value = param.substring(2, param.length() - 1);
            requiredParams.add(value);
        }
        return requiredParams;
    }

    /**
     * Returns true iff every required parameter has a value in {@code params}.
     * A null params map is acceptable only when no parameters are required.
     */
    private static boolean checkParams(Set<String> requiredParams, Map<String, String> params) {
        if (params != null) {
            return params.keySet().containsAll(requiredParams);
        }
        return requiredParams == null || requiredParams.isEmpty();
    }

    /**
     * Validates {@code params} against the placeholders of {@code template} and
     * renders the SQL. Shared by all build methods below.
     *
     * @throws InvalidFormatException if a required placeholder value is missing
     */
    private static String buildSql(String template, Map<String, String> params) throws InvalidFormatException {
        Set<String> requiredParams = getTemplateParams(template);
        if (!checkParams(requiredParams, params)) {
            throw new InvalidFormatException("Wrong parameter format. need params: " + requiredParams);
        }
        return processTemplate(template, params);
    }

    public static String buildStatsPartitionMinValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MIN_VALUE_SQL, params);
    }

    public static String buildStatsMaxValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_VALUE_SQL, params);
    }

    public static String buildStatsPartitionMaxValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_VALUE_SQL, params);
    }

    public static String buildStatsNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(NDV_VALUE_SQL, params);
    }

    public static String buildStatsPartitionNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_NDV_VALUE_SQL, params);
    }

    public static String buildStatsMinMaxNdvValueSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MIN_MAX_NDV_VALUE_SQL, params);
    }

    public static String buildStatsPartitionMinMaxNdvValueSql(Map<String, String> params)
            throws InvalidFormatException {
        return buildSql(PARTITION_MIN_MAX_NDV_VALUE_SQL, params);
    }

    public static String buildStatsRowCountSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(ROW_COUNT_SQL, params);
    }

    public static String buildStatsPartitionRowCountSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_ROW_COUNT_SQL, params);
    }

    public static String buildStatsMaxSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_SIZE_SQL, params);
    }

    public static String buildStatsPartitionMaxSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_SIZE_SQL, params);
    }

    public static String buildStatsAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(AVG_SIZE_SQL, params);
    }

    public static String buildStatsPartitionAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_AVG_SIZE_SQL, params);
    }

    public static String buildStatsMaxAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(MAX_AVG_SIZE_SQL, params);
    }

    public static String buildStatsPartitionMaxAvgSizeSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_MAX_AVG_SIZE_SQL, params);
    }

    public static String buildStatsNumNullsSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(NUM_NULLS_SQL, params);
    }

    public static String buildStatsPartitionNumNullsSql(Map<String, String> params) throws InvalidFormatException {
        return buildSql(PARTITION_NUM_NULLS_SQL, params);
    }
}
|
Consider moving the comment (or the comma) here and above so that each inline comment points at the right parameter.
|
/**
 * Builds two mock content clusters ("books" and "music") plus the state REST API
 * used by the tests. When {@code dontInitializeNode2} is true, the "music" cluster
 * is initialized with node 2 absent from slobrok.
 */
protected void setUp(boolean dontInitializeNode2) throws Exception {
    Distribution distribution = new Distribution(Distribution.getSimpleGroupConfig(2, 10));
    jsonWriter.setDefaultPathPrefix("/cluster/v2");
    {
        // "books" cluster: 4 configured nodes, all present.
        Set<ConfiguredNode> nodes = FleetControllerTest.toNodes(0, 1, 2, 3);
        ContentCluster cluster = new ContentCluster(
                "books", nodes, distribution, 6 /* minStorageNodesUp */, 0.9 /* minRatioOfStorageNodesUp */, true);
        initializeCluster(cluster, nodes);
        AnnotatedClusterState baselineState = AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:4 storage:4"));
        // Per-bucket-space states: "default" has storage node 3 in maintenance.
        Map<String, AnnotatedClusterState> bucketSpaceStates = new HashMap<>();
        bucketSpaceStates.put("default", AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:4 storage:4 .3.s:m")));
        bucketSpaceStates.put("global", baselineState);
        books = new ClusterControllerMock(cluster, baselineState.getClusterState(),
                ClusterStateBundle.of(baselineState, bucketSpaceStates), 0, 0);
    }
    {
        // "music" cluster: sparse node set; node 2 may be left out of slobrok.
        Set<ConfiguredNode> nodes = FleetControllerTest.toNodes(1, 2, 3, 5, 7);
        Set<ConfiguredNode> nodesInSlobrok = FleetControllerTest.toNodes(1, 3, 5, 7);
        ContentCluster cluster = new ContentCluster(
                "music", nodes, distribution, 4 /* minStorageNodesUp */, 0.0 /* minRatioOfStorageNodesUp */, true);
        if (dontInitializeNode2) {
            initializeCluster(cluster, nodesInSlobrok);
        }
        else {
            initializeCluster(cluster, nodes);
        }
        AnnotatedClusterState baselineState = AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:8 .0.s:d .2.s:d .4.s:d .6.s:d "
                + "storage:8 .0.s:d .2.s:d .4.s:d .6.s:d"));
        music = new ClusterControllerMock(cluster, baselineState.getClusterState(),
                ClusterStateBundle.ofBaselineOnly(baselineState), 0, 0);
    }
    ccSockets = new TreeMap<>();
    ccSockets.put(0, new ClusterControllerStateRestAPI.Socket("localhost", 80));
    restAPI = new ClusterControllerStateRestAPI(new ClusterControllerStateRestAPI.FleetControllerResolver() {
        @Override
        public Map<String, RemoteClusterControllerTaskScheduler> getFleetControllers() {
            Map<String, RemoteClusterControllerTaskScheduler> fleetControllers = new LinkedHashMap<>();
            fleetControllers.put(books.context.cluster.getName(), books);
            fleetControllers.put(music.context.cluster.getName(), music);
            return fleetControllers;
        }
    }, ccSockets);
}
|
"music", nodes, distribution, 4 /* minStorageNodesUp*/, 0.0, /* minRatioOfStorageNodesUp */true);
|
/**
 * Builds two mock content clusters ("books" and "music") plus the state REST API
 * used by the tests. When {@code dontInitializeNode2} is true, the "music" cluster
 * is initialized with node 2 absent from slobrok.
 */
protected void setUp(boolean dontInitializeNode2) throws Exception {
    Distribution distribution = new Distribution(Distribution.getSimpleGroupConfig(2, 10));
    jsonWriter.setDefaultPathPrefix("/cluster/v2");
    {
        // "books" cluster: 4 configured nodes, all present.
        Set<ConfiguredNode> nodes = FleetControllerTest.toNodes(0, 1, 2, 3);
        ContentCluster cluster = new ContentCluster(
                "books", nodes, distribution, 6 /* minStorageNodesUp*/, 0.9 /* minRatioOfStorageNodesUp */,
                true /* determineBucketsFromBucketSpaceMetric */);
        initializeCluster(cluster, nodes);
        AnnotatedClusterState baselineState = AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:4 storage:4"));
        // Per-bucket-space states: "default" has storage node 3 in maintenance.
        Map<String, AnnotatedClusterState> bucketSpaceStates = new HashMap<>();
        bucketSpaceStates.put("default", AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:4 storage:4 .3.s:m")));
        bucketSpaceStates.put("global", baselineState);
        books = new ClusterControllerMock(cluster, baselineState.getClusterState(),
                ClusterStateBundle.of(baselineState, bucketSpaceStates), 0, 0);
    }
    {
        // "music" cluster: sparse node set; node 2 may be left out of slobrok.
        Set<ConfiguredNode> nodes = FleetControllerTest.toNodes(1, 2, 3, 5, 7);
        Set<ConfiguredNode> nodesInSlobrok = FleetControllerTest.toNodes(1, 3, 5, 7);
        ContentCluster cluster = new ContentCluster(
                "music", nodes, distribution, 4 /* minStorageNodesUp*/, 0.0 /* minRatioOfStorageNodesUp */,
                true /* determineBucketsFromBucketSpaceMetric */);
        if (dontInitializeNode2) {
            initializeCluster(cluster, nodesInSlobrok);
        }
        else {
            initializeCluster(cluster, nodes);
        }
        AnnotatedClusterState baselineState = AnnotatedClusterState.withoutAnnotations(ClusterState.stateFromString("distributor:8 .0.s:d .2.s:d .4.s:d .6.s:d "
                + "storage:8 .0.s:d .2.s:d .4.s:d .6.s:d"));
        music = new ClusterControllerMock(cluster, baselineState.getClusterState(),
                ClusterStateBundle.ofBaselineOnly(baselineState), 0, 0);
    }
    ccSockets = new TreeMap<>();
    ccSockets.put(0, new ClusterControllerStateRestAPI.Socket("localhost", 80));
    restAPI = new ClusterControllerStateRestAPI(new ClusterControllerStateRestAPI.FleetControllerResolver() {
        @Override
        public Map<String, RemoteClusterControllerTaskScheduler> getFleetControllers() {
            Map<String, RemoteClusterControllerTaskScheduler> fleetControllers = new LinkedHashMap<>();
            fleetControllers.put(books.context.cluster.getName(), books);
            fleetControllers.put(music.context.cluster.getName(), music);
            return fleetControllers;
        }
    }, ccSockets);
}
|
/**
 * Simple {@code UnitStateRequest} backed by a slash-separated unit path and a
 * recursion depth. Fields are final — the request is immutable after construction.
 */
class StateRequest implements UnitStateRequest {
    // Path components of the unit; an empty request string maps to an empty path.
    private final String[] path;
    // Number of levels to recurse into when rendering state.
    private final int recursive;

    StateRequest(String req, int recursive) {
        path = req.isEmpty() ? new String[0] : req.split("/");
        this.recursive = recursive;
    }

    @Override
    public int getRecursiveLevels() { return recursive; }

    @Override
    public String[] getUnitPath() { return path; }
}
|
// Test helper implementing UnitStateRequest: wraps a slash-separated unit path
// and a recursion level for cluster-state REST API requests.
class StateRequest implements UnitStateRequest {
    private String[] path;      // unit path components; empty request -> empty path
    private int recursive;      // number of levels to recurse into

    StateRequest(String req, int recursive) {
        path = req.isEmpty() ? new String[0] : req.split("/");
        this.recursive = recursive;
    }

    @Override
    public int getRecursiveLevels() { return recursive;
    }

    @Override
    public String[] getUnitPath() { return path; }
}
|
You are right; currently the HBase connector only supports atomic types as columns (HBase qualifiers).
|
/**
 * Converts a Flink {@code TableSchema} into an {@code HBaseTableSchema}.
 * Fields of {@code RowTypeInfo} are treated as HBase column families whose nested
 * fields become qualifiers; any other field is taken as the row key. Java 8 time
 * classes are mapped to their {@code java.sql} equivalents because the connector
 * only supports atomic qualifier types.
 *
 * @param schema the declared table schema
 * @return the derived HBase schema
 */
private HBaseTableSchema validateTableSchema(TableSchema schema) {
    HBaseTableSchema hbaseSchema = new HBaseTableSchema();
    String[] fieldNames = schema.getFieldNames();
    TypeInformation[] fieldTypes = schema.getFieldTypes();
    for (int i = 0; i < fieldNames.length; i++) {
        String name = fieldNames[i];
        TypeInformation<?> type = fieldTypes[i];
        if (type instanceof RowTypeInfo) {
            // A row-typed field models a column family; its nested fields are qualifiers.
            RowTypeInfo familyType = (RowTypeInfo) type;
            String[] qualifierNames = familyType.getFieldNames();
            TypeInformation[] qualifierTypes = familyType.getFieldTypes();
            for (int j = 0; j < familyType.getArity(); j++) {
                // Use the wildcarded Class<?> instead of the raw type.
                Class<?> clazz = qualifierTypes[j].getTypeClass();
                if (LocalDateTime.class.equals(clazz)) {
                    clazz = Timestamp.class;
                } else if (LocalDate.class.equals(clazz)) {
                    clazz = Date.class;
                } else if (LocalTime.class.equals(clazz)) {
                    clazz = Time.class;
                }
                hbaseSchema.addColumn(name, qualifierNames[j], clazz);
            }
        } else {
            // Non-row field: interpreted as the row key.
            hbaseSchema.setRowKey(name, type.getTypeClass());
        }
    }
    return hbaseSchema;
}
|
clazz = Timestamp.class;
|
/**
 * Converts a Flink {@code TableSchema} into an {@code HBaseTableSchema}.
 * Fields of {@code RowTypeInfo} are treated as HBase column families whose nested
 * fields become qualifiers; any other field is taken as the row key. Java 8 time
 * classes are mapped to their {@code java.sql} equivalents because the connector
 * only supports atomic qualifier types.
 *
 * @param schema the declared table schema
 * @return the derived HBase schema
 */
private HBaseTableSchema validateTableSchema(TableSchema schema) {
    HBaseTableSchema hbaseSchema = new HBaseTableSchema();
    String[] fieldNames = schema.getFieldNames();
    TypeInformation[] fieldTypes = schema.getFieldTypes();
    for (int i = 0; i < fieldNames.length; i++) {
        String name = fieldNames[i];
        TypeInformation<?> type = fieldTypes[i];
        if (type instanceof RowTypeInfo) {
            // A row-typed field models a column family; its nested fields are qualifiers.
            RowTypeInfo familyType = (RowTypeInfo) type;
            String[] qualifierNames = familyType.getFieldNames();
            TypeInformation[] qualifierTypes = familyType.getFieldTypes();
            for (int j = 0; j < familyType.getArity(); j++) {
                Class clazz = qualifierTypes[j].getTypeClass();
                if (LocalDateTime.class.equals(clazz)) {
                    clazz = Timestamp.class;
                } else if (LocalDate.class.equals(clazz)) {
                    clazz = Date.class;
                } else if (LocalTime.class.equals(clazz)) {
                    clazz = Time.class;
                }
                hbaseSchema.addColumn(name, qualifierNames[j], clazz);
            }
        } else {
            // Non-row field: interpreted as the row key.
            hbaseSchema.setRowKey(name, type.getTypeClass());
        }
    }
    return hbaseSchema;
}
|
/**
 * Table factory that creates HBase-backed stream sources and upsert sinks from descriptor
 * properties (connector.*, schema.*).
 */
class HBaseTableFactory implements StreamTableSourceFactory<Row>, StreamTableSinkFactory<Tuple2<Boolean, Row>> {
/** Builds a scan source: HBase client config from ZK properties plus the validated schema. */
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = getValidatedProperties(properties);
Configuration hbaseClientConf = HBaseConfiguration.create();
String hbaseZk = descriptorProperties.getString(CONNECTOR_ZK_QUORUM);
hbaseClientConf.set(HConstants.ZOOKEEPER_QUORUM, hbaseZk);
// ZK node parent is optional; only override the HBase default when provided.
descriptorProperties
.getOptionalString(CONNECTOR_ZK_NODE_PARENT)
.ifPresent(v -> hbaseClientConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, v));
String hTableName = descriptorProperties.getString(CONNECTOR_TABLE_NAME);
TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);
return new HBaseTableSource(hbaseClientConf, hTableName, hbaseSchema, null);
}
/** Builds an upsert sink; write-buffer flush options are all optional. */
@Override
public StreamTableSink<Tuple2<Boolean, Row>> createStreamTableSink(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = getValidatedProperties(properties);
HBaseOptions.Builder hbaseOptionsBuilder = HBaseOptions.builder();
hbaseOptionsBuilder.setZkQuorum(descriptorProperties.getString(CONNECTOR_ZK_QUORUM));
hbaseOptionsBuilder.setTableName(descriptorProperties.getString(CONNECTOR_TABLE_NAME));
descriptorProperties
.getOptionalString(CONNECTOR_ZK_NODE_PARENT)
.ifPresent(hbaseOptionsBuilder::setZkNodeParent)
TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);
HBaseWriteOptions.Builder writeBuilder = HBaseWriteOptions.builder();
descriptorProperties
.getOptionalInt(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_ROWS)
.ifPresent(writeBuilder::setBufferFlushMaxRows);
descriptorProperties
.getOptionalMemorySize(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_SIZE)
.ifPresent(v -> writeBuilder.setBufferFlushMaxSizeInBytes(v.getBytes()));
descriptorProperties
.getOptionalDuration(CONNECTOR_WRITE_BUFFER_FLUSH_INTERVAL)
.ifPresent(v -> writeBuilder.setBufferFlushIntervalMillis(v.toMillis()));
return new HBaseUpsertTableSink(
hbaseSchema,
hbaseOptionsBuilder.build(),
writeBuilder.build()
);
}
// Validates the raw property map before any value is read.
private DescriptorProperties getValidatedProperties(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
descriptorProperties.putProperties(properties);
new HBaseValidator().validate(descriptorProperties);
return descriptorProperties;
}
@Override
public Map<String, String> requiredContext() {
Map<String, String> context = new HashMap<>();
context.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_HBASE);
context.put(CONNECTOR_VERSION, hbaseVersion());
context.put(CONNECTOR_PROPERTY_VERSION, "1");
return context;
}
@Override
public List<String> supportedProperties() {
List<String> properties = new ArrayList<>();
properties.add(CONNECTOR_TABLE_NAME);
properties.add(CONNECTOR_ZK_QUORUM);
properties.add(CONNECTOR_ZK_NODE_PARENT);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_SIZE);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_ROWS);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_INTERVAL);
// NOTE(review): the next three lines look truncated by text extraction (unterminated
// string literal, likely schema.* wildcard keys) — confirm against upstream source.
properties.add(SCHEMA + ".
properties.add(SCHEMA + ".
properties.add(SCHEMA + ".
return properties;
}
private String hbaseVersion() {
return CONNECTOR_VERSION_VALUE_143;
}
}
|
/** Factory for HBase stream table sources and upsert sinks, driven by descriptor properties. */
class HBaseTableFactory implements StreamTableSourceFactory<Row>, StreamTableSinkFactory<Tuple2<Boolean, Row>> {
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = getValidatedProperties(properties);
// Client config only needs the ZK quorum (and optional znode parent) to locate HBase.
Configuration hbaseClientConf = HBaseConfiguration.create();
String hbaseZk = descriptorProperties.getString(CONNECTOR_ZK_QUORUM);
hbaseClientConf.set(HConstants.ZOOKEEPER_QUORUM, hbaseZk);
descriptorProperties
.getOptionalString(CONNECTOR_ZK_NODE_PARENT)
.ifPresent(v -> hbaseClientConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, v));
String hTableName = descriptorProperties.getString(CONNECTOR_TABLE_NAME);
TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);
return new HBaseTableSource(hbaseClientConf, hTableName, hbaseSchema, null);
}
@Override
public StreamTableSink<Tuple2<Boolean, Row>> createStreamTableSink(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = getValidatedProperties(properties);
HBaseOptions.Builder hbaseOptionsBuilder = HBaseOptions.builder();
hbaseOptionsBuilder.setZkQuorum(descriptorProperties.getString(CONNECTOR_ZK_QUORUM));
hbaseOptionsBuilder.setTableName(descriptorProperties.getString(CONNECTOR_TABLE_NAME));
descriptorProperties
.getOptionalString(CONNECTOR_ZK_NODE_PARENT)
.ifPresent(hbaseOptionsBuilder::setZkNodeParent);
TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);
// All write-buffer tuning knobs are optional; builder defaults apply otherwise.
HBaseWriteOptions.Builder writeBuilder = HBaseWriteOptions.builder();
descriptorProperties
.getOptionalInt(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_ROWS)
.ifPresent(writeBuilder::setBufferFlushMaxRows);
descriptorProperties
.getOptionalMemorySize(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_SIZE)
.ifPresent(v -> writeBuilder.setBufferFlushMaxSizeInBytes(v.getBytes()));
descriptorProperties
.getOptionalDuration(CONNECTOR_WRITE_BUFFER_FLUSH_INTERVAL)
.ifPresent(v -> writeBuilder.setBufferFlushIntervalMillis(v.toMillis()));
return new HBaseUpsertTableSink(
hbaseSchema,
hbaseOptionsBuilder.build(),
writeBuilder.build()
);
}
private DescriptorProperties getValidatedProperties(Map<String, String> properties) {
final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
descriptorProperties.putProperties(properties);
new HBaseValidator().validate(descriptorProperties);
return descriptorProperties;
}
@Override
public Map<String, String> requiredContext() {
Map<String, String> context = new HashMap<>();
context.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_HBASE);
context.put(CONNECTOR_VERSION, hbaseVersion());
context.put(CONNECTOR_PROPERTY_VERSION, "1");
return context;
}
@Override
public List<String> supportedProperties() {
List<String> properties = new ArrayList<>();
properties.add(CONNECTOR_TABLE_NAME);
properties.add(CONNECTOR_ZK_QUORUM);
properties.add(CONNECTOR_ZK_NODE_PARENT);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_SIZE);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_ROWS);
properties.add(CONNECTOR_WRITE_BUFFER_FLUSH_INTERVAL);
// NOTE(review): lines below appear truncated by extraction — verify against upstream.
properties.add(SCHEMA + ".
properties.add(SCHEMA + ".
properties.add(SCHEMA + ".
return properties;
}
private String hbaseVersion() {
return CONNECTOR_VERSION_VALUE_143;
}
}
|
Sorry, I missed this on the first pass. We want `RuntimeException` to be passed through without modification if possible. I believe this will work: ``` if (e.getCause() instanceof RuntimeException) { throw (RuntimeException) e.getCause(); } throw new RuntimeException(e); ```
|
/**
 * Waits for the async evaluation in {@code c} to finish and, if the value is non-null,
 * converts it to a Beam {@code Row} and emits it with the stored timestamp.
 *
 * <p>If the computation failed, the original {@code RuntimeException} cause is re-thrown
 * unchanged so callers see the real failure type; non-runtime causes are wrapped.
 *
 * @throws InterruptedException if interrupted while waiting on the future
 */
private void outputRow(TimestampedFuture c, OutputReceiver<Row> r) throws InterruptedException {
final Value v;
try {
v = c.future().get();
} catch (ExecutionException e) {
// Pass RuntimeExceptions through without an extra wrapper layer.
Throwable cause = checkArgumentNotNull(e.getCause());
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
}
throw new RuntimeException(cause);
}
if (!v.isNull()) {
Row row = ZetaSqlBeamTranslationUtils.toBeamRow(v, outputSchema, verifyRowValues);
r.outputWithTimestamp(row, c.timestamp());
}
}
|
throw new RuntimeException(checkArgumentNotNull(e.getCause()));
|
/**
 * Blocks on the timestamped future and emits its value (when non-null) as a Beam Row,
 * surfacing any evaluation failure via {@code extractException}.
 */
private void outputRow(TimestampedFuture c, OutputReceiver<Row> r) throws InterruptedException {
Value value;
try {
value = c.future().get();
} catch (ExecutionException e) {
// Unwrap the ExecutionException so the caller sees the underlying failure.
throw extractException(e);
}
if (value.isNull()) {
return;
}
r.outputWithTimestamp(
ZetaSqlBeamTranslationUtils.toBeamRow(value, outputSchema, verifyRowValues), c.timestamp());
}
|
/**
 * Adapts a {@code FinishBundleContext} to the {@code OutputReceiver} interface so buffered
 * rows can be flushed during finishBundle. Only timestamped output is supported, because
 * finishBundle has no implicit element timestamp.
 */
class OutputReceiverForFinishBundle implements OutputReceiver<Row> {
private final FinishBundleContext c;
private final BoundedWindow w;
private OutputReceiverForFinishBundle(FinishBundleContext c, BoundedWindow w) {
this.c = c;
this.w = w;
}
@Override
public void output(Row output) {
// UnsupportedOperationException (a RuntimeException subtype, so backward-compatible)
// is the idiomatic signal for an unimplemented optional operation.
throw new UnsupportedOperationException(
"output(Row) is not supported here; use outputWithTimestamp instead");
}
@Override
public void outputWithTimestamp(Row output, Instant timestamp) {
c.output(output, timestamp, w);
}
}
|
/**
 * OutputReceiver adapter over a FinishBundleContext: every row is emitted into the captured
 * window with an explicit timestamp; plain {@code output} is rejected.
 */
class OutputReceiverForFinishBundle implements OutputReceiver<Row> {

// Context used to emit rows during finishBundle.
private final FinishBundleContext c;
// Window the emitted rows are assigned to.
private final BoundedWindow w;

private OutputReceiverForFinishBundle(FinishBundleContext c, BoundedWindow w) {
this.c = c;
this.w = w;
}

@Override
public void outputWithTimestamp(Row output, Instant timestamp) {
c.output(output, timestamp, w);
}

@Override
public void output(Row output) {
throw new RuntimeException("Unsupported");
}
}
|
Could you extract this if/else chain into a new private method?
|
/**
 * Converts the statement wrapped by an EXPLAIN into a Calcite {@code SqlNode}.
 *
 * <p>The type dispatch is extracted into {@code convertSqlNode}; an absent statement — or an
 * unsupported type (the helper returns null, which {@code Optional.map} treats as empty) —
 * results in an {@code IllegalStateException}.
 */
private SqlNode convertSQLStatement(final ExplainStatement deleteStatement) {
return deleteStatement.getStatement().map(this::convertSqlNode).orElseThrow(IllegalStateException::new);
}

/** Dispatches on the concrete statement type; returns null for unsupported statements. */
private SqlNode convertSqlNode(final SQLStatement sqlStatement) {
if (sqlStatement instanceof SelectStatement) {
return new SelectStatementConverter().convert((SelectStatement) sqlStatement);
}
if (sqlStatement instanceof DeleteStatement) {
return new DeleteStatementConverter().convert((DeleteStatement) sqlStatement);
}
return null;
}
|
if (each instanceof SelectStatement) {
|
/**
 * Converts the explained statement to a SqlNode via {@code convertSqlNode}; throws
 * IllegalStateException when the statement is absent or unsupported (helper returns null).
 */
private SqlNode convertSQLStatement(final ExplainStatement deleteStatement) {
return deleteStatement.getStatement()
.map(this::convertSqlNode)
.orElseThrow(IllegalStateException::new);
}
|
/** Converts an EXPLAIN statement into a Calcite {@code SqlExplain} node. */
class ExplainStatementConverter implements SQLStatementConverter<ExplainStatement, SqlNode> {
// NOTE(review): parameter is named deleteStatement but is any explained statement —
// consider renaming in a follow-up.
@Override
public SqlNode convert(final ExplainStatement deleteStatement) {
// ALL_ATTRIBUTES + TYPE depth + TEXT format, with dynamic-parameter count 0.
return new SqlExplain(SqlParserPos.ZERO, convertSQLStatement(deleteStatement), SqlExplainLevel.ALL_ATTRIBUTES.symbol(SqlParserPos.ZERO),
SqlExplain.Depth.TYPE.symbol(SqlParserPos.ZERO), SqlExplainFormat.TEXT.symbol(SqlParserPos.ZERO), 0);
}
}
|
/** Converts an EXPLAIN statement into a Calcite {@code SqlExplain} node. */
class ExplainStatementConverter implements SQLStatementConverter<ExplainStatement, SqlNode> {

@Override
public SqlNode convert(final ExplainStatement deleteStatement) {
SqlNode explicandum = convertSQLStatement(deleteStatement);
return new SqlExplain(SqlParserPos.ZERO, explicandum,
SqlExplainLevel.ALL_ATTRIBUTES.symbol(SqlParserPos.ZERO),
SqlExplain.Depth.TYPE.symbol(SqlParserPos.ZERO),
SqlExplainFormat.TEXT.symbol(SqlParserPos.ZERO), 0);
}

/** Dispatches on statement type; unsupported statements yield null. */
private SqlNode convertSqlNode(final SQLStatement sqlStatement) {
if (sqlStatement instanceof SelectStatement) {
return new SelectStatementConverter().convert((SelectStatement) sqlStatement);
}
if (sqlStatement instanceof DeleteStatement) {
return new DeleteStatementConverter().convert((DeleteStatement) sqlStatement);
}
return null;
}
}
|
The EXECUTE command needs to return rows in MySQL binary row format
|
/**
 * Builds this query's thrift options: starts from the session variables, then applies
 * per-query overrides (pipeline engine flag, BE exec version, timeouts, serial scan,
 * MySQL binary row format for prepared-statement execution).
 */
private void initQueryOptions(ConnectContext context) {
this.queryOptions = context.getSessionVariable().toThrift();
queryOptions.setEnablePipelineEngine(SessionVariable.enablePipelineEngine());
queryOptions.setBeExecVersion(Config.be_exec_version);
queryOptions.setQueryTimeout(context.getExecTimeout());
queryOptions.setExecutionTimeout(context.getExecTimeout());
queryOptions.setEnableScanNodeRunSerial(context.getSessionVariable().isEnableScanRunSerial());
// COM_STMT_EXECUTE (prepared statement execution) requires MySQL binary row format.
boolean isBinaryProtocol = context.getCommand() == MysqlCommand.COM_STMT_EXECUTE;
queryOptions.setMysqlRowBinaryFormat(isBinaryProtocol);
}
|
this.queryOptions.setMysqlRowBinaryFormat(
|
// Initializes the per-query thrift options from the session variables plus overrides.
private void initQueryOptions(ConnectContext context) {
this.queryOptions = context.getSessionVariable().toThrift();
this.queryOptions.setEnablePipelineEngine(SessionVariable.enablePipelineEngine());
this.queryOptions.setBeExecVersion(Config.be_exec_version);
this.queryOptions.setQueryTimeout(context.getExecTimeout());
this.queryOptions.setExecutionTimeout(context.getExecTimeout());
this.queryOptions.setEnableScanNodeRunSerial(context.getSessionVariable().isEnableScanRunSerial());
// Prepared-statement execution (COM_STMT_EXECUTE) must return MySQL binary-format rows.
this.queryOptions.setMysqlRowBinaryFormat(
context.getCommand() == MysqlCommand.COM_STMT_EXECUTE);
}
|
class Coordinator implements CoordInterface {
private static final Logger LOG = LogManager.getLogger(Coordinator.class);
private static final String localIP = FrontendOptions.getLocalHostAddress();
private static final Random instanceRandom = new Random();
Status queryStatus = new Status();
Map<TNetworkAddress, Long> addressToBackendID = Maps.newHashMap();
private ImmutableMap<Long, Backend> idToBackend = ImmutableMap.of();
private final TDescriptorTable descTable;
private Map<Integer, TFileScanRangeParams> fileScanRangeParamsMap = Maps.newHashMap();
private final TQueryGlobals queryGlobals = new TQueryGlobals();
private TQueryOptions queryOptions;
private TNetworkAddress coordAddress;
private final Lock lock = new ReentrantLock();
private boolean returnedAllResults;
private final Map<PlanFragmentId, FragmentExecParams> fragmentExecParamsMap = Maps.newHashMap();
private final List<PlanFragment> fragments;
private int instanceTotalNum;
private Map<Long, BackendExecStates> beToExecStates = Maps.newHashMap();
private Map<Long, PipelineExecContexts> beToPipelineExecCtxs = Maps.newHashMap();
private final List<BackendExecState> backendExecStates = Lists.newArrayList();
private final Map<Pair<Integer, Long>, PipelineExecContext> pipelineExecContexts = new HashMap<>();
private final List<BackendExecState> needCheckBackendExecStates = Lists.newArrayList();
private final List<PipelineExecContext> needCheckPipelineExecContexts = Lists.newArrayList();
private ResultReceiver receiver;
private final List<ScanNode> scanNodes;
private int scanRangeNum = 0;
private final Set<TUniqueId> instanceIds = Sets.newHashSet();
private final boolean isBlockQuery;
private int numReceivedRows = 0;
private List<String> deltaUrls;
private Map<String, String> loadCounters;
private String trackingUrl;
private List<String> exportFiles;
private final List<TTabletCommitInfo> commitInfos = Lists.newArrayList();
private final List<TErrorTabletInfo> errorTabletInfos = Lists.newArrayList();
private long jobId = -1;
private TUniqueId queryId;
private final boolean needReport;
private final TUniqueId nextInstanceId;
private long timeoutDeadline;
private boolean enableShareHashTableForBroadcastJoin = false;
private boolean enablePipelineEngine = false;
private boolean fasterFloatConvert = false;
private int maxMsgSizeOfResultReceiver = TConfiguration.DEFAULT_MAX_MESSAGE_SIZE;
public TNetworkAddress runtimeFilterMergeAddr;
public TUniqueId runtimeFilterMergeInstanceId;
public Map<RuntimeFilterId, List<FRuntimeFilterTargetParam>> ridToTargetParam = Maps.newHashMap();
public List<RuntimeFilter> assignedRuntimeFilters = new ArrayList<>();
public Map<RuntimeFilterId, Integer> ridToBuilderNum = Maps.newHashMap();
private PointQueryExec pointExec = null;
private StatsErrorEstimator statsErrorEstimator;
// Sets the workload groups this query's pipelines should run in on the BEs.
public void setTWorkloadGroups(List<TPipelineWorkloadGroup> tWorkloadGroups) {
this.tWorkloadGroups = tWorkloadGroups;
}
private List<TPipelineWorkloadGroup> tWorkloadGroups = Lists.newArrayList();
private final ExecutionProfile executionProfile;
// Returns the execution profile created for this query (one per coordinator instance).
public ExecutionProfile getExecutionProfile() {
return executionProfile;
}
private boolean isAllExternalScan = true;
// Query-path constructor that additionally records a stats error estimator.
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner,
StatsErrorEstimator statsErrorEstimator) {
this(context, analyzer, planner);
this.statsErrorEstimator = statsErrorEstimator;
}
// Used for query/insert: all settings are derived from the session and the planner output.
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner) {
this.isBlockQuery = planner.isBlockQuery();
this.queryId = context.queryId();
this.fragments = planner.getFragments();
this.scanNodes = planner.getScanNodes();
this.descTable = planner.getDescTable().toThrift();
this.returnedAllResults = false;
this.enableShareHashTableForBroadcastJoin = context.getSessionVariable().enableShareHashTableForBroadcastJoin;
// Pipeline engine is only used when the top fragment ends in a ResultSink.
this.enablePipelineEngine = context.getSessionVariable().getEnablePipelineEngine()
&& (fragments.size() > 0 && fragments.get(0).getSink() instanceof ResultSink);
this.fasterFloatConvert = context.getSessionVariable().fasterFloatConvert();
initQueryOptions(context);
setFromUserProperty(context);
this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
this.queryGlobals.setTimestampMs(System.currentTimeMillis());
this.queryGlobals.setNanoSeconds(LocalDateTime.now().getNano());
this.queryGlobals.setLoadZeroTolerance(false);
// "CST" is ambiguous; map it to the system default time zone.
if (context.getSessionVariable().getTimeZone().equals("CST")) {
this.queryGlobals.setTimeZone(TimeUtils.DEFAULT_TIME_ZONE);
} else {
this.queryGlobals.setTimeZone(context.getSessionVariable().getTimeZone());
}
this.needReport = context.getSessionVariable().enableProfile();
// Instance ids are derived from the query id: same hi, lo offset by 1.
this.nextInstanceId = new TUniqueId();
nextInstanceId.setHi(queryId.hi);
nextInstanceId.setLo(queryId.lo + 1);
this.assignedRuntimeFilters = planner.getRuntimeFilters();
this.executionProfile = new ExecutionProfile(queryId, fragments.size());
this.maxMsgSizeOfResultReceiver = context.getSessionVariable().getMaxMsgSizeOfResultReceiver();
}
// Used for broker load / export jobs: no session context, always reports progress.
public Coordinator(Long jobId, TUniqueId queryId, DescriptorTable descTable, List<PlanFragment> fragments,
List<ScanNode> scanNodes, String timezone, boolean loadZeroTolerance) {
this.isBlockQuery = true;
this.jobId = jobId;
this.queryId = queryId;
this.descTable = descTable.toThrift();
this.fragments = fragments;
this.scanNodes = scanNodes;
this.queryOptions = new TQueryOptions();
this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
this.queryGlobals.setTimestampMs(System.currentTimeMillis());
this.queryGlobals.setTimeZone(timezone);
this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
this.queryOptions.setBeExecVersion(Config.be_exec_version);
this.needReport = true;
this.nextInstanceId = new TUniqueId();
nextInstanceId.setHi(queryId.hi);
nextInstanceId.setLo(queryId.lo + 1);
this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
// Applies per-user resource limits (CPU and exec memory) from user properties.
private void setFromUserProperty(ConnectContext connectContext) {
String qualifiedUser = connectContext.getQualifiedUser();
// A non-positive cpu limit means "no limit configured" and is skipped.
int cpuLimit = Env.getCurrentEnv().getAuth().getCpuResourceLimit(qualifiedUser);
if (cpuLimit > 0) {
TResourceLimit resourceLimit = new TResourceLimit();
resourceLimit.setCpuLimit(cpuLimit);
this.queryOptions.setResourceLimit(resourceLimit);
}
// Session-level exec_mem_limit wins; fall back to the user-property limit otherwise.
long maxExecMemByte = connectContext.getSessionVariable().getMaxExecMemByte();
long memLimit = maxExecMemByte > 0 ? maxExecMemByte :
Env.getCurrentEnv().getAuth().getExecMemLimit(qualifiedUser);
if (memLimit > 0) {
// The same bound is used for mem limit, reservation and buffer pool.
this.queryOptions.setMemLimit(memLimit);
this.queryOptions.setMaxReservation(memLimit);
this.queryOptions.setInitialReservationTotalClaims(memLimit);
this.queryOptions.setBufferPoolLimit(memLimit);
}
}
// ---- Simple accessors over coordinator / query-option state. ----
public long getJobId() {
return jobId;
}
public TUniqueId getQueryId() {
return queryId;
}
public int getScanRangeNum() {
return scanRangeNum;
}
public void setQueryId(TUniqueId queryId) {
this.queryId = queryId;
}
public void setQueryType(TQueryType type) {
this.queryOptions.setQueryType(type);
}
public void setExecPipEngine(boolean vec) {
this.queryOptions.setEnablePipelineEngine(vec);
}
public Status getExecStatus() {
return queryStatus;
}
public List<String> getDeltaUrls() {
return deltaUrls;
}
public Map<String, String> getLoadCounters() {
return loadCounters;
}
public String getTrackingUrl() {
return trackingUrl;
}
public void setExecMemoryLimit(long execMemoryLimit) {
this.queryOptions.setMemLimit(execMemoryLimit);
}
public void setLoadMemLimit(long loadMemLimit) {
this.queryOptions.setLoadMemLimit(loadMemLimit);
}
// Keeps query and execution timeout in sync.
public void setTimeout(int timeout) {
this.queryOptions.setQueryTimeout(timeout);
this.queryOptions.setExecutionTimeout(timeout);
}
public void setLoadZeroTolerance(boolean loadZeroTolerance) {
this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
}
// Resets per-attempt export state (exec states, status, exported file list) under the lock
// so an export job can be retried cleanly.
public void clearExportStatus() {
lock.lock();
try {
this.backendExecStates.clear();
this.pipelineExecContexts.clear();
this.queryStatus.setStatus(new Status());
if (this.exportFiles == null) {
this.exportFiles = Lists.newArrayList();
}
this.exportFiles.clear();
this.needCheckBackendExecStates.clear();
this.needCheckPipelineExecContexts.clear();
} finally {
lock.unlock();
}
}
public List<TTabletCommitInfo> getCommitInfos() {
return commitInfos;
}
public List<TErrorTabletInfo> getErrorTabletInfos() {
return errorTabletInfos;
}
// Returns "host:port" -> instance count, from whichever exec-state map is in use
// (pipeline vs. non-pipeline engine). TreeMap keeps the output sorted by address.
public Map<String, Integer> getBeToInstancesNum() {
Map<String, Integer> result = Maps.newTreeMap();
if (enablePipelineEngine) {
for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
result.put(ctxs.brpcAddr.hostname.concat(":").concat("" + ctxs.brpcAddr.port),
ctxs.getInstanceNumber());
}
} else {
for (BackendExecStates states : beToExecStates.values()) {
result.put(states.brpcAddr.hostname.concat(":").concat("" + states.brpcAddr.port),
states.states.size());
}
}
return result;
}
@Override
public int getInstanceTotalNum() {
return instanceTotalNum;
}
// Builds the per-fragment exec-param map, wires fragment input edges via DataStreamSinks,
// records the coordinator address, and snapshots the current backend list.
private void prepare() {
for (PlanFragment fragment : fragments) {
fragmentExecParamsMap.put(fragment.getFragmentId(), new FragmentExecParams(fragment));
}
// A fragment with a DataStreamSink feeds its destination fragment.
for (PlanFragment fragment : fragments) {
if (!(fragment.getSink() instanceof DataStreamSink)) {
continue;
}
FragmentExecParams params = fragmentExecParamsMap.get(fragment.getDestFragment().getFragmentId());
params.inputFragments.add(fragment.getFragmentId());
}
coordAddress = new TNetworkAddress(localIP, Config.rpc_port);
// Snapshot the backend map once so scheduling sees a consistent cluster view.
this.idToBackend = Env.getCurrentSystemInfo().getIdToBackend();
if (LOG.isDebugEnabled()) {
LOG.debug("idToBackend size={}", idToBackend.size());
for (Map.Entry<Long, Backend> entry : idToBackend.entrySet()) {
Long backendID = entry.getKey();
Backend backend = entry.getValue();
LOG.debug("backend: {}-{}-{}", backendID, backend.getHost(), backend.getBePort());
}
}
}
// Convenience wrappers around the coordinator's ReentrantLock.
private void lock() {
lock.lock();
}
private void unlock() {
lock.unlock();
}
// Debug-logs one line summarizing the query id and every fragment's exec params.
private void traceInstance() {
if (LOG.isDebugEnabled()) {
StringBuilder sb = new StringBuilder();
int idx = 0;
sb.append("query id=").append(DebugUtil.printId(queryId)).append(",");
sb.append("fragment=[");
for (Map.Entry<PlanFragmentId, FragmentExecParams> entry : fragmentExecParamsMap.entrySet()) {
// Comma-separate entries after the first.
if (idx++ != 0) {
sb.append(",");
}
sb.append(entry.getKey());
// appendTo writes the params directly into the shared builder.
entry.getValue().appendTo(sb);
}
sb.append("]");
LOG.debug(sb.toString());
}
}
// Entry point: prepares scheduling state, sets up the result receiver (for queries) or
// load-job progress tracking (for loads), then dispatches fragments to the BEs.
@Override
public void exec() throws Exception {
if (LOG.isDebugEnabled() && !scanNodes.isEmpty()) {
LOG.debug("debug: in Coordinator::exec. query id: {}, planNode: {}",
DebugUtil.printId(queryId), scanNodes.get(0).treeToThrift());
}
if (LOG.isDebugEnabled() && !fragments.isEmpty()) {
LOG.debug("debug: in Coordinator::exec. query id: {}, fragment: {}",
DebugUtil.printId(queryId), fragments.get(0).toThrift());
}
// Phase 1: compute scan-range assignment and per-fragment exec params.
prepare();
computeScanRangeAssignment();
computeFragmentExecParams();
traceInstance();
QeProcessorImpl.INSTANCE.registerInstances(queryId, instanceIds.size());
// The first fragment in the list is the top (result-producing) fragment.
PlanFragmentId topId = fragments.get(0).getFragmentId();
FragmentExecParams topParams = fragmentExecParamsMap.get(topId);
DataSink topDataSink = topParams.fragment.getSink();
this.timeoutDeadline = System.currentTimeMillis() + queryOptions.getExecutionTimeout() * 1000L;
if (topDataSink instanceof ResultSink || topDataSink instanceof ResultFileSink) {
// Query path: results come back from the first instance of the top fragment.
TNetworkAddress execBeAddr = topParams.instanceExecParams.get(0).host;
receiver = new ResultReceiver(queryId, topParams.instanceExecParams.get(0).instanceId,
addressToBackendID.get(execBeAddr), toBrpcHost(execBeAddr), this.timeoutDeadline,
this.maxMsgSizeOfResultReceiver);
if (LOG.isDebugEnabled()) {
LOG.debug("dispatch query job: {} to {}", DebugUtil.printId(queryId),
topParams.instanceExecParams.get(0).host);
}
// OUTFILE through a broker needs the broker address resolved up front.
if (topDataSink instanceof ResultFileSink
&& ((ResultFileSink) topDataSink).getStorageType() == StorageBackend.StorageType.BROKER) {
ResultFileSink topResultFileSink = (ResultFileSink) topDataSink;
FsBroker broker = Env.getCurrentEnv().getBrokerMgr()
.getBroker(topResultFileSink.getBrokerName(), execBeAddr.getHostname());
topResultFileSink.setBrokerAddr(broker.host, broker.port);
}
} else {
// Load path: no result receiver; register the job for progress reporting instead.
this.queryOptions.setIsReportSuccess(true);
deltaUrls = Lists.newArrayList();
loadCounters = Maps.newHashMap();
List<Long> relatedBackendIds = Lists.newArrayList(addressToBackendID.values());
Env.getCurrentEnv().getLoadManager().initJobProgress(jobId, queryId, instanceIds,
relatedBackendIds);
Env.getCurrentEnv().getProgressManager().addTotalScanNums(String.valueOf(jobId), scanRangeNum);
LOG.info("dispatch load job: {} to {}", DebugUtil.printId(queryId), addressToBackendID.keySet());
}
executionProfile.markInstances(instanceIds);
// Phase 2: ship fragments, using the engine selected at construction time.
if (enablePipelineEngine) {
sendPipelineCtx();
} else {
sendFragment();
}
}
/**
* The logic for sending query plan fragments is as follows:
* First, plan fragments are dependent. According to the order in "fragments" list,
* it must be ensured that on the BE side, the next fragment instance can be executed
* only after the previous fragment instance is ready,
* <p>
* In the previous logic, we will send fragment instances in sequence through RPC,
* and will wait for the RPC of the previous fragment instance to return successfully
* before sending the next one. But for some complex queries, this may lead to too many RPCs.
* <p>
* The optimized logic is as follows:
* 1. If the number of fragment instance is <= 2, the original logic is still used
* to complete the sending of fragments through at most 2 RPCs.
* 2. If the number of fragment instance is >= 3, first group all fragments by BE,
* and send all fragment instances to the corresponding BE node through the FIRST rpc,
* but these fragment instances will only perform the preparation phase but will not be actually executed.
* After that, the execution logic of all fragment instances is started through the SECOND RPC.
* <p>
* After optimization, a query on a BE node will only send two RPCs at most.
* Thereby reducing the "send fragment timeout" error caused by too many RPCs and BE unable to process in time.
*
* @throws TException
* @throws RpcException
* @throws UserException
*/
// Ships all fragment instances to the BEs, batched per backend (see the javadoc above for
// the one-vs-two-phase RPC strategy). Holds the coordinator lock for the whole send.
private void sendFragment() throws TException, RpcException, UserException {
lock();
try {
// Count instances per host so each BE knows how many fragments share it.
Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
for (FragmentExecParams params : fragmentExecParamsMap.values()) {
for (FInstanceExecParam fi : params.instanceExecParams) {
hostCounter.add(fi.host);
}
}
int backendIdx = 0;
int profileFragmentId = 0;
long memoryLimit = queryOptions.getMemLimit();
beToExecStates.clear();
// With >= 2 fragments, send everything first and trigger execution with a second RPC.
boolean twoPhaseExecution = fragments.size() >= 2;
for (PlanFragment fragment : fragments) {
FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
int instanceNum = params.instanceExecParams.size();
Preconditions.checkState(instanceNum > 0);
instanceTotalNum += instanceNum;
List<TExecPlanFragmentParams> tParams = params.toThrift(backendIdx);
// Colocate-join fragments share a host, so scale the memory limit down.
if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceNum);
long newMemory = memoryLimit / rate;
for (TExecPlanFragmentParams tParam : tParams) {
tParam.query_options.setMemLimit(newMemory);
}
}
// Only the top fragment of a LOAD needs backend-liveness checking.
boolean needCheckBackendState = false;
if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
needCheckBackendState = true;
}
int instanceId = 0;
for (TExecPlanFragmentParams tParam : tParams) {
BackendExecState execState =
new BackendExecState(fragment.getFragmentId(), instanceId++,
profileFragmentId, tParam, this.addressToBackendID,
executionProfile.getLoadChannelProfile());
tParam.setFragmentNumOnHost(hostCounter.count(execState.address));
tParam.setBackendId(execState.backend.getId());
tParam.setNeedWaitExecutionTrigger(twoPhaseExecution);
backendExecStates.add(execState);
if (needCheckBackendState) {
needCheckBackendExecStates.add(execState);
if (LOG.isDebugEnabled()) {
LOG.debug("add need check backend {} for fragment, {} job: {}",
execState.backend.getId(), fragment.getFragmentId().asInt(), jobId);
}
}
// Group exec states by backend so each BE receives one batched RPC.
BackendExecStates states = beToExecStates.get(execState.backend.getId());
if (states == null) {
states = new BackendExecStates(execState.backend.getId(), execState.brpcAddress,
twoPhaseExecution);
beToExecStates.putIfAbsent(execState.backend.getId(), states);
}
states.addState(execState);
++backendIdx;
}
profileFragmentId += 1;
}
// First RPC round: deliver (and, in single-phase mode, start) the fragments.
List<Triple<BackendExecStates, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
futures = Lists.newArrayList();
Context parentSpanContext = Context.current();
for (BackendExecStates states : beToExecStates.values()) {
Span span = Telemetry.getNoopSpan();
if (ConnectContext.get() != null) {
span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
.setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
}
states.scopedSpan = new ScopedSpan(span);
states.unsetFields();
BackendServiceProxy proxy = BackendServiceProxy.getInstance();
futures.add(ImmutableTriple.of(states, proxy, states.execRemoteFragmentsAsync(proxy)));
}
waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
if (twoPhaseExecution) {
// Second RPC round: all fragments are in place, now trigger execution.
futures.clear();
for (BackendExecStates states : beToExecStates.values()) {
Span span = Telemetry.getNoopSpan();
if (ConnectContext.get() != null) {
span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
.setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
}
states.scopedSpan = new ScopedSpan(span);
BackendServiceProxy proxy = BackendServiceProxy.getInstance();
futures.add(ImmutableTriple.of(states, proxy, states.execPlanFragmentStartAsync(proxy)));
}
waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
}
attachInstanceProfileToFragmentProfile();
} finally {
unlock();
}
}
/**
 * Builds per-backend pipeline execution contexts for every fragment and ships them to the
 * backends over brpc. When the plan has two or more fragments, execution is two-phase:
 * fragments are first deployed everywhere, then a second round of "start" RPCs triggers them,
 * so that no fragment begins before all of its peers are in place.
 *
 * Holds the coordinator lock for the whole send sequence.
 *
 * @throws TException / RpcException / UserException on serialization or RPC failure
 *         (surfaced by waitPipelineRpc)
 */
private void sendPipelineCtx() throws TException, RpcException, UserException {
    lock();
    try {
        // Count instances per host so each fragment params carries how many sibling
        // fragments share its backend (used by the BE for resource decisions).
        Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            for (FInstanceExecParam fi : params.instanceExecParams) {
                hostCounter.add(fi.host);
            }
        }
        int backendIdx = 0;
        int profileFragmentId = 0;
        beToPipelineExecCtxs.clear();
        // Two-phase deployment is only needed when there is more than one fragment.
        boolean twoPhaseExecution = fragments.size() >= 2;
        for (PlanFragment fragment : fragments) {
            FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
            int instanceNum = params.instanceExecParams.size();
            Preconditions.checkState(instanceNum > 0);
            Map<TNetworkAddress, TPipelineFragmentParams> tParams = params.toTPipelineParams(backendIdx);
            boolean needCheckBackendState = false;
            if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                // For load jobs, only the first fragment's backends are health-checked later.
                needCheckBackendState = true;
            }
            // Pre-create one RuntimeProfile per instance, keyed by instance id.
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap = new HashMap<TUniqueId, RuntimeProfile>();
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                for (TPipelineInstanceParams instanceParam : entry.getValue().local_params) {
                    String name = "Instance " + DebugUtil.printId(instanceParam.fragment_instance_id)
                            + " (host=" + entry.getKey() + ")";
                    fragmentInstancesMap.put(instanceParam.fragment_instance_id, new RuntimeProfile(name));
                }
            }
            // One PipelineExecContext per (fragment, backend) pair.
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                Long backendId = this.addressToBackendID.get(entry.getKey());
                PipelineExecContext pipelineExecContext = new PipelineExecContext(fragment.getFragmentId(),
                        profileFragmentId, entry.getValue(), backendId, fragmentInstancesMap,
                        executionProfile.getLoadChannelProfile());
                entry.getValue().setFragmentNumOnHost(hostCounter.count(pipelineExecContext.address));
                entry.getValue().setBackendId(pipelineExecContext.backend.getId());
                entry.getValue().setNeedWaitExecutionTrigger(twoPhaseExecution);
                entry.getValue().setFragmentId(fragment.getFragmentId().asInt());
                pipelineExecContexts.put(Pair.of(fragment.getFragmentId().asInt(), backendId), pipelineExecContext);
                if (needCheckBackendState) {
                    needCheckPipelineExecContexts.add(pipelineExecContext);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("add need check backend {} for fragment, {} job: {}",
                                pipelineExecContext.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                    }
                }
                // Group all contexts destined for the same backend so they can be sent
                // in a single batched RPC below.
                PipelineExecContexts ctxs = beToPipelineExecCtxs.get(pipelineExecContext.backend.getId());
                if (ctxs == null) {
                    ctxs = new PipelineExecContexts(pipelineExecContext.backend.getId(),
                            pipelineExecContext.brpcAddress, twoPhaseExecution,
                            entry.getValue().getFragmentNumOnHost());
                    beToPipelineExecCtxs.putIfAbsent(pipelineExecContext.backend.getId(), ctxs);
                }
                ctxs.addContext(pipelineExecContext);
                ++backendIdx;
            }
            profileFragmentId += 1;
        }
        // Phase 1: deploy fragments to every backend asynchronously, then wait.
        List<Triple<PipelineExecContexts, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                futures = Lists.newArrayList();
        Context parentSpanContext = Context.current();
        for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
            Span span = Telemetry.getNoopSpan();
            if (ConnectContext.get() != null) {
                span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                        .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
            }
            ctxs.scopedSpan = new ScopedSpan(span);
            ctxs.unsetFields();
            BackendServiceProxy proxy = BackendServiceProxy.getInstance();
            futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execRemoteFragmentsAsync(proxy)));
        }
        waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
        if (twoPhaseExecution) {
            // Phase 2: everything is deployed; now tell each backend to actually start.
            futures.clear();
            for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                ctxs.scopedSpan = new ScopedSpan(span);
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execPlanFragmentStartAsync(proxy)));
            }
            waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
        }
        attachInstanceProfileToFragmentProfile();
    } finally {
        unlock();
    }
}
/**
 * Waits for a batch of asynchronous exec-fragment RPCs (non-pipeline engine) and converts
 * failures into exceptions. On any non-OK result the whole query is cancelled.
 *
 * Error mapping:
 *   - TIMEOUT           -> RpcException, RPC-failure metric bumped
 *   - THRIFT_RPC_ERROR  -> RpcException, metric bumped, backend blacklisted, proxy evicted
 *   - anything else     -> UserException
 *
 * @param futures   (state, proxy, future) triples produced by execRemoteFragmentsAsync
 * @param leftTimeMs remaining budget before the query deadline; must be positive
 * @param operation  human-readable label used in error messages
 */
private void waitRpc(List<Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>>> futures,
        long leftTimeMs,
        String operation) throws RpcException, UserException {
    if (leftTimeMs <= 0) {
        // Deadline already passed before we even started waiting.
        long elapsed = (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout();
        String msg = String.format(
                "timeout before waiting %s rpc, query timeout:%d, already elapsed:%d, left for this:%d",
                operation, queryOptions.getExecutionTimeout(), elapsed, leftTimeMs);
        LOG.warn("Query {} {}", DebugUtil.printId(queryId), msg);
        throw new UserException(msg);
    }
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            // Connection is suspect; drop the cached proxy so the next call reconnects.
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = String.format(
                    "timeout when waiting for %s rpc, query timeout:%d, left timeout for this operation:%d",
                    operation, queryOptions.getExecutionTimeout(), timeoutMs / 1000);
            LOG.warn("Query {} {}", DebugUtil.printId(queryId), errMsg);
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        // Only hard RPC errors blacklist the backend; a timeout may be the
                        // query's fault rather than the node's.
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            span.recordException(e);
            throw e;
        } finally {
            // End the per-backend trace span whether the RPC succeeded or not.
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
/**
 * Pipeline-engine counterpart of waitRpc: waits for a batch of asynchronous
 * exec-fragment RPCs and converts failures into exceptions, cancelling the
 * query on any non-OK result. Error mapping is identical to waitRpc
 * (TIMEOUT/THRIFT_RPC_ERROR -> RpcException + metric, the latter also
 * blacklists the backend; everything else -> UserException).
 *
 * @param futures    (contexts, proxy, future) triples per backend
 * @param leftTimeMs remaining budget before the query deadline; must be positive
 * @param operation  human-readable label used in error messages
 */
private void waitPipelineRpc(List<Triple<PipelineExecContexts, BackendServiceProxy,
        Future<PExecPlanFragmentResult>>> futures, long leftTimeMs,
        String operation) throws RpcException, UserException {
    if (leftTimeMs <= 0) {
        // Deadline already passed before we even started waiting.
        long elapsed = (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout();
        String msg = String.format(
                "timeout before waiting %s rpc, query timeout:%d, already elapsed:%d, left for this:%d",
                operation, queryOptions.getExecutionTimeout(), elapsed, leftTimeMs);
        LOG.warn("Query {} {}", DebugUtil.printId(queryId), msg);
        throw new UserException(msg);
    }
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<PipelineExecContexts, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            // Connection is suspect; drop the cached proxy so the next call reconnects.
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = String.format(
                    "timeout when waiting for %s rpc, query timeout:%d, left timeout for this operation:%d",
                    operation, queryOptions.getExecutionTimeout(), timeoutMs / 1000);
            LOG.warn("Query {} {}", DebugUtil.printId(queryId), errMsg);
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        // Only hard RPC errors blacklist the backend, not plain timeouts.
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            span.recordException(e);
            throw e;
        } finally {
            // End the per-backend trace span whether the RPC succeeded or not.
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
/**
 * Returns the export file paths accumulated so far by updateExportFiles,
 * or null if no export files were reported yet.
 */
public List<String> getExportFiles() {
    return exportFiles;
}
/**
 * Appends a batch of newly produced export file paths to the accumulated
 * list, creating the list lazily on first use. Guarded by {@code lock}.
 */
void updateExportFiles(List<String> files) {
    lock.lock();
    try {
        List<String> current = exportFiles;
        if (current == null) {
            current = Lists.newArrayList();
            exportFiles = current;
        }
        current.addAll(files);
    } finally {
        lock.unlock();
    }
}
/**
 * Appends a batch of delta URLs reported by backends to {@code deltaUrls}.
 * Guarded by {@code lock}.
 */
void updateDeltas(List<String> urls) {
    lock.lock();
    try {
        urls.forEach(deltaUrls::add);
    } finally {
        lock.unlock();
    }
}
/**
 * Merges a backend's reported load counters into the coordinator-wide totals.
 * The three tracked counters (normal rows, abnormal rows, unselected rows) are
 * each summed as: existing value (0 if absent) + newly reported value (0 if absent).
 * Guarded by {@code lock}.
 *
 * @param newLoadCounters counters from a single backend report; missing keys count as 0
 */
private void updateLoadCounters(Map<String, String> newLoadCounters) {
    lock.lock();
    try {
        long numRowsNormal = parseCounterOrZero(this.loadCounters, LoadEtlTask.DPP_NORMAL_ALL)
                + parseCounterOrZero(newLoadCounters, LoadEtlTask.DPP_NORMAL_ALL);
        long numRowsAbnormal = parseCounterOrZero(this.loadCounters, LoadEtlTask.DPP_ABNORMAL_ALL)
                + parseCounterOrZero(newLoadCounters, LoadEtlTask.DPP_ABNORMAL_ALL);
        long numRowsUnselected = parseCounterOrZero(this.loadCounters, LoadJob.UNSELECTED_ROWS)
                + parseCounterOrZero(newLoadCounters, LoadJob.UNSELECTED_ROWS);
        this.loadCounters.put(LoadEtlTask.DPP_NORMAL_ALL, "" + numRowsNormal);
        this.loadCounters.put(LoadEtlTask.DPP_ABNORMAL_ALL, "" + numRowsAbnormal);
        this.loadCounters.put(LoadJob.UNSELECTED_ROWS, "" + numRowsUnselected);
    } finally {
        lock.unlock();
    }
}

// Parses the counter stored under {@code key}, treating a missing entry as 0.
private static long parseCounterOrZero(Map<String, String> counters, String key) {
    String value = counters.get(key);
    return value == null ? 0L : Long.parseLong(value);
}
/**
 * Appends tablet commit infos reported by a backend to the coordinator-wide
 * list. Guarded by {@code lock}.
 */
private void updateCommitInfos(List<TTabletCommitInfo> commitInfos) {
    lock.lock();
    try {
        // Parameter shadows the field; qualify the field explicitly.
        List<TTabletCommitInfo> reported = commitInfos;
        this.commitInfos.addAll(reported);
    } finally {
        lock.unlock();
    }
}
/**
 * Appends reported error-tablet infos, capped at
 * {@code Config.max_error_tablet_of_broker_load} total entries; entries beyond
 * the cap are silently dropped. Guarded by {@code lock}.
 */
private void updateErrorTabletInfos(List<TErrorTabletInfo> errorTabletInfos) {
    lock.lock();
    try {
        long capacityLeft = Config.max_error_tablet_of_broker_load - this.errorTabletInfos.size();
        if (capacityLeft >= 0) {
            this.errorTabletInfos.addAll(
                    errorTabletInfos.stream().limit(capacityLeft).collect(Collectors.toList()));
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Records the first failure status reported by any instance and cancels the
 * whole query. No-ops when: all results were already returned and this is just
 * a cancellation echo; the reported status is OK; or a failure was already
 * recorded (first error wins). Guarded by {@code lock}.
 *
 * @param status     status reported by an instance
 * @param instanceId reporting instance, or null when not instance-specific
 */
private void updateStatus(Status status, TUniqueId instanceId) {
    lock.lock();
    try {
        boolean ignorable = (returnedAllResults && status.isCancelled())
                || status.ok()
                || !queryStatus.ok();
        if (ignorable) {
            return;
        }
        queryStatus.setStatus(status);
        LOG.warn("one instance report fail throw updateStatus(), need cancel. job id: {},"
                + " query id: {}, instance id: {}, error message: {}",
                jobId, DebugUtil.printId(queryId), instanceId != null ? DebugUtil.printId(instanceId) : "NaN",
                status.getErrorMsg());
        cancelInternal(status.getErrorCode() == TStatusCode.TIMEOUT
                ? Types.PPlanFragmentCancelReason.TIMEOUT
                : Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
    } finally {
        lock.unlock();
    }
}
/**
 * Fetches the next batch of results from the result receiver.
 *
 * Also performs coordinator housekeeping: propagates any receiver failure into
 * the global query status (which cancels the query), rewrites/truncates error
 * messages before surfacing them, and on EOS may proactively cancel remaining
 * instances when a LIMIT has already been satisfied.
 *
 * @return the next RowBatch; isEos() marks the end of the stream
 * @throws UserException when there is no receiver or the query failed
 * @throws RpcException  when the failure was an RPC error
 */
@Override
public RowBatch getNext() throws Exception {
    if (receiver == null) {
        throw new UserException("There is no receiver.");
    }
    RowBatch resultBatch;
    Status status = new Status();
    resultBatch = receiver.getNext(status);
    if (!status.ok()) {
        LOG.warn("Query {} coordinator get next fail, {}, need cancel.",
                DebugUtil.printId(queryId), status.toString());
    }
    // Merge the receiver status into the query-wide status (cancels on failure).
    updateStatus(status, null /* no instance id */);
    // Snapshot the query status under the lock before inspecting it.
    Status copyStatus = null;
    lock();
    try {
        copyStatus = new Status(queryStatus);
    } finally {
        unlock();
    }
    if (!copyStatus.ok()) {
        if (Strings.isNullOrEmpty(copyStatus.getErrorMsg())) {
            copyStatus.rewriteErrorMsg();
        }
        if (copyStatus.isRpcError()) {
            throw new RpcException(null, copyStatus.getErrorMsg());
        } else {
            String errMsg = copyStatus.getErrorMsg();
            LOG.warn("Query {} failed: {}", DebugUtil.printId(queryId), errMsg);
            // Strip backend host details from the user-facing message.
            int hostIndex = errMsg.indexOf("host");
            if (hostIndex != -1) {
                errMsg = errMsg.substring(0, hostIndex);
            }
            throw new UserException(errMsg);
        }
    }
    if (resultBatch.isEos()) {
        this.returnedAllResults = true;
        // If a LIMIT was already satisfied across multiple instances, cancel the rest
        // early instead of letting them run to completion.
        Long numLimitRows = fragments.get(0).getPlanRoot().getLimit();
        boolean hasLimit = numLimitRows > 0;
        if (!isBlockQuery && instanceIds.size() > 1 && hasLimit && numReceivedRows >= numLimitRows) {
            LOG.debug("no block query, return num >= limit rows, need cancel");
            cancelInternal(Types.PPlanFragmentCancelReason.LIMIT_REACH);
        }
        if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
            // Dry-run queries report the statistics row count instead of received rows.
            numReceivedRows = 0;
            numReceivedRows += resultBatch.getQueryStatistics().getReturnedRows();
        }
    } else if (resultBatch.getBatch() != null) {
        numReceivedRows += resultBatch.getBatch().getRowsSize();
    }
    return resultBatch;
}
/** Cancels the query on behalf of the user (USER_CANCEL reason). */
public void cancel() {
    cancel(Types.PPlanFragmentCancelReason.USER_CANCEL);
}
/**
 * Cancels query execution with the given reason. If a failure status was
 * already recorded, the cancellation is a no-op (the earlier status wins);
 * otherwise the status is set to CANCELLED and all remote work is cancelled.
 */
@Override
public void cancel(Types.PPlanFragmentCancelReason cancelReason) {
    lock();
    try {
        if (!queryStatus.ok()) {
            // Already failed or cancelled; keep the original status.
            return;
        }
        queryStatus.setStatus(Status.CANCELLED);
        LOG.warn("cancel execution of query, this is outside invoke");
        cancelInternal(cancelReason);
    } finally {
        unlock();
    }
}
/**
 * Cancels all components of the running query: the result receiver (if any),
 * then either the point-query executor or every remote fragment. For point
 * queries there are no remote fragments or profiles to touch.
 */
private void cancelInternal(Types.PPlanFragmentCancelReason cancelReason) {
    if (receiver != null) {
        receiver.cancel(cancelReason.toString());
    }
    if (pointExec != null) {
        // Point queries have no remote fragments; cancelling the executor is enough.
        pointExec.cancel();
        return;
    }
    cancelRemoteFragmentsAsync(cancelReason);
    executionProfile.onCancel();
}
/**
 * Asynchronously cancels every in-flight fragment instance, dispatching to the
 * pipeline or non-pipeline execution contexts depending on the engine in use.
 */
private void cancelRemoteFragmentsAsync(Types.PPlanFragmentCancelReason cancelReason) {
    if (enablePipelineEngine) {
        pipelineExecContexts.values().forEach(ctx -> ctx.cancelFragmentInstance(cancelReason));
    } else {
        backendExecStates.forEach(state -> state.cancelFragmentInstance(cancelReason));
    }
}
/**
 * Computes execution parameters for every fragment: assigns hosts, generates
 * instance ids derived from the query id, wires runtime-filter addresses, and
 * builds each sender fragment's destination list (its parent's exchange node
 * instances), including the special layouts for bucket-shuffle joins and
 * shared-hash-table broadcast joins.
 */
private void computeFragmentExecParams() throws Exception {
    // Decide which hosts run which fragment instances first.
    computeFragmentHosts();
    // Instance ids are queryId.lo + 1, +2, ... so they stay unique per query.
    instanceIds.clear();
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("fragment {} has instances {}",
                    params.fragment.getFragmentId(), params.instanceExecParams.size());
        }
        for (int j = 0; j < params.instanceExecParams.size(); ++j) {
            TUniqueId instanceId = new TUniqueId();
            instanceId.setHi(queryId.hi);
            instanceId.setLo(queryId.lo + instanceIds.size() + 1);
            params.instanceExecParams.get(j).instanceId = instanceId;
            instanceIds.add(instanceId);
        }
    }
    // Multi-cast fragments wire their own destinations separately.
    computeMultiCastFragmentParams();
    assignRuntimeFilterAddr();
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (params.fragment instanceof MultiCastPlanFragment) {
            continue;
        }
        PlanFragment destFragment = params.fragment.getDestFragment();
        if (destFragment == null) {
            // Root fragment: results go to the coordinator, no destinations needed.
            continue;
        }
        FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
        DataSink sink = params.fragment.getSink();
        PlanNodeId exchId = sink.getExchNodeId();
        PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
        Preconditions.checkState(exchNode != null, "exchNode is null");
        Preconditions.checkState(exchNode instanceof ExchangeNode,
                "exchNode is not ExchangeNode" + exchNode.getId().toString());
        // Accumulate sender counts per exchange; several fragments may feed one exchange.
        if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
            destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
        } else {
            destParams.perExchNumSenders.put(exchId.asInt(),
                    params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
        }
        if (sink.getOutputPartition() != null
                && sink.getOutputPartition().isBucketShuffleHashPartition()) {
            // Bucket-shuffle: one destination slot per bucket, pointed at the instance
            // that owns the bucket; unowned buckets get a dummy 0.0.0.0 destination.
            Preconditions.checkState(bucketShuffleJoinController
                    .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                    + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
            int bucketSeq = 0;
            int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
            if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                    || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                bucketNum = 1;
                destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
            }
            TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
            while (bucketSeq < bucketNum) {
                TPlanFragmentDestination dest = new TPlanFragmentDestination();
                dest.fragment_instance_id = new TUniqueId(-1, -1);
                dest.server = dummyServer;
                dest.setBrpcServer(dummyServer);
                for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                    if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                        dest.fragment_instance_id = instanceExecParams.instanceId;
                        dest.server = toRpcHost(instanceExecParams.host);
                        dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                        break;
                    }
                }
                bucketSeq++;
                params.destinations.add(dest);
            }
        } else {
            if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                    && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
                // Shared hash table: send to only one instance per host; the other
                // instances on that host reuse its hash table.
                Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                destParams.instanceExecParams.forEach(param -> {
                    if (destHosts.containsKey(param.host)) {
                        destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                    } else {
                        destHosts.put(param.host, param);
                        param.buildHashTableForBroadcastJoin = true;
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = param.instanceId;
                        try {
                            dest.server = toRpcHost(param.host);
                            dest.setBrpcServer(toBrpcHost(param.host));
                        } catch (Exception e) {
                            // Lambda cannot throw checked exceptions; rethrow unchecked.
                            throw new RuntimeException(e);
                        }
                        params.destinations.add(dest);
                    }
                });
            } else {
                // Plain case: one destination per destination-fragment instance.
                for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                    dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                    dest.setBrpcServer(toBrpcHost(destParams.instanceExecParams.get(j).host));
                    params.destinations.add(dest);
                }
            }
        }
    }
}
/**
 * Builds the per-destination-fragment destination lists for MultiCastPlanFragments.
 * A multi-cast fragment has one DataStreamSink per destination fragment; each sink
 * gets its own destination list following the same three layouts as regular
 * fragments (bucket-shuffle, shared-hash-table broadcast, plain one-per-instance).
 */
private void computeMultiCastFragmentParams() throws Exception {
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (!(params.fragment instanceof MultiCastPlanFragment)) {
            continue;
        }
        MultiCastPlanFragment multi = (MultiCastPlanFragment) params.fragment;
        Preconditions.checkState(multi.getSink() instanceof MultiCastDataSink);
        MultiCastDataSink multiSink = (MultiCastDataSink) multi.getSink();
        // Each destination fragment pairs with the sink at the same index.
        for (int i = 0; i < multi.getDestFragmentList().size(); i++) {
            PlanFragment destFragment = multi.getDestFragmentList().get(i);
            DataStreamSink sink = multiSink.getDataStreamSinks().get(i);
            if (destFragment == null) {
                continue;
            }
            FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
            multi.getDestFragmentList().get(i).setOutputPartition(params.fragment.getOutputPartition());
            PlanNodeId exchId = sink.getExchNodeId();
            PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
            // Each exchange must not have been wired by a previous fragment already.
            Preconditions.checkState(!destParams.perExchNumSenders.containsKey(exchId.asInt()));
            Preconditions.checkState(exchNode != null, "exchNode is null");
            Preconditions.checkState(exchNode instanceof ExchangeNode,
                    "exchNode is not ExchangeNode" + exchNode.getId().toString());
            if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
                destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
            } else {
                destParams.perExchNumSenders.put(exchId.asInt(),
                        params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
            }
            List<TPlanFragmentDestination> destinations = multiSink.getDestinations().get(i);
            if (sink.getOutputPartition() != null
                    && sink.getOutputPartition().isBucketShuffleHashPartition()) {
                // Bucket-shuffle: one destination slot per bucket; unowned buckets get
                // a dummy 0.0.0.0 destination.
                Preconditions.checkState(bucketShuffleJoinController
                        .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                        + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
                int bucketSeq = 0;
                int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
                if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                        || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                    bucketNum = 1;
                    destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
                }
                TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
                while (bucketSeq < bucketNum) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = new TUniqueId(-1, -1);
                    dest.server = dummyServer;
                    dest.setBrpcServer(dummyServer);
                    for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                        if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                            dest.fragment_instance_id = instanceExecParams.instanceId;
                            dest.server = toRpcHost(instanceExecParams.host);
                            dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                            break;
                        }
                    }
                    bucketSeq++;
                    destinations.add(dest);
                }
            } else if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                    && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
                // Shared hash table: one destination per host; co-located instances
                // reuse the chosen instance's hash table.
                Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                destParams.instanceExecParams.forEach(param -> {
                    if (destHosts.containsKey(param.host)) {
                        destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                    } else {
                        destHosts.put(param.host, param);
                        param.buildHashTableForBroadcastJoin = true;
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = param.instanceId;
                        try {
                            dest.server = toRpcHost(param.host);
                            dest.setBrpcServer(toBrpcHost(param.host));
                        } catch (Exception e) {
                            // Lambda cannot throw checked exceptions; rethrow unchecked.
                            throw new RuntimeException(e);
                        }
                        destinations.add(dest);
                    }
                });
            } else {
                // Plain case: one destination per destination-fragment instance.
                for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                    dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                    // NOTE(review): direct field assignment here, unlike setBrpcServer()
                    // everywhere else — presumably equivalent for thrift object fields,
                    // but confirm and consider unifying on the setter.
                    dest.brpc_server = toBrpcHost(destParams.instanceExecParams.get(j).host);
                    destinations.add(dest);
                }
            }
        }
    }
}
/**
 * Resolves a backend's (host, be-port) address to its thrift RPC address.
 *
 * @throws UserException when no backend is registered at that address
 */
private TNetworkAddress toRpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo()
            .getBackendWithBePort(host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
    }
    return new TNetworkAddress(backend.getHost(), backend.getBeRpcPort());
}
/**
 * Resolves a backend's (host, be-port) address to its brpc address, or null
 * when the backend reports a negative brpc port.
 *
 * @throws UserException when no backend is registered at that address
 */
private TNetworkAddress toBrpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo()
            .getBackendWithBePort(host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_BACKEND_LOAD_AVAILABLE_MSG);
    }
    int brpcPort = backend.getBrpcPort();
    return brpcPort < 0 ? null : new TNetworkAddress(backend.getHost(), brpcPort);
}
/**
 * Returns true if the fragment-local plan tree rooted at {@code node} contains
 * a UnionNode. Children behind an ExchangeNode belong to other fragments and
 * are not descended into.
 *
 * Fix: the previous version returned the recursive result of the FIRST
 * non-exchange child, so UnionNodes under later siblings were never found.
 */
private boolean containsUnionNode(PlanNode node) {
    if (node instanceof UnionNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Stop at fragment boundary.
            continue;
        }
        if (containsUnionNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if the fragment-local plan tree rooted at {@code node} contains
 * an IntersectNode. Children behind an ExchangeNode belong to other fragments
 * and are not descended into.
 *
 * Fix: the previous version returned the recursive result of the FIRST
 * non-exchange child, so IntersectNodes under later siblings were never found.
 */
private boolean containsIntersectNode(PlanNode node) {
    if (node instanceof IntersectNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Stop at fragment boundary.
            continue;
        }
        if (containsIntersectNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if the fragment-local plan tree rooted at {@code node} contains
 * an ExceptNode. Children behind an ExchangeNode belong to other fragments and
 * are not descended into.
 *
 * Fix: the previous version returned the recursive result of the FIRST
 * non-exchange child, so ExceptNodes under later siblings were never found.
 */
private boolean containsExceptNode(PlanNode node) {
    if (node instanceof ExceptNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Stop at fragment boundary.
            continue;
        }
        if (containsExceptNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if the fragment-local plan tree rooted at {@code node} contains
 * any SetOperationNode. Children behind an ExchangeNode belong to other
 * fragments and are not descended into.
 *
 * Fix: the previous version returned the recursive result of the FIRST
 * non-exchange child, so set-operation nodes under later siblings were
 * never found.
 */
private boolean containsSetOperationNode(PlanNode node) {
    if (node instanceof SetOperationNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Stop at fragment boundary.
            continue;
        }
        if (containsSetOperationNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Assigns execution hosts (and instance counts) to every fragment, walking the
 * fragment list bottom-up so that each fragment can mirror the placement of the
 * child fragments it consumes. Four placement strategies are used:
 *   1. UNPARTITIONED fragments -> a single instance on one scheduled backend;
 *   2. fragments whose leftmost node is an exchange -> instances placed on the
 *      hosts of the widest input fragment, optionally capped by the session's
 *      exchange instance parallelism;
 *   3. colocate / bucket-shuffle fragments -> delegated to the dedicated
 *      instance-param computations;
 *   4. scan fragments -> instances derived from the scan-range assignment,
 *      either splitting ranges per instance or sharing them (pipeline
 *      shared-scan), with a scheduler-picked fallback host when no scan
 *      ranges exist.
 */
private void computeFragmentHosts() throws Exception {
    // Bottom-up: children are placed before the fragments that consume them.
    for (int i = fragments.size() - 1; i >= 0; --i) {
        PlanFragment fragment = fragments.get(i);
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        if (fragment.getDataPartition() == DataPartition.UNPARTITIONED) {
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            // Prefer an already-used backend when resource tags are set or when
            // compute nodes are preferred for fully-external scans.
            if (((ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()) || (isAllExternalScan
                    && Config.prefer_compute_node_for_external_table)) && !addressToBackendID.isEmpty()) {
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                LOG.warn("DataPartition UNPARTITIONED, no scanNode Backend available");
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                // Remember the mapping so later fragments can reuse this backend.
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport,
                    0, params);
            params.instanceExecParams.add(instanceParam);
            continue;
        }
        Pair<PlanNode, PlanNode> pairNodes = findLeftmostNode(fragment.getPlanRoot());
        PlanNode fatherNode = pairNodes.first;
        PlanNode leftMostNode = pairNodes.second;
        /*
         * Case A:
         * if the left most is ScanNode, which means there is no child fragment,
         * we should assign fragment instances on every scan node hosts.
         * Case B:
         * if not, there should be exchange nodes to collect all data from child fragments(input fragments),
         * so we should assign fragment instances corresponding to the child fragments' host
         */
        if (!(leftMostNode instanceof ScanNode)) {
            // Case B: pick the input fragment with the highest parallelism as the
            // placement template.
            int inputFragmentIndex = 0;
            int maxParallelism = 0;
            int childrenCount = (fatherNode != null) ? fatherNode.getChildren().size() : 1;
            for (int j = 0; j < childrenCount; j++) {
                int currentChildFragmentParallelism
                        = fragmentExecParamsMap.get(fragment.getChild(j).getFragmentId()).instanceExecParams.size();
                if (currentChildFragmentParallelism > maxParallelism) {
                    maxParallelism = currentChildFragmentParallelism;
                    inputFragmentIndex = j;
                }
            }
            PlanFragmentId inputFragmentId = fragment.getChild(inputFragmentIndex).getFragmentId();
            int exchangeInstances = -1;
            if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable() != null) {
                exchangeInstances = ConnectContext.get().getSessionVariable().getExchangeInstanceParallel();
            }
            if (leftMostNode.getNumInstances() == 1) {
                exchangeInstances = 1;
            }
            if (exchangeInstances > 0 && fragmentExecParamsMap.get(inputFragmentId)
                    .instanceExecParams.size() > exchangeInstances) {
                // Cap instance count: spread the capped instances over the distinct
                // hosts of the input fragment, shuffled for load balance.
                Set<TNetworkAddress> hostSet = Sets.newHashSet();
                for (FInstanceExecParam execParams :
                        fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    hostSet.add(execParams.host);
                }
                List<TNetworkAddress> hosts = Lists.newArrayList(hostSet);
                Collections.shuffle(hosts, instanceRandom);
                for (int index = 0; index < exchangeInstances; index++) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null,
                            hosts.get(index % hosts.size()), 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            } else {
                // Mirror the input fragment's instance placement one-to-one.
                for (FInstanceExecParam execParams
                        : fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null, execParams.host, 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            }
            Collections.shuffle(params.instanceExecParams, instanceRandom);
            continue;
        }
        // Case A: leftmost node is a scan.
        int parallelExecInstanceNum = fragment.getParallelExecNum();
        if ((isColocateFragment(fragment, fragment.getPlanRoot())
                && fragmentIdToSeqToAddressMap.containsKey(fragment.getFragmentId())
                && fragmentIdToSeqToAddressMap.get(fragment.getFragmentId()).size() > 0)) {
            computeColocateJoinInstanceParam(fragment.getFragmentId(), parallelExecInstanceNum, params);
        } else if (bucketShuffleJoinController.isBucketShuffleJoin(fragment.getFragmentId().asInt())) {
            bucketShuffleJoinController.computeInstanceParam(fragment.getFragmentId(),
                    parallelExecInstanceNum, params);
        } else {
            // Plain scan: derive instances from the per-host scan-range assignment.
            for (Entry<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> entry : fragmentExecParamsMap.get(
                    fragment.getFragmentId()).scanRangeAssignment.entrySet()) {
                TNetworkAddress key = entry.getKey();
                Map<Integer, List<TScanRangeParams>> value = entry.getValue();
                for (Integer planNodeId : value.keySet()) {
                    List<TScanRangeParams> perNodeScanRanges = value.get(planNodeId);
                    List<List<TScanRangeParams>> perInstanceScanRanges = Lists.newArrayList();
                    List<Boolean> sharedScanOpts = Lists.newArrayList();
                    Optional<ScanNode> node = scanNodes.stream().filter(scanNode -> {
                        return scanNode.getId().asInt() == planNodeId;
                    }).findFirst();
                    if (!enablePipelineEngine || (node.isPresent() && node.get().getShouldColoScan())
                            || (node.isPresent() && node.get() instanceof FileScanNode)
                            || (node.isPresent() && node.get().shouldDisableSharedScan())) {
                        // Non-shared scan: split ranges across up to
                        // parallelExecInstanceNum instances.
                        int expectedInstanceNum = 1;
                        if (parallelExecInstanceNum > 1) {
                            expectedInstanceNum = Math.min(perNodeScanRanges.size(), parallelExecInstanceNum);
                        }
                        if (node.isPresent() && node.get().shouldUseOneInstance(ConnectContext.get())) {
                            expectedInstanceNum = 1;
                        }
                        perInstanceScanRanges = ListUtil.splitBySize(perNodeScanRanges,
                                expectedInstanceNum);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), false);
                    } else {
                        // Pipeline shared scan: every instance sees the full range
                        // list and shares the scanner.
                        int expectedInstanceNum = Math.min(parallelExecInstanceNum,
                                leftMostNode.getNumInstances());
                        expectedInstanceNum = Math.max(expectedInstanceNum, 1);
                        if (node.isPresent() && node.get().shouldUseOneInstance(ConnectContext.get())) {
                            expectedInstanceNum = 1;
                        }
                        perInstanceScanRanges = Collections.nCopies(expectedInstanceNum, perNodeScanRanges);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), true);
                    }
                    LOG.debug("scan range number per instance is: {}", perInstanceScanRanges.size());
                    for (int j = 0; j < perInstanceScanRanges.size(); j++) {
                        List<TScanRangeParams> scanRangeParams = perInstanceScanRanges.get(j);
                        boolean sharedScan = sharedScanOpts.get(j);
                        FInstanceExecParam instanceParam = new FInstanceExecParam(null, key, 0, params);
                        instanceParam.perNodeScanRanges.put(planNodeId, scanRangeParams);
                        instanceParam.perNodeSharedScans.put(planNodeId, sharedScan);
                        params.instanceExecParams.add(instanceParam);
                    }
                }
            }
        }
        if (params.instanceExecParams.isEmpty()) {
            // No scan ranges produced any instance (e.g. empty table): still run
            // one instance on a scheduler-picked backend so the fragment executes.
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            if (ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()
                    && !addressToBackendID.isEmpty()) {
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport, 0, params);
            params.instanceExecParams.add(instanceParam);
        }
    }
}
/**
 * Wires up runtime-filter routing for all fragments:
 * records, for every target filter id, the (instance id, brpc address) pairs that must
 * receive the merged filter, counts builders per filter id, and designates the first
 * instance of the topmost fragment as the merge point.
 *
 * @throws Exception if a backend brpc address cannot be resolved (toBrpcHost)
 */
private void assignRuntimeFilterAddr() throws Exception {
    for (PlanFragment fragment : fragments) {
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        // Every instance of a fragment that consumes a runtime filter is a target of that filter.
        for (RuntimeFilterId rid : fragment.getTargetRuntimeFilterIds()) {
            List<FRuntimeFilterTargetParam> targetFragments = ridToTargetParam.computeIfAbsent(rid,
                    k -> new ArrayList<>());
            for (final FInstanceExecParam instance : params.instanceExecParams) {
                targetFragments.add(new FRuntimeFilterTargetParam(instance.instanceId, toBrpcHost(instance.host)));
            }
        }
        // Each instance of a builder fragment produces one partial filter to be merged.
        for (RuntimeFilterId rid : fragment.getBuilderRuntimeFilterIds()) {
            ridToBuilderNum.merge(rid, params.instanceExecParams.size(), Integer::sum);
        }
    }
    // The merge node lives on the first instance of the root (index 0) fragment.
    FragmentExecParams uppermostParams = fragmentExecParamsMap.get(fragments.get(0).getFragmentId());
    runtimeFilterMergeAddr = toBrpcHost(uppermostParams.instanceExecParams.get(0).host);
    runtimeFilterMergeInstanceId = uppermostParams.instanceExecParams.get(0).instanceId;
}
/**
 * Decides whether the given fragment should be planned as a colocate fragment,
 * caching positive answers in {@code colocateFragmentIds}.
 */
private boolean isColocateFragment(PlanFragment planFragment, PlanNode node) {
    // Honor the per-session switch that disables colocate planning entirely.
    ConnectContext connectContext = ConnectContext.get();
    if (connectContext != null && connectContext.getSessionVariable().isDisableColocatePlan()) {
        return false;
    }
    // Fast path: this fragment was already classified as colocate.
    if (colocateFragmentIds.contains(node.getFragmentId().asInt())) {
        return true;
    }
    // First sighting of a fragment containing a colocate plan node: remember and report it.
    if (planFragment.hasColocatePlanNode()) {
        colocateFragmentIds.add(planFragment.getId().asInt());
        return true;
    }
    return false;
}
/**
 * Walks down the leftmost (first-child) chain of the plan until reaching a leaf
 * or an exchange boundary.
 *
 * @return (parent of leftmost node, leftmost node); parent is null when the input
 *         itself is already the leftmost node
 */
private Pair<PlanNode, PlanNode> findLeftmostNode(PlanNode plan) {
    PlanNode parent = null;
    PlanNode current = plan;
    while (current.getChildren().size() != 0 && !(current instanceof ExchangeNode)) {
        parent = current;
        current = current.getChild(0);
    }
    return Pair.of(parent, current);
}
/**
 * Returns the value mapped to {@code key}, inserting and returning {@code defaultVal}
 * when the key is absent (or mapped to null).
 * Implemented with {@link Map#computeIfAbsent} instead of the manual get/null-check/put
 * sequence; behavior is identical because defaultVal is never null at call sites.
 */
private <K, V> V findOrInsert(Map<K, V> m, final K key, final V defaultVal) {
    return m.computeIfAbsent(key, k -> defaultVal);
}
/**
 * Overload of {@link #findOrInsert(Map, Object, Object)} for scan-range lists:
 * returns the list mapped to {@code key}, inserting {@code defaultVal} when absent.
 * Uses {@link Map#computeIfAbsent} for the same get/put-if-missing semantics.
 */
private List<TScanRangeParams> findOrInsert(Map<Integer, List<TScanRangeParams>> m, Integer key,
        ArrayList<TScanRangeParams> defaultVal) {
    return m.computeIfAbsent(key, k -> defaultVal);
}
/**
 * Builds per-instance execution parameters for a colocate-join fragment.
 * Groups the fragment's bucket sequences by the backend address already chosen for each
 * bucket, splits each backend's buckets across up to {@code parallelExecInstanceNum}
 * instances, and records the resulting scan ranges both on the shared assignment map
 * and on each instance's own perNodeScanRanges.
 */
private void computeColocateJoinInstanceParam(PlanFragmentId fragmentId,
        int parallelExecInstanceNum, FragmentExecParams params) {
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(fragmentId);
    Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
    // address -> list of (bucketSeq, scanNodeId -> scan ranges), keeping only the scan
    // nodes that belong to this fragment.
    Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
            = Maps.newHashMap();
    for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges : bucketSeqToScanRange.entrySet()) {
        TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
        Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
        // Filter out scan nodes that were recorded for other fragments sharing the buckets.
        Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
        for (Integer scanNodeId : nodeScanRanges.keySet()) {
            if (scanNodeIds.contains(scanNodeId)) {
                filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
            }
        }
        Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
        if (!addressToScanRanges.containsKey(address)) {
            addressToScanRanges.put(address, Lists.newArrayList());
        }
        addressToScanRanges.get(address).add(filteredScanRanges);
    }
    FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
    for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
            : addressToScanRanges.entrySet()) {
        List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
        Map<Integer, List<TScanRangeParams>> range
                = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
        // One instance per backend unless parallelism is requested; never more instances
        // than there are buckets on this backend.
        int expectedInstanceNum = 1;
        if (parallelExecInstanceNum > 1) {
            expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
        }
        List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                = ListUtil.splitBySize(scanRange, expectedInstanceNum);
        for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                : perInstanceScanRanges) {
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, addressScanRange.getKey(), 0, params);
            for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                instanceParam.bucketSeqSet.add(nodeScanRangeMap.first);
                for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                        : nodeScanRangeMap.second.entrySet()) {
                    // Mirror the ranges into both the shared assignment map and the
                    // instance's own map; both are consumed later when building rpc params.
                    if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                        range.put(nodeScanRange.getKey(), Lists.newArrayList());
                        instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                    }
                    range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                    instanceParam.perNodeScanRanges.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                }
            }
            params.instanceExecParams.add(instanceParam);
        }
    }
}
/**
 * Counts, per backend host, how many scan-range replicas of all scan nodes live there.
 * Used as a tie-breaker when picking the least-loaded replica.
 *
 * @return host address -> replica count
 */
private Map<TNetworkAddress, Long> getReplicaNumPerHostForOlapTable() {
    Map<TNetworkAddress, Long> replicaNumPerHost = Maps.newHashMap();
    for (ScanNode scanNode : scanNodes) {
        List<TScanRangeLocations> locationsList = scanNode.getScanRangeLocations(0);
        // Be consistent with computeScanRangeAssignment(), which tolerates a null
        // locations list from a scan node.
        if (locationsList == null) {
            continue;
        }
        for (TScanRangeLocations locations : locationsList) {
            for (TScanRangeLocation location : locations.locations) {
                // merge() replaces the containsKey/get/put sequence.
                replicaNumPerHost.merge(location.server, 1L, Long::sum);
            }
        }
    }
    return replicaNumPerHost;
}
/**
 * Assigns every scan range of every scan node to a backend, dispatching to the
 * colocate, bucket-shuffle, or generic scheduler strategy depending on the fragment.
 * Scan nodes and their locations are shuffled first so load spreads across backends.
 *
 * @throws Exception if no healthy backend can be found for a scan range
 */
private void computeScanRangeAssignment() throws Exception {
    Map<TNetworkAddress, Long> assignedBytesPerHost = Maps.newHashMap();
    Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHostForOlapTable();
    // Randomize iteration order to avoid always loading the same hosts first.
    Collections.shuffle(scanNodes);
    for (ScanNode scanNode : scanNodes) {
        if (!(scanNode instanceof ExternalScanNode)) {
            isAllExternalScan = false;
        }
        List<TScanRangeLocations> locations;
        locations = scanNode.getScanRangeLocations(0);
        if (locations == null) {
            // Scan node with no assignable ranges (e.g. empty partition pruning result).
            continue;
        }
        Collections.shuffle(locations);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.computeIfAbsent(scanNode.getFragmentId(),
                k -> Sets.newHashSet());
        scanNodeIds.add(scanNode.getId().asInt());
        if (scanNode instanceof FileQueryScanNode) {
            fileScanRangeParamsMap.put(
                    scanNode.getId().asInt(), ((FileQueryScanNode) scanNode).getFileScanRangeParams());
        }
        FragmentScanRangeAssignment assignment
                = fragmentExecParamsMap.get(scanNode.getFragmentId()).scanRangeAssignment;
        boolean fragmentContainsColocateJoin = isColocateFragment(scanNode.getFragment(),
                scanNode.getFragment().getPlanRoot());
        boolean fragmentContainsBucketShuffleJoin = bucketShuffleJoinController
                .isBucketShuffleJoin(scanNode.getFragmentId().asInt(), scanNode.getFragment().getPlanRoot());
        // A fragment may be both colocate and bucket-shuffle; each strategy records its
        // own bookkeeping, so both are invoked when applicable.
        if (fragmentContainsColocateJoin) {
            computeScanRangeAssignmentByColocate((OlapScanNode) scanNode, assignedBytesPerHost, replicaNumPerHost);
        }
        if (fragmentContainsBucketShuffleJoin) {
            bucketShuffleJoinController.computeScanRangeAssignmentByBucket((OlapScanNode) scanNode,
                    idToBackend, addressToBackendID, replicaNumPerHost);
        }
        if (!(fragmentContainsColocateJoin || fragmentContainsBucketShuffleJoin)) {
            computeScanRangeAssignmentByScheduler(scanNode, locations, assignment, assignedBytesPerHost,
                    replicaNumPerHost);
        }
    }
}
/**
 * Assigns scan ranges for a colocate OLAP scan node bucket by bucket: every bucket
 * sequence is pinned to one backend address (chosen once via
 * getExecHostPortForFragmentIDAndBucketSeq) and all its tablet ranges are recorded
 * under that bucket in fragmentIdTobucketSeqToScanRangeMap.
 */
private void computeScanRangeAssignmentByColocate(
        final OlapScanNode scanNode, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
        fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
        fragmentIdTobucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
    }
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(scanNode.getFragmentId());
    for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
        List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
        // Only pick a host once per bucket; later scan nodes sharing the bucket reuse it
        // so colocated tablets stay on the same backend.
        if (!bucketSeqToAddress.containsKey(bucketSeq)) {
            getExecHostPortForFragmentIDAndBucketSeq(locations.get(0),
                    scanNode.getFragmentId(), bucketSeq, assignedBytesPerHost, replicaNumPerHost);
        }
        for (TScanRangeLocations location : locations) {
            Map<Integer, List<TScanRangeParams>> scanRanges =
                    findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
            List<TScanRangeParams> scanRangeParamsList =
                    findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
            TScanRangeParams scanRangeParams = new TScanRangeParams();
            scanRangeParams.scan_range = location.scan_range;
            scanRangeParamsList.add(scanRangeParams);
            updateScanRangeNumByScanRange(scanRangeParams);
        }
    }
}
/**
 * Chooses the backend that will execute the given bucket of a colocate fragment and
 * records its address in fragmentIdToSeqToAddressMap / addressToBackendID.
 */
private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
        PlanFragmentId fragmentId, Integer bucketSeq, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost)
        throws Exception {
    // Round-robin over the replicas; the chosen backend id comes back via the reference.
    Reference<Long> chosenBackendId = new Reference<>();
    selectBackendsByRoundRobin(seqLocation, assignedBytesPerHost, replicaNumPerHost, chosenBackendId);
    Backend chosenBackend = this.idToBackend.get(chosenBackendId.getRef());
    TNetworkAddress execAddress = new TNetworkAddress(chosenBackend.getHost(), chosenBackend.getBePort());
    this.addressToBackendID.put(execAddress, chosenBackendId.getRef());
    this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execAddress);
}
/**
 * Selects a replica location for one scan range. When local replica selection is
 * enabled, prefers replicas on the FE-local backend and, if allowed by config,
 * falls back to remote replicas when no local one is usable.
 *
 * @param backendIdRef out-parameter receiving the chosen backend id
 * @throws UserException when no usable replica exists (and fallback is disabled)
 */
public TScanRangeLocation selectBackendsByRoundRobin(TScanRangeLocations seqLocation,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    if (!Config.enable_local_replica_selection) {
        return selectBackendsByRoundRobin(seqLocation.getLocations(), assignedBytesPerHost, replicaNumPerHost,
                backendIdRef);
    }
    // Partition replicas into local (same host as this FE) and remote ones.
    List<TScanRangeLocation> localLocations = new ArrayList<>();
    List<TScanRangeLocation> nonlocalLocations = new ArrayList<>();
    long localBeId = Env.getCurrentSystemInfo().getBackendIdByHost(FrontendOptions.getLocalHostAddress());
    for (final TScanRangeLocation location : seqLocation.getLocations()) {
        if (location.backend_id == localBeId) {
            localLocations.add(location);
        } else {
            nonlocalLocations.add(location);
        }
    }
    try {
        return selectBackendsByRoundRobin(localLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    } catch (UserException ue) {
        // No local replica usable; optionally retry with remote replicas.
        if (!Config.enable_local_replica_selection_fallback) {
            throw ue;
        }
        return selectBackendsByRoundRobin(nonlocalLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    }
}
/**
 * Picks, among the given replica locations, the one on the host with the fewest
 * assigned bytes (ties broken by the smaller remaining replica count), then lets
 * SimpleScheduler validate/redirect the choice. Bookkeeping maps are updated:
 * each candidate host's pending replica count is decremented and the chosen host's
 * assigned-bytes counter is incremented by one unit.
 *
 * Uses primitive longs for the running minima; the original boxed {@code Long}
 * locals caused repeated autoboxing and an {@code equals} comparison for no benefit.
 *
 * @param backendIdRef out-parameter receiving the chosen backend id
 * @throws UserException when SimpleScheduler cannot produce a usable location
 */
public TScanRangeLocation selectBackendsByRoundRobin(List<TScanRangeLocation> locations,
        Map<TNetworkAddress, Long> assignedBytesPerHost, Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    long minAssignedBytes = Long.MAX_VALUE;
    long minReplicaNum = Long.MAX_VALUE;
    TScanRangeLocation minLocation = null;
    for (final TScanRangeLocation location : locations) {
        long assignedBytes = findOrInsert(assignedBytesPerHost, location.server, 0L);
        if (assignedBytes < minAssignedBytes || (assignedBytes == minAssignedBytes
                && replicaNumPerHost.get(location.server) < minReplicaNum)) {
            minAssignedBytes = assignedBytes;
            minReplicaNum = replicaNumPerHost.get(location.server);
            minLocation = location;
        }
    }
    // Every candidate replica for this range has now been "consumed" for tie-breaking.
    for (TScanRangeLocation location : locations) {
        replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
    }
    TScanRangeLocation location = SimpleScheduler.getLocation(minLocation, locations,
            this.idToBackend, backendIdRef);
    assignedBytesPerHost.put(location.server, assignedBytesPerHost.get(location.server) + 1L);
    return location;
}
/**
 * Generic (non-colocate, non-bucket-shuffle) scan range assignment: each range is
 * independently routed to the least-loaded replica host via round-robin selection
 * and recorded in the fragment's scan range assignment map.
 */
private void computeScanRangeAssignmentByScheduler(
        final ScanNode scanNode,
        final List<TScanRangeLocations> locations,
        FragmentScanRangeAssignment assignment,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    for (TScanRangeLocations scanRangeLocations : locations) {
        Reference<Long> backendIdRef = new Reference<Long>();
        TScanRangeLocation minLocation = selectBackendsByRoundRobin(scanRangeLocations,
                assignedBytesPerHost, replicaNumPerHost, backendIdRef);
        Backend backend = this.idToBackend.get(backendIdRef.getRef());
        TNetworkAddress execHostPort = new TNetworkAddress(backend.getHost(), backend.getBePort());
        this.addressToBackendID.put(execHostPort, backendIdRef.getRef());
        Map<Integer, List<TScanRangeParams>> scanRanges = findOrInsert(assignment, execHostPort,
                new HashMap<Integer, List<TScanRangeParams>>());
        List<TScanRangeParams> scanRangeParamsList = findOrInsert(scanRanges, scanNode.getId().asInt(),
                new ArrayList<TScanRangeParams>());
        TScanRangeParams scanRangeParams = new TScanRangeParams();
        scanRangeParams.scan_range = scanRangeLocations.scan_range;
        // Volume id of the selected replica lets BE prefer the right disk.
        scanRangeParams.setVolumeId(minLocation.volume_id);
        scanRangeParamsList.add(scanRangeParams);
        updateScanRangeNumByScanRange(scanRangeParams);
    }
}
/**
 * Adds the number of concrete ranges carried by the given scan-range param to the
 * running {@code scanRangeNum} counter (broker/file ranges count each sub-range,
 * a palo range counts as one).
 */
private void updateScanRangeNumByScanRange(TScanRangeParams param) {
    TScanRange scanRange = param.getScanRange();
    if (scanRange == null) {
        return;
    }
    TBrokerScanRange brokerScanRange = scanRange.getBrokerScanRange();
    if (brokerScanRange != null) {
        scanRangeNum += brokerScanRange.getRanges().size();
    }
    TExternalScanRange externalScanRange = scanRange.getExtScanRange();
    if (externalScanRange != null) {
        TFileScanRange fileScanRange = externalScanRange.getFileScanRange();
        if (fileScanRange != null) {
            scanRangeNum += fileScanRange.getRanges().size();
        }
    }
    if (scanRange.getPaloScanRange() != null) {
        scanRangeNum++;
    }
}
/**
 * Handles an execution-status report from a backend: merges profiles, propagates
 * failures into the coordinator status, collects load/export results when an
 * instance finishes, and forwards load progress to the load/progress managers.
 *
 * Fix: the pipeline branch previously dereferenced the context returned by
 * {@code pipelineExecContexts.get(...)} without a null check (the non-pipeline
 * branch already guards the analogous lookup), so a late or stray report after
 * cancellation could throw NPE. We now log and ignore such reports.
 */
public void updateFragmentExecStatus(TReportExecStatusParams params) {
    if (enablePipelineEngine) {
        PipelineExecContext ctx = pipelineExecContexts.get(Pair.of(params.getFragmentId(), params.getBackendId()));
        if (ctx == null) {
            // Report for a fragment/backend pair we no longer (or never) track.
            LOG.warn("unknown fragment id: {}, backend id: {}, query id: {}",
                    params.getFragmentId(), params.getBackendId(), DebugUtil.printId(queryId));
            return;
        }
        if (!ctx.updateProfile(params)) {
            // Already done; nothing more to merge for this context.
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            ctx.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        Status status = new Status(params.status);
        // A cancellation after all results were returned is expected, not an error.
        if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
            LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
                    DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
                    status.getErrorMsg());
            updateStatus(status, params.getFragmentInstanceId());
        }
        if (ctx.fragmentInstancesMap.get(params.fragment_instance_id).getIsDone() && params.isDone()) {
            collectInstanceResult(params);
            executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
        }
    } else {
        if (params.backend_num >= backendExecStates.size()) {
            LOG.warn("unknown backend number: {}, expected less than: {}",
                    params.backend_num, backendExecStates.size());
            return;
        }
        BackendExecState execState = backendExecStates.get(params.backend_num);
        if (!execState.updateProfile(params)) {
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            execState.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        Status status = new Status(params.status);
        if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
            LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
                    DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
                    status.getErrorMsg());
            updateStatus(status, params.getFragmentInstanceId());
        }
        if (execState.done && params.isDone()) {
            collectInstanceResult(params);
            executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
        }
    }
    if (params.isSetLoadedRows() && jobId != -1) {
        Env.getCurrentEnv().getLoadManager().updateJobProgress(
                jobId, params.getBackendId(), params.getQueryId(), params.getFragmentInstanceId(),
                params.getLoadedRows(), params.getLoadedBytes(), params.isDone());
        Env.getCurrentEnv().getProgressManager().updateProgress(String.valueOf(jobId),
                params.getQueryId(), params.getFragmentInstanceId(), params.getFinishedScanRanges());
    }
}

/** Copies the optional per-instance result fields (load/export/commit info) from a
 *  final report into the coordinator's aggregates. Shared by both engine branches,
 *  which previously duplicated this chain verbatim. */
private void collectInstanceResult(TReportExecStatusParams params) {
    if (params.isSetDeltaUrls()) {
        updateDeltas(params.getDeltaUrls());
    }
    if (params.isSetLoadCounters()) {
        updateLoadCounters(params.getLoadCounters());
    }
    if (params.isSetTrackingUrl()) {
        trackingUrl = params.getTrackingUrl();
    }
    if (params.isSetExportFiles()) {
        updateExportFiles(params.getExportFiles());
    }
    if (params.isSetCommitInfos()) {
        updateCommitInfos(params.getCommitInfos());
    }
    if (params.isSetErrorTabletInfos()) {
        updateErrorTabletInfos(params.getErrorTabletInfos());
    }
}
/*
* Waiting for the coordinator to finish executing.
* return false if waiting timeout.
* return true otherwise.
* NOTICE: return true does not mean that coordinator executed success,
* the caller should check queryStatus for result.
*
* We divide the entire waiting process into multiple rounds,
* with a maximum of 30 seconds per round. And after each round of waiting,
* check the status of the BE. If the BE status is abnormal, the wait is ended
* and the result is returned. Otherwise, continue to the next round of waiting.
* This method mainly avoids the problem that the Coordinator waits for a long time
* after some BE can no longer return the result due to some exception, such as BE is down.
*/
/**
 * Waits up to {@code timeoutS} seconds for all instances to finish, in rounds of at
 * most 30 seconds; after each round the backend health is checked so a dead BE does
 * not make us wait out the full timeout.
 *
 * @return false on timeout (or interruption), true otherwise; true does NOT imply
 *         success — callers must still inspect queryStatus.
 *
 * Fix: the InterruptedException was previously swallowed, losing the thread's
 * interrupt status. We now restore the flag and stop waiting; retrying the await
 * with the flag set would just throw again immediately and busy-spin the loop.
 */
public boolean join(int timeoutS) {
    final long fixedMaxWaitTime = 30;
    long leftTimeoutS = timeoutS;
    while (leftTimeoutS > 0) {
        long waitTime = Math.min(leftTimeoutS, fixedMaxWaitTime);
        boolean awaitRes = false;
        try {
            awaitRes = executionProfile.awaitAllInstancesDone(waitTime);
        } catch (InterruptedException e) {
            // Preserve the interruption for the caller and treat it like a timeout.
            Thread.currentThread().interrupt();
            return false;
        }
        if (awaitRes) {
            return true;
        }
        if (!checkBackendState()) {
            // A backend died; stop waiting and let the caller read queryStatus.
            return true;
        }
        leftTimeoutS -= waitTime;
    }
    return false;
}
/*
* Check the state of backends in needCheckBackendExecStates.
* return true if all of them are OK. Otherwise, return false.
*/
/**
 * Verifies the health of all backends registered for health checking.
 * On the first unhealthy backend, sets queryStatus accordingly and returns false;
 * returns true when every checked backend is OK.
 */
private boolean checkBackendState() {
    if (enablePipelineEngine) {
        for (PipelineExecContext ctx : needCheckPipelineExecContexts) {
            if (!ctx.isBackendStateHealthy()) {
                queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
                        + ctx.backend.getId() + " is down");
                return false;
            }
        }
        return true;
    }
    for (BackendExecState backendExecState : needCheckBackendExecStates) {
        if (!backendExecState.isBackendStateHealthy()) {
            queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
                    + backendExecState.backend.getId() + " is down");
            return false;
        }
    }
    return true;
}
/** Returns true when every fragment instance of this query has reported completion. */
public boolean isDone() {
    return executionProfile.isAllInstancesDone();
}
// Type alias: backend address -> (scan node id -> scan ranges assigned to it).
class FragmentScanRangeAssignment
        extends HashMap<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> {
}
// Type alias: bucket sequence -> (scan node id -> scan ranges of that bucket).
class BucketSeqToScanRange extends HashMap<Integer, Map<Integer, List<TScanRangeParams>>> {
}
/**
 * Encapsulates scan range assignment for fragments that perform a bucket shuffle join:
 * detects such fragments, pins each bucket sequence to a backend (balancing bucket
 * counts per backend), and builds per-instance exec params from the per-bucket ranges.
 */
class BucketShuffleJoinController {
    // fragment id -> (bucket seq -> (scan node id -> scan ranges))
    private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdBucketSeqToScanRangeMap = Maps.newHashMap();
    // fragment id -> (bucket seq -> backend address chosen for that bucket)
    private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap
            = Maps.newHashMap();
    // fragment id -> (backend id -> number of buckets already assigned to it)
    private final Map<PlanFragmentId, Map<Long, Integer>> fragmentIdToBuckendIdBucketCountMap = Maps.newHashMap();
    // fragment id -> total bucket count of the fragment's table
    private final Map<PlanFragmentId, Integer> fragmentIdToBucketNumMap = Maps.newHashMap();
    // fragment ids already identified as containing a bucket shuffle join
    private final Set<Integer> bucketShuffleFragmentIds = new HashSet<>();
    private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds;
    public BucketShuffleJoinController(Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds) {
        this.fragmentIdToScanNodeIds = fragmentIdToScanNodeIds;
    }
    // Recursively checks whether the plan subtree of the given fragment contains a
    // bucket shuffle hash join; caches positive answers in bucketShuffleFragmentIds.
    private boolean isBucketShuffleJoin(int fragmentId, PlanNode node) {
        if (ConnectContext.get() != null) {
            // Only applies when the session enables bucket shuffle join (or Nereids).
            if (!ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin()
                    && !ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                return false;
            }
        }
        if (fragmentId != node.getFragmentId().asInt()) {
            return false;
        }
        if (bucketShuffleFragmentIds.contains(fragmentId)) {
            return true;
        }
        if (node instanceof HashJoinNode) {
            HashJoinNode joinNode = (HashJoinNode) node;
            if (joinNode.isBucketShuffle()) {
                bucketShuffleFragmentIds.add(joinNode.getFragmentId().asInt());
                return true;
            }
        }
        for (PlanNode childNode : node.getChildren()) {
            if (isBucketShuffleJoin(fragmentId, childNode)) {
                return true;
            }
        }
        return false;
    }
    // Cache-only lookup: true iff the fragment was previously classified.
    private boolean isBucketShuffleJoin(int fragmentId) {
        return bucketShuffleFragmentIds.contains(fragmentId);
    }
    private int getFragmentBucketNum(PlanFragmentId fragmentId) {
        return fragmentIdToBucketNumMap.get(fragmentId);
    }
    // Picks the backend for a bucket: among the bucket's replicas, prefer the backend
    // with the fewest buckets assigned so far (ties broken by smaller remaining replica
    // count), then records the choice and decrements the replica counters.
    private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
            PlanFragmentId fragmentId, Integer bucketSeq, ImmutableMap<Long, Backend> idToBackend,
            Map<TNetworkAddress, Long> addressToBackendID,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        Map<Long, Integer> buckendIdToBucketCountMap = fragmentIdToBuckendIdBucketCountMap.get(fragmentId);
        int maxBucketNum = Integer.MAX_VALUE;
        long buckendId = Long.MAX_VALUE;
        Long minReplicaNum = Long.MAX_VALUE;
        for (TScanRangeLocation location : seqLocation.locations) {
            if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) < maxBucketNum) {
                maxBucketNum = buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0);
                buckendId = location.backend_id;
                minReplicaNum = replicaNumPerHost.get(location.server);
            } else if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) == maxBucketNum
                    && replicaNumPerHost.get(location.server) < minReplicaNum) {
                buckendId = location.backend_id;
                minReplicaNum = replicaNumPerHost.get(location.server);
            }
        }
        Reference<Long> backendIdRef = new Reference<>();
        // SimpleScheduler may redirect to another replica if the preferred BE is blacklisted.
        TNetworkAddress execHostPort = SimpleScheduler.getHost(buckendId,
                seqLocation.locations, idToBackend, backendIdRef);
        if (backendIdRef.getRef() != buckendId) {
            buckendIdToBucketCountMap.put(backendIdRef.getRef(),
                    buckendIdToBucketCountMap.getOrDefault(backendIdRef.getRef(), 0) + 1);
        } else {
            buckendIdToBucketCountMap.put(buckendId, buckendIdToBucketCountMap.getOrDefault(buckendId, 0) + 1);
        }
        for (TScanRangeLocation location : seqLocation.locations) {
            replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
        }
        addressToBackendID.put(execHostPort, backendIdRef.getRef());
        this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
    }
    // Assigns an OLAP scan node's ranges bucket by bucket; each bucket's backend is
    // chosen once and reused by all scan nodes that share the bucket.
    private void computeScanRangeAssignmentByBucket(
            final OlapScanNode scanNode, ImmutableMap<Long, Backend> idToBackend,
            Map<TNetworkAddress, Long> addressToBackendID,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
            int bucketNum = 0;
            if (scanNode.getOlapTable().isColocateTable()) {
                bucketNum = scanNode.getOlapTable().getDefaultDistributionInfo().getBucketNum();
            } else {
                bucketNum = (int) (scanNode.getTotalTabletsNum());
            }
            fragmentIdToBucketNumMap.put(scanNode.getFragmentId(), bucketNum);
            fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
            fragmentIdBucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
            fragmentIdToBuckendIdBucketCountMap.put(scanNode.getFragmentId(), new HashMap<>());
        }
        Map<Integer, TNetworkAddress> bucketSeqToAddress
                = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(scanNode.getFragmentId());
        for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
            List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
            if (!bucketSeqToAddress.containsKey(bucketSeq)) {
                getExecHostPortForFragmentIDAndBucketSeq(locations.get(0), scanNode.getFragmentId(),
                        bucketSeq, idToBackend, addressToBackendID, replicaNumPerHost);
            }
            for (TScanRangeLocations location : locations) {
                Map<Integer, List<TScanRangeParams>> scanRanges =
                        findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
                List<TScanRangeParams> scanRangeParamsList =
                        findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
                TScanRangeParams scanRangeParams = new TScanRangeParams();
                scanRangeParams.scan_range = location.scan_range;
                scanRangeParamsList.add(scanRangeParams);
                updateScanRangeNumByScanRange(scanRangeParams);
            }
        }
    }
    // Builds per-instance exec params for a bucket-shuffle fragment: groups buckets by
    // their assigned backend, splits each backend's buckets across instances, and
    // mirrors the ranges into the shared assignment map and each instance's own map.
    private void computeInstanceParam(PlanFragmentId fragmentId,
            int parallelExecInstanceNum, FragmentExecParams params) {
        Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(fragmentId);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
        Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
                = Maps.newHashMap();
        for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges
                : bucketSeqToScanRange.entrySet()) {
            TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
            Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
            // Keep only the scan nodes that belong to this fragment.
            Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
            for (Integer scanNodeId : nodeScanRanges.keySet()) {
                if (scanNodeIds.contains(scanNodeId)) {
                    filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
                }
            }
            Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                    = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
            if (!addressToScanRanges.containsKey(address)) {
                addressToScanRanges.put(address, Lists.newArrayList());
            }
            addressToScanRanges.get(address).add(filteredScanRanges);
        }
        FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
        for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
                : addressToScanRanges.entrySet()) {
            List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
            Map<Integer, List<TScanRangeParams>> range
                    = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
            // At most one instance per bucket on this backend.
            int expectedInstanceNum = 1;
            if (parallelExecInstanceNum > 1) {
                expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
            }
            List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                    = ListUtil.splitBySize(scanRange, expectedInstanceNum);
            for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                    : perInstanceScanRanges) {
                FInstanceExecParam instanceParam = new FInstanceExecParam(
                        null, addressScanRange.getKey(), 0, params);
                for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                    instanceParam.addBucketSeq(nodeScanRangeMap.first);
                    for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                            : nodeScanRangeMap.second.entrySet()) {
                        if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                            range.put(nodeScanRange.getKey(), Lists.newArrayList());
                            instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                        }
                        range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                        instanceParam.perNodeScanRanges.get(nodeScanRange.getKey())
                                .addAll(nodeScanRange.getValue());
                    }
                }
                params.instanceExecParams.add(instanceParam);
            }
        }
    }
}
// fragment id -> (bucket seq -> per-scan-node scan ranges), for colocate fragments.
private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdTobucketSeqToScanRangeMap = Maps.newHashMap();
// fragment id -> (bucket seq -> backend address chosen for that bucket).
private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap = Maps.newHashMap();
// fragment id -> ids of the scan nodes belonging to that fragment.
private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds = Maps.newHashMap();
// fragment ids already classified as colocate (cache for isColocateFragment).
private final Set<Integer> colocateFragmentIds = new HashSet<>();
// Handles detection and assignment for bucket-shuffle-join fragments.
private final BucketShuffleJoinController bucketShuffleJoinController
        = new BucketShuffleJoinController(fragmentIdToScanNodeIds);
public class BackendExecState {
TExecPlanFragmentParams rpcParams;
PlanFragmentId fragmentId;
boolean initiated;
volatile boolean done;
boolean hasCanceled;
int profileFragmentId;
RuntimeProfile instanceProfile;
RuntimeProfile loadChannelProfile;
TNetworkAddress brpcAddress;
TNetworkAddress address;
Backend backend;
long lastMissingHeartbeatTime = -1;
TUniqueId instanceId;
public BackendExecState(PlanFragmentId fragmentId, int instanceId, int profileFragmentId,
TExecPlanFragmentParams rpcParams, Map<TNetworkAddress, Long> addressToBackendID,
RuntimeProfile loadChannelProfile) {
this.profileFragmentId = profileFragmentId;
this.fragmentId = fragmentId;
this.rpcParams = rpcParams;
this.initiated = false;
this.done = false;
FInstanceExecParam fi = fragmentExecParamsMap.get(fragmentId).instanceExecParams.get(instanceId);
this.instanceId = fi.instanceId;
this.address = fi.host;
this.backend = idToBackend.get(addressToBackendID.get(address));
this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
String name = "Instance " + DebugUtil.printId(fi.instanceId) + " (host=" + address + ")";
this.loadChannelProfile = loadChannelProfile;
this.instanceProfile = new RuntimeProfile(name);
this.hasCanceled = false;
this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
}
/**
* Some information common to all Fragments does not need to be sent repeatedly.
* Therefore, when we confirm that a certain BE has accepted the information,
* we will delete the information in the subsequent Fragment to avoid repeated sending.
* This information can be obtained from the cache of BE.
*/
public void unsetFields() {
this.rpcParams.unsetDescTbl();
this.rpcParams.unsetFileScanParams();
this.rpcParams.unsetCoord();
this.rpcParams.unsetQueryGlobals();
this.rpcParams.unsetResourceInfo();
this.rpcParams.setIsSimplifiedParam(true);
}
public synchronized boolean updateProfile(TReportExecStatusParams params) {
if (this.done) {
return false;
}
if (params.isSetProfile()) {
instanceProfile.update(params.profile);
}
if (params.isSetLoadChannelProfile()) {
loadChannelProfile.update(params.loadChannelProfile);
}
this.done = params.done;
if (statsErrorEstimator != null) {
statsErrorEstimator.updateExactReturnedRows(params);
}
return true;
}
public synchronized void printProfile(StringBuilder builder) {
this.instanceProfile.computeTimeInProfile();
this.instanceProfile.prettyPrint(builder, "");
}
public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
if (LOG.isDebugEnabled()) {
LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
+ " fragment instance id={}, reason: {}",
this.initiated, this.done, this.hasCanceled, backend.getId(),
DebugUtil.printId(fragmentInstanceId()), cancelReason.name());
}
try {
if (!this.initiated) {
return false;
}
if (this.done) {
return false;
}
if (this.hasCanceled) {
return false;
}
Span span = ConnectContext.get() != null
? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
.setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
: Telemetry.getNoopSpan();
try (Scope scope = span.makeCurrent()) {
BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
fragmentInstanceId(), cancelReason);
} catch (RpcException e) {
span.recordException(e);
LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
brpcAddress.getPort());
SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
} finally {
span.end();
}
this.hasCanceled = true;
} catch (Exception e) {
LOG.warn("catch a exception", e);
return false;
}
return true;
}
/**
 * Recomputes time percentages in this instance's runtime profile after validating
 * the profile fragment id.
 *
 * @param maxFragmentId upper bound for a valid profile fragment id
 * @return false when profileFragmentId is out of range, true otherwise
 */
public synchronized boolean computeTimeInProfile(int maxFragmentId) {
    // The guard accepts profileFragmentId == maxFragmentId, i.e. the valid range is
    // the closed interval [0, maxFragmentId]. The previous log message claimed the
    // half-open range "[0, {})", contradicting the condition; the message is fixed to
    // match the actual check. (If the id is meant to index a list of size
    // maxFragmentId, the condition itself should use '>=' -- confirm with callers.)
    if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
        LOG.warn("profileFragmentId {} should be in [0, {}]", profileFragmentId, maxFragmentId);
        return false;
    }
    instanceProfile.computeTimeInProfile();
    return true;
}
/**
 * Checks whether the backend hosting this instance is still healthy: it must not
 * have missed a heartbeat since this state was created while also being not alive.
 */
public boolean isBackendStateHealthy() {
    boolean missedHeartbeat = backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime;
    if (missedHeartbeat && !backend.isAlive()) {
        LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                backend.getId(), jobId);
        return false;
    }
    return true;
}
/** Builds the per-instance info record used by query statistics / SHOW PROC. */
public FragmentInstanceInfo buildFragmentInstanceInfo() {
    QueryStatisticsItem.FragmentInstanceInfo.Builder builder =
            new QueryStatisticsItem.FragmentInstanceInfo.Builder()
                    .instanceId(fragmentInstanceId())
                    .fragmentId(String.valueOf(fragmentId))
                    .address(this.address);
    return builder.build();
}
/** Shortcut for the instance id carried inside the thrift exec params. */
private TUniqueId fragmentInstanceId() {
    TPlanFragmentExecParams execParams = this.rpcParams.params;
    return execParams.getFragmentInstanceId();
}
}
/**
 * Per-backend execution context for one fragment when the pipeline engine is used.
 * One context covers all local instances (rpcParams.local_params) of the fragment
 * on one backend; per-instance profiles are tracked in fragmentInstancesMap.
 */
public class PipelineExecContext {
    TPipelineFragmentParams rpcParams;
    PlanFragmentId fragmentId;
    // True once the exec RPC for this context has been sent.
    boolean initiated;
    // True once every local instance has delivered its final report.
    volatile boolean done;
    boolean hasCanceled;
    // Instance id -> runtime profile for each local instance of this fragment.
    Map<TUniqueId, RuntimeProfile> fragmentInstancesMap;
    RuntimeProfile loadChannelProfile;
    int cancelProgress = 0;
    int profileFragmentId;
    TNetworkAddress brpcAddress;
    TNetworkAddress address;
    Backend backend;
    long lastMissingHeartbeatTime = -1;
    // Number of local instances that have reported done so far.
    long profileReportProgress = 0;
    private final int numInstances;

    public PipelineExecContext(PlanFragmentId fragmentId, int profileFragmentId,
            TPipelineFragmentParams rpcParams, Long backendId,
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap,
            RuntimeProfile loadChannelProfile) {
        this.profileFragmentId = profileFragmentId;
        this.fragmentId = fragmentId;
        this.rpcParams = rpcParams;
        this.numInstances = rpcParams.local_params.size();
        this.fragmentInstancesMap = fragmentInstancesMap;
        this.loadChannelProfile = loadChannelProfile;
        this.initiated = false;
        this.done = false;
        this.backend = idToBackend.get(backendId);
        this.address = new TNetworkAddress(backend.getHost(), backend.getBePort());
        this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
        this.hasCanceled = false;
        this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
    }

    /**
     * Some information common to all Fragments does not need to be sent repeatedly.
     * Therefore, when we confirm that a certain BE has accepted the information,
     * we will delete the information in the subsequent Fragment to avoid repeated sending.
     * This information can be obtained from the cache of BE.
     */
    public void unsetFields() {
        this.rpcParams.unsetDescTbl();
        this.rpcParams.unsetFileScanParams();
        this.rpcParams.unsetCoord();
        this.rpcParams.unsetQueryGlobals();
        this.rpcParams.unsetResourceInfo();
        this.rpcParams.setIsSimplifiedParam(true);
    }

    /**
     * Merges a per-instance status report into the matching instance profile.
     * Rejects duplicate "done" reports for an instance; marks the whole context
     * done once all instances have reported done.
     */
    public synchronized boolean updateProfile(TReportExecStatusParams params) {
        RuntimeProfile profile = fragmentInstancesMap.get(params.fragment_instance_id);
        if (params.done && profile.getIsDone()) {
            // Duplicate final report for this instance; ignore.
            return false;
        }
        if (params.isSetProfile()) {
            profile.update(params.profile);
        }
        if (params.isSetLoadChannelProfile()) {
            loadChannelProfile.update(params.loadChannelProfile);
        }
        if (params.done) {
            profile.setIsDone(true);
            profileReportProgress++;
        }
        if (profileReportProgress == numInstances) {
            this.done = true;
        }
        return true;
    }

    // Pretty-prints every instance profile of this fragment into the builder.
    public synchronized void printProfile(StringBuilder builder) {
        this.fragmentInstancesMap.values().stream().forEach(p -> {
            p.computeTimeInProfile();
            p.prettyPrint(builder, "");
        });
    }

    /**
     * Cancels every not-yet-finished local instance of this fragment on the backend.
     * Returns false when no cancel RPC was issued or an RPC attempt failed.
     */
    public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
        if (!this.initiated) {
            return false;
        }
        if (this.done) {
            return false;
        }
        if (this.hasCanceled) {
            return false;
        }
        for (TPipelineInstanceParams localParam : rpcParams.local_params) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
                        + " fragment instance id={}, reason: {}",
                        this.initiated, this.done, this.hasCanceled, backend.getId(),
                        DebugUtil.printId(localParam.fragment_instance_id), cancelReason.name());
            }
            RuntimeProfile profile = fragmentInstancesMap.get(localParam.fragment_instance_id);
            if (profile.getIsDone() || profile.getIsCancel()) {
                // Skip instances that already finished or were already canceled.
                continue;
            }
            this.hasCanceled = true;
            try {
                // Wrap each cancel RPC in a client span when tracing is available.
                Span span = ConnectContext.get() != null
                        ? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
                                .setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
                        : Telemetry.getNoopSpan();
                try (Scope scope = span.makeCurrent()) {
                    BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
                            localParam.fragment_instance_id, cancelReason);
                } catch (RpcException e) {
                    span.recordException(e);
                    LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
                            brpcAddress.getPort());
                    SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
                } finally {
                    span.end();
                }
            } catch (Exception e) {
                LOG.warn("catch a exception", e);
                return false;
            }
        }
        if (!this.hasCanceled) {
            // Every instance was already done/canceled -- no RPC was sent.
            return false;
        }
        for (int i = 0; i < this.numInstances; i++) {
            fragmentInstancesMap.get(rpcParams.local_params.get(i).fragment_instance_id).setIsCancel(true);
        }
        cancelProgress = numInstances;
        return true;
    }

    /**
     * Validates profileFragmentId against the accepted range.
     * NOTE(review): the log message says "[0, {})" (exclusive upper bound) but the
     * guard accepts profileFragmentId == maxFragmentId -- confirm which is intended.
     */
    public synchronized boolean computeTimeInProfile(int maxFragmentId) {
        if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
            LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
            return false;
        }
        return true;
    }

    // False when the backend missed a heartbeat since this context was built and died.
    public boolean isBackendStateHealthy() {
        if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
            LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                    backend.getId(), jobId);
            return false;
        }
        return true;
    }

    // One FragmentInstanceInfo per local instance, for query statistics display.
    public List<QueryStatisticsItem.FragmentInstanceInfo> buildFragmentInstanceInfo() {
        return this.rpcParams.local_params.stream().map(it -> new FragmentInstanceInfo.Builder()
                .instanceId(it.fragment_instance_id).fragmentId(String.valueOf(fragmentId))
                .address(this.address).build()).collect(Collectors.toList());
    }
}
/**
* A set of BackendExecState for same Backend
*/
/**
 * Groups all BackendExecState objects that target the same backend so their
 * exec-plan RPCs can be batched into a single TExecPlanFragmentParamsList call.
 */
public class BackendExecStates {
    long beId;
    TNetworkAddress brpcAddr;
    List<BackendExecState> states = Lists.newArrayList();
    // When true the BE defers fragment start until execPlanFragmentStartAsync runs.
    boolean twoPhaseExecution = false;
    ScopedSpan scopedSpan = new ScopedSpan();

    public BackendExecStates(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
    }

    public void addState(BackendExecState state) {
        this.states.add(state);
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (BackendExecState state : states) {
            if (first) {
                first = false;
                continue;
            }
            state.unsetFields();
        }
    }

    /**
     * Sends all fragments of this batch to the BE in one async RPC. An RpcException
     * is converted into an already-completed future carrying an error status, so
     * callers handle transport failures uniformly through the future.
     */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TExecPlanFragmentParamsList paramsList = new TExecPlanFragmentParamsList();
            for (BackendExecState state : states) {
                state.initiated = true;
                paramsList.addToParamsList(state.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Second phase of two-phase execution: tells the BE to actually start the query.
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Wraps an RpcException into a completed future with a THRIFT_RPC_ERROR status.
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                return get();
            }
        };
    }
}
/**
 * Groups all PipelineExecContext objects that target the same backend so their
 * pipeline fragments can be sent in one batched RPC.
 * NOTE(review): mirrors BackendExecStates almost line-for-line (including
 * futureWithException) -- consider extracting the shared logic.
 */
public class PipelineExecContexts {
    long beId;
    TNetworkAddress brpcAddr;
    List<PipelineExecContext> ctxs = Lists.newArrayList();
    // When true the BE defers fragment start until execPlanFragmentStartAsync runs.
    boolean twoPhaseExecution = false;
    ScopedSpan scopedSpan = new ScopedSpan();
    // Total instance count across all contexts on this backend.
    int instanceNumber;

    public PipelineExecContexts(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution,
            int instanceNumber) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
        this.instanceNumber = instanceNumber;
    }

    public void addContext(PipelineExecContext ctx) {
        this.ctxs.add(ctx);
    }

    public int getInstanceNumber() {
        return instanceNumber;
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (PipelineExecContext ctx : ctxs) {
            if (first) {
                first = false;
                continue;
            }
            ctx.unsetFields();
        }
    }

    /**
     * Sends all pipeline fragments of this batch to the BE in one async RPC; an
     * RpcException is converted into a completed future carrying an error status.
     */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TPipelineFragmentParamsList paramsList = new TPipelineFragmentParamsList();
            for (PipelineExecContext cts : ctxs) {
                cts.initiated = true;
                paramsList.addToParamsList(cts.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Second phase of two-phase execution: tells the BE to actually start the query.
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Wraps an RpcException into a completed future with a THRIFT_RPC_ERROR status.
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                return get();
            }
        };
    }
}
/**
 * Per-fragment execution parameters: which instances run where, their scan ranges,
 * destinations and sender counts. Produces the thrift structures sent to the BEs
 * for both the classic engine (toThrift) and the pipeline engine (toTPipelineParams).
 */
protected class FragmentExecParams {
    public PlanFragment fragment;
    public List<TPlanFragmentDestination> destinations = Lists.newArrayList();
    // Exchange node id -> number of senders feeding that exchange.
    public Map<Integer, Integer> perExchNumSenders = Maps.newHashMap();
    public List<PlanFragmentId> inputFragments = Lists.newArrayList();
    public List<FInstanceExecParam> instanceExecParams = Lists.newArrayList();
    public FragmentScanRangeAssignment scanRangeAssignment = new FragmentScanRangeAssignment();

    public FragmentExecParams(PlanFragment fragment) {
        this.fragment = fragment;
    }

    /**
     * Builds one TExecPlanFragmentParams per instance for the classic (non-pipeline)
     * engine. {@code backendNum} is a running counter shared across all fragments.
     */
    List<TExecPlanFragmentParams> toThrift(int backendNum) {
        List<TExecPlanFragmentParams> paramsList = Lists.newArrayList();
        for (int i = 0; i < instanceExecParams.size(); ++i) {
            final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
            TExecPlanFragmentParams params = new TExecPlanFragmentParams();
            params.setProtocolVersion(PaloInternalServiceVersion.V1);
            params.setFragment(fragment.toThrift());
            params.setDescTbl(descTable);
            params.setParams(new TPlanFragmentExecParams());
            params.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
            params.params.setQueryId(queryId);
            params.params.setFragmentInstanceId(instanceExecParam.instanceId);
            Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
            if (scanRanges == null) {
                scanRanges = Maps.newHashMap();
            }
            params.params.setPerNodeScanRanges(scanRanges);
            params.params.setPerExchNumSenders(perExchNumSenders);
            params.params.setDestinations(destinations);
            params.params.setSenderId(i);
            params.params.setNumSenders(instanceExecParams.size());
            params.setCoord(coordAddress);
            params.setBackendNum(backendNum++);
            params.setQueryGlobals(queryGlobals);
            params.setQueryOptions(queryOptions);
            params.query_options.setEnablePipelineEngine(false);
            params.params.setSendQueryStatisticsWithEveryBatch(
                    fragment.isTransferQueryStatisticsWithEveryBatch());
            params.params.setRuntimeFilterParams(new TRuntimeFilterParams());
            params.params.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
            // Only the runtime-filter merge instance carries the full target/builder maps.
            if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    if (!ridToTargetParam.containsKey(rf.getFilterId())) {
                        continue;
                    }
                    List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
                    rf.computeUseRemoteRfOpt();
                    if (rf.getUseRemoteRfOpt()) {
                        // V2 layout: group target instance ids by BE address.
                        Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            } else {
                                targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
                                        new TRuntimeFilterTargetParamsV2());
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_addr
                                        = targetParam.targetFragmentInstanceAddr;
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        = new ArrayList<>();
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            }
                        }
                        params.params.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
                                new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
                    } else {
                        // V1 layout: one flat target param per (instance, address) pair.
                        List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
                                    targetParam.targetFragmentInstanceAddr));
                        }
                        params.params.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
                                targetParams);
                    }
                }
                for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
                    params.params.runtime_filter_params.putToRuntimeFilterBuilderNum(
                            entry.getKey().asInt(), entry.getValue());
                }
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    params.params.runtime_filter_params.putToRidToRuntimeFilter(
                            rf.getFilterId().asInt(), rf.toThrift());
                }
            }
            params.setFileScanParams(fileScanRangeParamsMap);
            paramsList.add(params);
        }
        return paramsList;
    }

    /**
     * Builds one TPipelineFragmentParams per backend host (with one
     * TPipelineInstanceParams per instance) for the pipeline engine.
     */
    Map<TNetworkAddress, TPipelineFragmentParams> toTPipelineParams(int backendNum) {
        long memLimit = queryOptions.getMemLimit();
        // Colocate joins split the fragment's memory budget among its instances.
        if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
            int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceExecParams.size());
            memLimit = queryOptions.getMemLimit() / rate;
        }
        // Fixed raw type: was "new HashMap()".
        Map<TNetworkAddress, TPipelineFragmentParams> res = new HashMap<>();
        for (int i = 0; i < instanceExecParams.size(); ++i) {
            final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
            // Lazily create the per-host params the first time the host is seen.
            if (!res.containsKey(instanceExecParam.host)) {
                TPipelineFragmentParams params = new TPipelineFragmentParams();
                params.setProtocolVersion(PaloInternalServiceVersion.V1);
                params.setDescTbl(descTable);
                params.setQueryId(queryId);
                params.setPerExchNumSenders(perExchNumSenders);
                params.setDestinations(destinations);
                params.setNumSenders(instanceExecParams.size());
                params.setCoord(coordAddress);
                params.setQueryGlobals(queryGlobals);
                params.setQueryOptions(queryOptions);
                params.query_options.setEnablePipelineEngine(true);
                params.query_options.setMemLimit(memLimit);
                params.setSendQueryStatisticsWithEveryBatch(
                        fragment.isTransferQueryStatisticsWithEveryBatch());
                params.setFragment(fragment.toThrift());
                params.setLocalParams(Lists.newArrayList());
                if (tWorkloadGroups != null) {
                    params.setWorkloadGroups(tWorkloadGroups);
                }
                params.setFileScanParams(fileScanRangeParamsMap);
                res.put(instanceExecParam.host, params);
            }
            TPipelineFragmentParams params = res.get(instanceExecParam.host);
            TPipelineInstanceParams localParams = new TPipelineInstanceParams();
            localParams.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
            localParams.setFragmentInstanceId(instanceExecParam.instanceId);
            Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
            Map<Integer, Boolean> perNodeSharedScans = instanceExecParam.perNodeSharedScans;
            if (scanRanges == null) {
                scanRanges = Maps.newHashMap();
                perNodeSharedScans = Maps.newHashMap();
            }
            localParams.setPerNodeScanRanges(scanRanges);
            localParams.setPerNodeSharedScans(perNodeSharedScans);
            localParams.setSenderId(i);
            localParams.setBackendNum(backendNum++);
            localParams.setRuntimeFilterParams(new TRuntimeFilterParams());
            localParams.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
            // Only the runtime-filter merge instance carries the full target/builder maps.
            if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    if (!ridToTargetParam.containsKey(rf.getFilterId())) {
                        continue;
                    }
                    List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
                    rf.computeUseRemoteRfOpt();
                    if (rf.getUseRemoteRfOpt()) {
                        // V2 layout: group target instance ids by BE address.
                        Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            } else {
                                targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
                                        new TRuntimeFilterTargetParamsV2());
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_addr
                                        = targetParam.targetFragmentInstanceAddr;
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        = new ArrayList<>();
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            }
                        }
                        localParams.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
                                new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
                    } else {
                        List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
                                    targetParam.targetFragmentInstanceAddr));
                        }
                        localParams.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
                                targetParams);
                    }
                }
                for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
                    localParams.runtime_filter_params.putToRuntimeFilterBuilderNum(
                            entry.getKey().asInt(), entry.getValue());
                }
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    localParams.runtime_filter_params.putToRidToRuntimeFilter(
                            rf.getFilterId().asInt(), rf.toThrift());
                }
            }
            params.getLocalParams().add(localParams);
        }
        return res;
    }

    // Appends a compact textual form of the given scan ranges (olap and ES) to sb.
    public void appendScanRange(StringBuilder sb, List<TScanRangeParams> params) {
        sb.append("range=[");
        int idx = 0;
        for (TScanRangeParams range : params) {
            TPaloScanRange paloScanRange = range.getScanRange().getPaloScanRange();
            if (paloScanRange != null) {
                if (idx++ != 0) {
                    sb.append(",");
                }
                sb.append("{tid=").append(paloScanRange.getTabletId())
                        .append(",ver=").append(paloScanRange.getVersion()).append("}");
            }
            TEsScanRange esScanRange = range.getScanRange().getEsScanRange();
            if (esScanRange != null) {
                sb.append("{ index=").append(esScanRange.getIndex())
                        .append(", shardid=").append(esScanRange.getShardId())
                        .append("}");
            }
        }
        sb.append("]");
    }

    // Appends a debug trace of this fragment's plan and instance placement to sb.
    public void appendTo(StringBuilder sb) {
        sb.append("{plan=");
        fragment.getPlanRoot().appendTrace(sb);
        sb.append(",instance=[");
        for (int i = 0; i < instanceExecParams.size(); ++i) {
            if (i != 0) {
                sb.append(",");
            }
            TNetworkAddress address = instanceExecParams.get(i).host;
            Map<Integer, List<TScanRangeParams>> scanRanges =
                    scanRangeAssignment.get(address);
            sb.append("{");
            sb.append("id=").append(DebugUtil.printId(instanceExecParams.get(i).instanceId));
            sb.append(",host=").append(instanceExecParams.get(i).host);
            if (scanRanges == null) {
                sb.append("}");
                continue;
            }
            sb.append(",range=[");
            int eIdx = 0;
            for (Map.Entry<Integer, List<TScanRangeParams>> entry : scanRanges.entrySet()) {
                if (eIdx++ != 0) {
                    sb.append(",");
                }
                sb.append("id").append(entry.getKey()).append(",");
                appendScanRange(sb, entry.getValue());
            }
            sb.append("]");
            sb.append("}");
        }
        sb.append("]");
        sb.append("}");
    }
}
// NOTE(review): removed a stray '|' character here -- it is not valid Java and looks
// like a file-concatenation/extraction artifact between two copies of this class.
class Coordinator implements CoordInterface {
private static final Logger LOG = LogManager.getLogger(Coordinator.class);
private static final String localIP = FrontendOptions.getLocalHostAddress();
// Used to randomize instance placement decisions.
private static final Random instanceRandom = new Random();
// Overall status of the query; set to the first error reported.
Status queryStatus = new Status();
// BE brpc address -> backend id, filled during scheduling.
Map<TNetworkAddress, Long> addressToBackendID = Maps.newHashMap();
// Snapshot of alive backends taken in prepare().
private ImmutableMap<Long, Backend> idToBackend = ImmutableMap.of();
private final TDescriptorTable descTable;
private Map<Integer, TFileScanRangeParams> fileScanRangeParamsMap = Maps.newHashMap();
// Query-wide globals (time zone, now string, ...) sent to every BE.
private final TQueryGlobals queryGlobals = new TQueryGlobals();
private TQueryOptions queryOptions;
private TNetworkAddress coordAddress;
// Guards mutable coordinator state shared between query and report threads.
private final Lock lock = new ReentrantLock();
private boolean returnedAllResults;
private final Map<PlanFragmentId, FragmentExecParams> fragmentExecParamsMap = Maps.newHashMap();
private final List<PlanFragment> fragments;
private int instanceTotalNum;
// Per-backend batched exec state (classic engine) and contexts (pipeline engine).
private Map<Long, BackendExecStates> beToExecStates = Maps.newHashMap();
private Map<Long, PipelineExecContexts> beToPipelineExecCtxs = Maps.newHashMap();
private final List<BackendExecState> backendExecStates = Lists.newArrayList();
private final Map<Pair<Integer, Long>, PipelineExecContext> pipelineExecContexts = new HashMap<>();
// Subsets of the above that must be health-checked while the query runs.
private final List<BackendExecState> needCheckBackendExecStates = Lists.newArrayList();
private final List<PipelineExecContext> needCheckPipelineExecContexts = Lists.newArrayList();
private ResultReceiver receiver;
private final List<ScanNode> scanNodes;
private int scanRangeNum = 0;
// Ids of all fragment instances of this query (for QeProcessor registration).
private final Set<TUniqueId> instanceIds = Sets.newHashSet();
private final boolean isBlockQuery;
private int numReceivedRows = 0;
// Load-related results collected from BEs.
private List<String> deltaUrls;
private Map<String, String> loadCounters;
private String trackingUrl;
private List<String> exportFiles;
private final List<TTabletCommitInfo> commitInfos = Lists.newArrayList();
private final List<TErrorTabletInfo> errorTabletInfos = Lists.newArrayList();
// Job id for load jobs; -1 for plain queries.
private long jobId = -1;
private TUniqueId queryId;
private final boolean needReport;
// Base id from which fragment instance ids are derived (queryId.lo + 1).
private final TUniqueId nextInstanceId;
private long timeoutDeadline;
private boolean enableShareHashTableForBroadcastJoin = false;
private boolean enablePipelineEngine = false;
private boolean fasterFloatConvert = false;
private int maxMsgSizeOfResultReceiver = TConfiguration.DEFAULT_MAX_MESSAGE_SIZE;
// Runtime filter merge coordination: where filters are merged and their targets.
public TNetworkAddress runtimeFilterMergeAddr;
public TUniqueId runtimeFilterMergeInstanceId;
public Map<RuntimeFilterId, List<FRuntimeFilterTargetParam>> ridToTargetParam = Maps.newHashMap();
public List<RuntimeFilter> assignedRuntimeFilters = new ArrayList<>();
public Map<RuntimeFilterId, Integer> ridToBuilderNum = Maps.newHashMap();
private PointQueryExec pointExec = null;
// Compares estimated vs. actual returned rows; may be null.
private StatsErrorEstimator statsErrorEstimator;
// Workload groups forwarded to BEs with pipeline fragment params.
public void setTWorkloadGroups(List<TPipelineWorkloadGroup> tWorkloadGroups) {
    this.tWorkloadGroups = tWorkloadGroups;
}

private List<TPipelineWorkloadGroup> tWorkloadGroups = Lists.newArrayList();

// Aggregated execution profile for the whole query.
private final ExecutionProfile executionProfile;

public ExecutionProfile getExecutionProfile() {
    return executionProfile;
}

// Stays true only while every scan node seen so far is an external one.
private boolean isAllExternalScan = true;
/**
 * Convenience constructor that additionally wires a StatsErrorEstimator used to
 * compare estimated vs. actual returned rows.
 */
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner,
        StatsErrorEstimator statsErrorEstimator) {
    this(context, analyzer, planner);
    this.statsErrorEstimator = statsErrorEstimator;
}
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner) {
this.isBlockQuery = planner.isBlockQuery();
this.queryId = context.queryId();
this.fragments = planner.getFragments();
this.scanNodes = planner.getScanNodes();
this.descTable = planner.getDescTable().toThrift();
this.returnedAllResults = false;
this.enableShareHashTableForBroadcastJoin = context.getSessionVariable().enableShareHashTableForBroadcastJoin;
this.enablePipelineEngine = context.getSessionVariable().getEnablePipelineEngine()
&& (fragments.size() > 0 && fragments.get(0).getSink() instanceof ResultSink);
this.fasterFloatConvert = context.getSessionVariable().fasterFloatConvert();
initQueryOptions(context);
setFromUserProperty(context);
this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
this.queryGlobals.setTimestampMs(System.currentTimeMillis());
this.queryGlobals.setNanoSeconds(LocalDateTime.now().getNano());
this.queryGlobals.setLoadZeroTolerance(false);
if (context.getSessionVariable().getTimeZone().equals("CST")) {
this.queryGlobals.setTimeZone(TimeUtils.DEFAULT_TIME_ZONE);
} else {
this.queryGlobals.setTimeZone(context.getSessionVariable().getTimeZone());
}
this.needReport = context.getSessionVariable().enableProfile();
this.nextInstanceId = new TUniqueId();
nextInstanceId.setHi(queryId.hi);
nextInstanceId.setLo(queryId.lo + 1);
this.assignedRuntimeFilters = planner.getRuntimeFilters();
this.executionProfile = new ExecutionProfile(queryId, fragments.size());
this.maxMsgSizeOfResultReceiver = context.getSessionVariable().getMaxMsgSizeOfResultReceiver();
}
/**
 * Constructor used by load jobs (no ConnectContext): all inputs are supplied
 * explicitly and profile reporting is always enabled.
 * NOTE(review): unlike the query constructor, this one never sets nano seconds on
 * queryGlobals -- confirm whether that is intentional.
 */
public Coordinator(Long jobId, TUniqueId queryId, DescriptorTable descTable, List<PlanFragment> fragments,
        List<ScanNode> scanNodes, String timezone, boolean loadZeroTolerance) {
    this.isBlockQuery = true;
    this.jobId = jobId;
    this.queryId = queryId;
    this.descTable = descTable.toThrift();
    this.fragments = fragments;
    this.scanNodes = scanNodes;
    this.queryOptions = new TQueryOptions();
    this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
    this.queryGlobals.setTimestampMs(System.currentTimeMillis());
    this.queryGlobals.setTimeZone(timezone);
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
    this.queryOptions.setBeExecVersion(Config.be_exec_version);
    // Load jobs always report progress/profile.
    this.needReport = true;
    // Fragment instance ids start right after the query id.
    this.nextInstanceId = new TUniqueId();
    nextInstanceId.setHi(queryId.hi);
    nextInstanceId.setLo(queryId.lo + 1);
    this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
/**
 * Applies per-user resource limits (cpu and exec memory) configured in the auth
 * system; the session-level exec_mem_limit takes precedence when set.
 */
private void setFromUserProperty(ConnectContext connectContext) {
    String qualifiedUser = connectContext.getQualifiedUser();
    // CPU limit is only applied when a positive value is configured for the user.
    int cpuLimit = Env.getCurrentEnv().getAuth().getCpuResourceLimit(qualifiedUser);
    if (cpuLimit > 0) {
        TResourceLimit resourceLimit = new TResourceLimit();
        resourceLimit.setCpuLimit(cpuLimit);
        this.queryOptions.setResourceLimit(resourceLimit);
    }
    // Session variable wins over the per-user property.
    long maxExecMemByte = connectContext.getSessionVariable().getMaxExecMemByte();
    long memLimit = maxExecMemByte > 0 ? maxExecMemByte :
            Env.getCurrentEnv().getAuth().getExecMemLimit(qualifiedUser);
    if (memLimit > 0) {
        // Overwrite all memory-related options with the effective limit.
        this.queryOptions.setMemLimit(memLimit);
        this.queryOptions.setMaxReservation(memLimit);
        this.queryOptions.setInitialReservationTotalClaims(memLimit);
        this.queryOptions.setBufferPoolLimit(memLimit);
    }
}
public long getJobId() {
    return jobId;
}

public TUniqueId getQueryId() {
    return queryId;
}

public int getScanRangeNum() {
    return scanRangeNum;
}

public void setQueryId(TUniqueId queryId) {
    this.queryId = queryId;
}

public void setQueryType(TQueryType type) {
    this.queryOptions.setQueryType(type);
}

public void setExecPipEngine(boolean vec) {
    this.queryOptions.setEnablePipelineEngine(vec);
}

public Status getExecStatus() {
    return queryStatus;
}

public List<String> getDeltaUrls() {
    return deltaUrls;
}

public Map<String, String> getLoadCounters() {
    return loadCounters;
}

public String getTrackingUrl() {
    return trackingUrl;
}

public void setExecMemoryLimit(long execMemoryLimit) {
    this.queryOptions.setMemLimit(execMemoryLimit);
}

public void setLoadMemLimit(long loadMemLimit) {
    this.queryOptions.setLoadMemLimit(loadMemLimit);
}

// Sets both the legacy query timeout and the execution timeout (seconds).
public void setTimeout(int timeout) {
    this.queryOptions.setQueryTimeout(timeout);
    this.queryOptions.setExecutionTimeout(timeout);
}

public void setLoadZeroTolerance(boolean loadZeroTolerance) {
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
}
/**
 * Resets per-run execution state so an export job can be retried: clears exec
 * states/contexts, the query status, the export file list and the health-check
 * lists. Guarded by the coordinator lock.
 */
public void clearExportStatus() {
    lock.lock();
    try {
        this.backendExecStates.clear();
        this.pipelineExecContexts.clear();
        this.queryStatus.setStatus(new Status());
        if (this.exportFiles == null) {
            this.exportFiles = Lists.newArrayList();
        }
        this.exportFiles.clear();
        this.needCheckBackendExecStates.clear();
        this.needCheckPipelineExecContexts.clear();
    } finally {
        lock.unlock();
    }
}
// Tablet commit infos collected from BEs during load; used at transaction commit.
public List<TTabletCommitInfo> getCommitInfos() {
    return commitInfos;
}

// Per-tablet error details reported by BEs during load.
public List<TErrorTabletInfo> getErrorTabletInfos() {
    return errorTabletInfos;
}
/**
 * Returns a sorted map from "host:brpcPort" to the number of fragment instances
 * scheduled on that backend, reading pipeline contexts or classic exec states
 * depending on the engine mode.
 */
public Map<String, Integer> getBeToInstancesNum() {
    Map<String, Integer> result = Maps.newTreeMap();
    if (enablePipelineEngine) {
        for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
            String key = ctxs.brpcAddr.hostname + ":" + ctxs.brpcAddr.port;
            result.put(key, ctxs.getInstanceNumber());
        }
    } else {
        for (BackendExecStates states : beToExecStates.values()) {
            String key = states.brpcAddr.hostname + ":" + states.brpcAddr.port;
            result.put(key, states.states.size());
        }
    }
    return result;
}
// Total number of fragment instances scheduled for this query.
@Override
public int getInstanceTotalNum() {
    return instanceTotalNum;
}
/**
 * First step of exec(): builds a FragmentExecParams for every fragment, links each
 * sending fragment to its destination fragment, records the coordinator address and
 * snapshots the current backend map.
 */
private void prepare() {
    for (PlanFragment fragment : fragments) {
        fragmentExecParamsMap.put(fragment.getFragmentId(), new FragmentExecParams(fragment));
    }
    // Wire up inputs: a fragment with a DataStreamSink feeds its destination fragment.
    for (PlanFragment fragment : fragments) {
        if (!(fragment.getSink() instanceof DataStreamSink)) {
            continue;
        }
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getDestFragment().getFragmentId());
        params.inputFragments.add(fragment.getFragmentId());
    }
    coordAddress = new TNetworkAddress(localIP, Config.rpc_port);
    // Snapshot the backend map once so scheduling sees a consistent view.
    this.idToBackend = Env.getCurrentSystemInfo().getIdToBackend();
    if (LOG.isDebugEnabled()) {
        LOG.debug("idToBackend size={}", idToBackend.size());
        for (Map.Entry<Long, Backend> entry : idToBackend.entrySet()) {
            Long backendID = entry.getKey();
            Backend backend = entry.getValue();
            LOG.debug("backend: {}-{}-{}", backendID, backend.getHost(), backend.getBePort());
        }
    }
}
// Convenience wrappers around the coordinator-wide ReentrantLock.
private void lock() {
    lock.lock();
}

private void unlock() {
    lock.unlock();
}
/**
 * Logs (at debug level only) a one-line trace of every fragment's instance
 * placement and scan-range assignment for this query.
 */
private void traceInstance() {
    if (!LOG.isDebugEnabled()) {
        return;
    }
    StringBuilder trace = new StringBuilder();
    trace.append("query id=").append(DebugUtil.printId(queryId)).append(",");
    trace.append("fragment=[");
    boolean first = true;
    for (Map.Entry<PlanFragmentId, FragmentExecParams> entry : fragmentExecParamsMap.entrySet()) {
        if (!first) {
            trace.append(",");
        }
        first = false;
        trace.append(entry.getKey());
        entry.getValue().appendTo(trace);
    }
    trace.append("]");
    LOG.debug(trace.toString());
}
/**
 * Entry point of query execution: computes scan/fragment scheduling, registers
 * instances, wires up the result receiver (for queries) or load progress (for
 * loads), then dispatches fragments to BEs via the pipeline or classic path.
 */
@Override
public void exec() throws Exception {
    if (LOG.isDebugEnabled() && !scanNodes.isEmpty()) {
        LOG.debug("debug: in Coordinator::exec. query id: {}, planNode: {}",
                DebugUtil.printId(queryId), scanNodes.get(0).treeToThrift());
    }
    if (LOG.isDebugEnabled() && !fragments.isEmpty()) {
        LOG.debug("debug: in Coordinator::exec. query id: {}, fragment: {}",
                DebugUtil.printId(queryId), fragments.get(0).toThrift());
    }
    // prepare information
    prepare();
    // compute Fragment Instance
    computeScanRangeAssignment();
    computeFragmentExecParams();
    traceInstance();
    QeProcessorImpl.INSTANCE.registerInstances(queryId, instanceIds.size());
    // fragments[0] is the root (output) fragment.
    PlanFragmentId topId = fragments.get(0).getFragmentId();
    FragmentExecParams topParams = fragmentExecParamsMap.get(topId);
    DataSink topDataSink = topParams.fragment.getSink();
    this.timeoutDeadline = System.currentTimeMillis() + queryOptions.getExecutionTimeout() * 1000L;
    if (topDataSink instanceof ResultSink || topDataSink instanceof ResultFileSink) {
        // Query (or outfile) path: results flow back through a single receiver
        // attached to the first instance of the root fragment.
        TNetworkAddress execBeAddr = topParams.instanceExecParams.get(0).host;
        receiver = new ResultReceiver(queryId, topParams.instanceExecParams.get(0).instanceId,
                addressToBackendID.get(execBeAddr), toBrpcHost(execBeAddr), this.timeoutDeadline,
                this.maxMsgSizeOfResultReceiver);
        if (LOG.isDebugEnabled()) {
            LOG.debug("dispatch query job: {} to {}", DebugUtil.printId(queryId),
                    topParams.instanceExecParams.get(0).host);
        }
        if (topDataSink instanceof ResultFileSink
                && ((ResultFileSink) topDataSink).getStorageType() == StorageBackend.StorageType.BROKER) {
            // Writing results through a broker: resolve the broker address co-located
            // with the executing BE.
            ResultFileSink topResultFileSink = (ResultFileSink) topDataSink;
            FsBroker broker = Env.getCurrentEnv().getBrokerMgr()
                    .getBroker(topResultFileSink.getBrokerName(), execBeAddr.getHostname());
            topResultFileSink.setBrokerAddr(broker.host, broker.port);
        }
    } else {
        // Load path: no receiver; enable progress reporting and init job tracking.
        this.queryOptions.setIsReportSuccess(true);
        deltaUrls = Lists.newArrayList();
        loadCounters = Maps.newHashMap();
        List<Long> relatedBackendIds = Lists.newArrayList(addressToBackendID.values());
        Env.getCurrentEnv().getLoadManager().initJobProgress(jobId, queryId, instanceIds,
                relatedBackendIds);
        Env.getCurrentEnv().getProgressManager().addTotalScanNums(String.valueOf(jobId), scanRangeNum);
        LOG.info("dispatch load job: {} to {}", DebugUtil.printId(queryId), addressToBackendID.keySet());
    }
    executionProfile.markInstances(instanceIds);
    if (enablePipelineEngine) {
        sendPipelineCtx();
    } else {
        sendFragment();
    }
}
/**
* The logic for sending query plan fragments is as follows:
* First, plan fragments are dependent. According to the order in "fragments" list,
* it must be ensured that on the BE side, the next fragment instance can be executed
* only after the previous fragment instance is ready,
* <p>
* In the previous logic, we will send fragment instances in sequence through RPC,
* and will wait for the RPC of the previous fragment instance to return successfully
* before sending the next one. But for some complex queries, this may lead to too many RPCs.
* <p>
* The optimized logic is as follows:
 * 1. If the number of fragment instances is <= 2, the original logic is still used
 * to complete the sending of fragments through at most 2 RPCs.
 * 2. If the number of fragment instances is >= 3, first group all fragments by BE,
 * and send all fragment instances to the corresponding BE node through the FIRST rpc.
 * These fragment instances will only perform the preparation phase and will not actually be executed.
* After that, the execution logic of all fragment instances is started through the SECOND RPC.
* <p>
* After optimization, a query on a BE node will only send two RPCs at most.
* Thereby reducing the "send fragment timeout" error caused by too many RPCs and BE unable to process in time.
*
* @throws TException
* @throws RpcException
* @throws UserException
*/
/**
 * Dispatches all fragment instances to BEs via the classic (non-pipeline) engine.
 * Instances are grouped per BE so at most two RPCs per BE are needed: one to
 * prepare (and, for <=2 fragments, also execute), and an optional second RPC to
 * trigger execution when two-phase dispatch is used.
 */
private void sendFragment() throws TException, RpcException, UserException {
    lock();
    try {
        // Count instances per host so each BE knows how many instances share it.
        Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            for (FInstanceExecParam fi : params.instanceExecParams) {
                hostCounter.add(fi.host);
            }
        }
        int backendIdx = 0;
        int profileFragmentId = 0;
        long memoryLimit = queryOptions.getMemLimit();
        beToExecStates.clear();
        // 2-phase protocol (prepare then start) only pays off with >= 2 fragments.
        boolean twoPhaseExecution = fragments.size() >= 2;
        for (PlanFragment fragment : fragments) {
            FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
            int instanceNum = params.instanceExecParams.size();
            Preconditions.checkState(instanceNum > 0);
            instanceTotalNum += instanceNum;
            List<TExecPlanFragmentParams> tParams = params.toThrift(backendIdx);
            // Colocate join: split the memory limit among co-located instances.
            if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
                int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceNum);
                long newMemory = memoryLimit / rate;
                for (TExecPlanFragmentParams tParam : tParams) {
                    tParam.query_options.setMemLimit(newMemory);
                }
            }
            // For loads, only the first fragment's backends are health-checked later.
            boolean needCheckBackendState = false;
            if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                needCheckBackendState = true;
            }
            int instanceId = 0;
            for (TExecPlanFragmentParams tParam : tParams) {
                BackendExecState execState =
                        new BackendExecState(fragment.getFragmentId(), instanceId++,
                                profileFragmentId, tParam, this.addressToBackendID,
                                executionProfile.getLoadChannelProfile());
                tParam.setFragmentNumOnHost(hostCounter.count(execState.address));
                tParam.setBackendId(execState.backend.getId());
                // In two-phase mode the BE only prepares and waits for the start RPC.
                tParam.setNeedWaitExecutionTrigger(twoPhaseExecution);
                backendExecStates.add(execState);
                if (needCheckBackendState) {
                    needCheckBackendExecStates.add(execState);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("add need check backend {} for fragment, {} job: {}",
                                execState.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                    }
                }
                // Group exec states per BE so all of one BE's instances go in one RPC.
                BackendExecStates states = beToExecStates.get(execState.backend.getId());
                if (states == null) {
                    states = new BackendExecStates(execState.backend.getId(), execState.brpcAddress,
                            twoPhaseExecution);
                    beToExecStates.putIfAbsent(execState.backend.getId(), states);
                }
                states.addState(execState);
                ++backendIdx;
            }
            profileFragmentId += 1;
        }
        // Phase 1: send all per-BE batches asynchronously, then wait for all acks.
        List<Triple<BackendExecStates, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                futures = Lists.newArrayList();
        Context parentSpanContext = Context.current();
        for (BackendExecStates states : beToExecStates.values()) {
            Span span = Telemetry.getNoopSpan();
            if (ConnectContext.get() != null) {
                span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                        .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
            }
            states.scopedSpan = new ScopedSpan(span);
            states.unsetFields();
            BackendServiceProxy proxy = BackendServiceProxy.getInstance();
            futures.add(ImmutableTriple.of(states, proxy, states.execRemoteFragmentsAsync(proxy)));
        }
        waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
        if (twoPhaseExecution) {
            // Phase 2: every BE prepared successfully; trigger actual execution.
            futures.clear();
            for (BackendExecStates states : beToExecStates.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                states.scopedSpan = new ScopedSpan(span);
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(states, proxy, states.execPlanFragmentStartAsync(proxy)));
            }
            waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
        }
        attachInstanceProfileToFragmentProfile();
    } finally {
        unlock();
    }
}
/**
 * Dispatches all fragment instances to BEs via the pipeline engine, mirroring
 * sendFragment(): instances are grouped per BE, sent with one prepare RPC per BE,
 * and optionally triggered by a second start RPC when two-phase dispatch is used.
 */
private void sendPipelineCtx() throws TException, RpcException, UserException {
    lock();
    try {
        // Count instances per host so each BE knows how many instances share it.
        Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            for (FInstanceExecParam fi : params.instanceExecParams) {
                hostCounter.add(fi.host);
            }
        }
        int backendIdx = 0;
        int profileFragmentId = 0;
        beToPipelineExecCtxs.clear();
        // 2-phase protocol (prepare then start) only pays off with >= 2 fragments.
        boolean twoPhaseExecution = fragments.size() >= 2;
        for (PlanFragment fragment : fragments) {
            FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
            int instanceNum = params.instanceExecParams.size();
            Preconditions.checkState(instanceNum > 0);
            // Keep instanceTotalNum consistent with the non-pipeline path
            // (sendFragment) so getInstanceTotalNum() is accurate for both engines.
            instanceTotalNum += instanceNum;
            Map<TNetworkAddress, TPipelineFragmentParams> tParams = params.toTPipelineParams(backendIdx);
            // For loads, only the first fragment's backends are health-checked later.
            boolean needCheckBackendState = false;
            if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                needCheckBackendState = true;
            }
            // Pre-create one profile per instance, shared by all contexts of this fragment.
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap = new HashMap<TUniqueId, RuntimeProfile>();
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                for (TPipelineInstanceParams instanceParam : entry.getValue().local_params) {
                    String name = "Instance " + DebugUtil.printId(instanceParam.fragment_instance_id)
                            + " (host=" + entry.getKey() + ")";
                    fragmentInstancesMap.put(instanceParam.fragment_instance_id, new RuntimeProfile(name));
                }
            }
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                Long backendId = this.addressToBackendID.get(entry.getKey());
                PipelineExecContext pipelineExecContext = new PipelineExecContext(fragment.getFragmentId(),
                        profileFragmentId, entry.getValue(), backendId, fragmentInstancesMap,
                        executionProfile.getLoadChannelProfile());
                entry.getValue().setFragmentNumOnHost(hostCounter.count(pipelineExecContext.address));
                entry.getValue().setBackendId(pipelineExecContext.backend.getId());
                // In two-phase mode the BE only prepares and waits for the start RPC.
                entry.getValue().setNeedWaitExecutionTrigger(twoPhaseExecution);
                entry.getValue().setFragmentId(fragment.getFragmentId().asInt());
                pipelineExecContexts.put(Pair.of(fragment.getFragmentId().asInt(), backendId), pipelineExecContext);
                if (needCheckBackendState) {
                    needCheckPipelineExecContexts.add(pipelineExecContext);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("add need check backend {} for fragment, {} job: {}",
                                pipelineExecContext.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                    }
                }
                // Group contexts per BE so all of one BE's fragments go in one RPC.
                PipelineExecContexts ctxs = beToPipelineExecCtxs.get(pipelineExecContext.backend.getId());
                if (ctxs == null) {
                    ctxs = new PipelineExecContexts(pipelineExecContext.backend.getId(),
                            pipelineExecContext.brpcAddress, twoPhaseExecution,
                            entry.getValue().getFragmentNumOnHost());
                    beToPipelineExecCtxs.putIfAbsent(pipelineExecContext.backend.getId(), ctxs);
                }
                ctxs.addContext(pipelineExecContext);
                ++backendIdx;
            }
            profileFragmentId += 1;
        }
        // Phase 1: send all per-BE batches asynchronously, then wait for all acks.
        List<Triple<PipelineExecContexts, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                futures = Lists.newArrayList();
        Context parentSpanContext = Context.current();
        for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
            Span span = Telemetry.getNoopSpan();
            if (ConnectContext.get() != null) {
                span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                        .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
            }
            ctxs.scopedSpan = new ScopedSpan(span);
            ctxs.unsetFields();
            BackendServiceProxy proxy = BackendServiceProxy.getInstance();
            futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execRemoteFragmentsAsync(proxy)));
        }
        waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
        if (twoPhaseExecution) {
            // Phase 2: every BE prepared successfully; trigger actual execution.
            futures.clear();
            for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                ctxs.scopedSpan = new ScopedSpan(span);
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execPlanFragmentStartAsync(proxy)));
            }
            waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
        }
        attachInstanceProfileToFragmentProfile();
    } finally {
        unlock();
    }
}
/**
 * Waits for each per-BE "exec fragments" RPC (classic engine) to complete within
 * the remaining query deadline. On any failure the whole query is cancelled and a
 * RpcException/UserException is thrown; THRIFT_RPC_ERROR additionally blacklists
 * the BE and drops its cached proxy.
 *
 * @param futures   (states, proxy, pending RPC result) per BE
 * @param leftTimeMs remaining budget before the query deadline
 * @param operation human-readable RPC name used in error messages
 */
private void waitRpc(List<Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>>> futures,
        long leftTimeMs,
        String operation) throws RpcException, UserException {
    if (leftTimeMs <= 0) {
        long elapsed = (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout();
        String msg = String.format(
                "timeout before waiting %s rpc, query timeout:%d, already elapsed:%d, left for this:%d",
                operation, queryOptions.getExecutionTimeout(), elapsed, leftTimeMs);
        LOG.warn("Query {} {}", DebugUtil.printId(queryId), msg);
        throw new UserException(msg);
    }
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            // The channel to this BE is likely broken; drop the cached proxy.
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = String.format(
                    "timeout when waiting for %s rpc, query timeout:%d, left timeout for this operation:%d",
                    operation, queryOptions.getExecutionTimeout(), timeoutMs / 1000);
            LOG.warn("Query {} {}", DebugUtil.printId(queryId), errMsg);
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                // Any single BE failure fails the whole query.
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            span.recordException(e);
            throw e;
        } finally {
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
/**
 * Pipeline-engine counterpart of waitRpc(): waits for each per-BE "exec
 * fragments" RPC within the remaining query deadline. On any failure the whole
 * query is cancelled and a RpcException/UserException is thrown; THRIFT_RPC_ERROR
 * additionally blacklists the BE and drops its cached proxy.
 *
 * @param futures   (contexts, proxy, pending RPC result) per BE
 * @param leftTimeMs remaining budget before the query deadline
 * @param operation human-readable RPC name used in error messages
 */
private void waitPipelineRpc(List<Triple<PipelineExecContexts, BackendServiceProxy,
        Future<PExecPlanFragmentResult>>> futures, long leftTimeMs,
        String operation) throws RpcException, UserException {
    if (leftTimeMs <= 0) {
        long elapsed = (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout();
        String msg = String.format(
                "timeout before waiting %s rpc, query timeout:%d, already elapsed:%d, left for this:%d",
                operation, queryOptions.getExecutionTimeout(), elapsed, leftTimeMs);
        LOG.warn("Query {} {}", DebugUtil.printId(queryId), msg);
        throw new UserException(msg);
    }
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<PipelineExecContexts, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            // The channel to this BE is likely broken; drop the cached proxy.
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = String.format(
                    "timeout when waiting for %s rpc, query timeout:%d, left timeout for this operation:%d",
                    operation, queryOptions.getExecutionTimeout(), timeoutMs / 1000);
            LOG.warn("Query {} {}", DebugUtil.printId(queryId), errMsg);
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                // Any single BE failure fails the whole query.
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            span.recordException(e);
            throw e;
        } finally {
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
// Returns the export result files collected so far; may be null until the first
// updateExportFiles() call. Live reference, not a copy.
public List<String> getExportFiles() {
    return exportFiles;
}
// Appends export result files reported by BEs; the backing list is created
// lazily on first use. Guarded by the coordinator lock.
void updateExportFiles(List<String> files) {
    lock.lock();
    try {
        List<String> target = exportFiles;
        if (target == null) {
            target = Lists.newArrayList();
            exportFiles = target;
        }
        target.addAll(files);
    } finally {
        lock.unlock();
    }
}
// Appends delta URLs reported by BEs during load. Guarded by the coordinator lock;
// deltaUrls is initialized in exec() before fragments are dispatched.
void updateDeltas(List<String> urls) {
    lock.lock();
    try {
        deltaUrls.addAll(urls);
    } finally {
        lock.unlock();
    }
}
// Parses a numeric counter from a map, treating a missing entry as zero.
private static long parseCounter(Map<String, String> counters, String key) {
    String raw = counters.get(key);
    return raw == null ? 0L : Long.parseLong(raw);
}

// Merges the per-report load counters from a BE into the coordinator-wide
// running totals. Guarded by the coordinator lock.
private void updateLoadCounters(Map<String, String> newLoadCounters) {
    lock.lock();
    try {
        long numRowsNormal = parseCounter(this.loadCounters, LoadEtlTask.DPP_NORMAL_ALL)
                + parseCounter(newLoadCounters, LoadEtlTask.DPP_NORMAL_ALL);
        long numRowsAbnormal = parseCounter(this.loadCounters, LoadEtlTask.DPP_ABNORMAL_ALL)
                + parseCounter(newLoadCounters, LoadEtlTask.DPP_ABNORMAL_ALL);
        long numRowsUnselected = parseCounter(this.loadCounters, LoadJob.UNSELECTED_ROWS)
                + parseCounter(newLoadCounters, LoadJob.UNSELECTED_ROWS);
        this.loadCounters.put(LoadEtlTask.DPP_NORMAL_ALL, "" + numRowsNormal);
        this.loadCounters.put(LoadEtlTask.DPP_ABNORMAL_ALL, "" + numRowsAbnormal);
        this.loadCounters.put(LoadJob.UNSELECTED_ROWS, "" + numRowsUnselected);
    } finally {
        lock.unlock();
    }
}
// Accumulates tablet commit infos reported by BEs for the load transaction.
// Guarded by the coordinator lock.
private void updateCommitInfos(List<TTabletCommitInfo> commitInfos) {
    lock.lock();
    try {
        this.commitInfos.addAll(commitInfos);
    } finally {
        lock.unlock();
    }
}
// Accumulates error-tablet infos reported by BEs, keeping the stored total
// capped at Config.max_error_tablet_of_broker_load. Guarded by the coordinator lock.
private void updateErrorTabletInfos(List<TErrorTabletInfo> errorTabletInfos) {
    lock.lock();
    try {
        // Remaining capacity; negative means the cap was already exceeded.
        long capacity = Config.max_error_tablet_of_broker_load - this.errorTabletInfos.size();
        if (capacity >= 0) {
            errorTabletInfos.stream().limit(capacity).forEach(this.errorTabletInfos::add);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Records the first failure status of the query and cancels all fragments.
 * The guard order matters: a cancel after all results were returned is ignored,
 * OK statuses are ignored, and only the FIRST non-OK status ever wins.
 *
 * @param status     status reported by a fragment instance or the receiver
 * @param instanceId reporting instance, or null when not instance-specific
 */
private void updateStatus(Status status, TUniqueId instanceId) {
    lock.lock();
    try {
        // The query is done and we are just waiting for the rest of the fragments
        // to finish or be cancelled; don't overwrite the status in this case.
        if (returnedAllResults && status.isCancelled()) {
            return;
        }
        // nothing to update
        if (status.ok()) {
            return;
        }
        // don't override an error status; only record the first error we see.
        if (!queryStatus.ok()) {
            return;
        }
        queryStatus.setStatus(status);
        LOG.warn("one instance report fail throw updateStatus(), need cancel. job id: {},"
                        + " query id: {}, instance id: {}, error message: {}",
                jobId, DebugUtil.printId(queryId), instanceId != null ? DebugUtil.printId(instanceId) : "NaN",
                status.getErrorMsg());
        if (status.getErrorCode() == TStatusCode.TIMEOUT) {
            cancelInternal(Types.PPlanFragmentCancelReason.TIMEOUT);
        } else {
            cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Fetches the next batch of results from the receiver. Propagates any recorded
 * query failure as RpcException/UserException, tracks the number of rows
 * received, and on EOS may proactively cancel remaining fragments when the
 * query's LIMIT has already been satisfied.
 */
@Override
public RowBatch getNext() throws Exception {
    if (receiver == null) {
        throw new UserException("There is no receiver.");
    }
    RowBatch resultBatch;
    Status status = new Status();
    resultBatch = receiver.getNext(status);
    if (!status.ok()) {
        LOG.warn("Query {} coordinator get next fail, {}, need cancel.",
                DebugUtil.printId(queryId), status.toString());
    }
    // Record the failure (if any) and cancel fragments under the lock.
    updateStatus(status, null /* no instance id */);
    // Take a consistent snapshot of the query status before inspecting it.
    Status copyStatus = null;
    lock();
    try {
        copyStatus = new Status(queryStatus);
    } finally {
        unlock();
    }
    if (!copyStatus.ok()) {
        if (Strings.isNullOrEmpty(copyStatus.getErrorMsg())) {
            copyStatus.rewriteErrorMsg();
        }
        if (copyStatus.isRpcError()) {
            throw new RpcException(null, copyStatus.getErrorMsg());
        } else {
            String errMsg = copyStatus.getErrorMsg();
            LOG.warn("Query {} failed: {}", DebugUtil.printId(queryId), errMsg);
            // Hide host info from the user-facing message.
            int hostIndex = errMsg.indexOf("host");
            if (hostIndex != -1) {
                errMsg = errMsg.substring(0, hostIndex);
            }
            throw new UserException(errMsg);
        }
    }
    if (resultBatch.isEos()) {
        this.returnedAllResults = true;
        // If this is a non-blocking multi-instance query whose LIMIT is already
        // satisfied, cancel the remaining fragments early.
        Long numLimitRows = fragments.get(0).getPlanRoot().getLimit();
        boolean hasLimit = numLimitRows > 0;
        if (!isBlockQuery && instanceIds.size() > 1 && hasLimit && numReceivedRows >= numLimitRows) {
            LOG.debug("no block query, return num >= limit rows, need cancel");
            cancelInternal(Types.PPlanFragmentCancelReason.LIMIT_REACH);
        }
        // Dry-run queries report the BE-side returned row count instead.
        if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
            numReceivedRows = 0;
            numReceivedRows += resultBatch.getQueryStatistics().getReturnedRows();
        }
    } else if (resultBatch.getBatch() != null) {
        numReceivedRows += resultBatch.getBatch().getRowsSize();
    }
    return resultBatch;
}
// Cancels the query on behalf of the user (default USER_CANCEL reason).
public void cancel() {
    cancel(Types.PPlanFragmentCancelReason.USER_CANCEL);
}
/**
 * Cancels the whole query with the given reason. If a (failure or cancel)
 * status was already recorded, this is a no-op; otherwise the status is set to
 * CANCELLED and all remote fragments plus the receiver are cancelled.
 */
@Override
public void cancel(Types.PPlanFragmentCancelReason cancelReason) {
    lock();
    try {
        if (!queryStatus.ok()) {
            // A terminal status is already recorded; nothing more to do.
            return;
        }
        queryStatus.setStatus(Status.CANCELLED);
        LOG.warn("cancel execution of query, this is outside invoke");
        cancelInternal(cancelReason);
    } finally {
        unlock();
    }
}
// Cancels the result receiver and all remote fragments. Callers in this file
// invoke it while holding the coordinator lock.
private void cancelInternal(Types.PPlanFragmentCancelReason cancelReason) {
    if (null != receiver) {
        receiver.cancel(cancelReason.toString());
    }
    if (null != pointExec) {
        // Point-query short circuit: no regular fragments were dispatched.
        pointExec.cancel();
        return;
    }
    cancelRemoteFragmentsAsync(cancelReason);
    executionProfile.onCancel();
}
// Asynchronously cancels every dispatched fragment instance, on whichever
// engine (pipeline or classic) the query was sent with.
private void cancelRemoteFragmentsAsync(Types.PPlanFragmentCancelReason cancelReason) {
    if (enablePipelineEngine) {
        pipelineExecContexts.values().forEach(ctx -> ctx.cancelFragmentInstance(cancelReason));
    } else {
        backendExecStates.forEach(state -> state.cancelFragmentInstance(cancelReason));
    }
}
/**
 * Assigns hosts and instance ids to every fragment, then wires up exchange
 * destinations: per-exchange sender counts on the receiving side and the
 * destination list (instance id + rpc/brpc address) on the sending side.
 * Handles three destination layouts: bucket-shuffle, shared-hash-table
 * broadcast (pipeline only), and the plain one-destination-per-instance case.
 */
private void computeFragmentExecParams() throws Exception {
    // assign hosts to each fragment's instances first
    computeFragmentHosts();
    // Generate globally unique instance ids derived from the query id.
    instanceIds.clear();
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("fragment {} has instances {}",
                    params.fragment.getFragmentId(), params.instanceExecParams.size());
        }
        for (int j = 0; j < params.instanceExecParams.size(); ++j) {
            TUniqueId instanceId = new TUniqueId();
            instanceId.setHi(queryId.hi);
            instanceId.setLo(queryId.lo + instanceIds.size() + 1);
            params.instanceExecParams.get(j).instanceId = instanceId;
            instanceIds.add(instanceId);
        }
    }
    // Multi-cast fragments have several destinations and are handled separately.
    computeMultiCastFragmentParams();
    assignRuntimeFilterAddr();
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (params.fragment instanceof MultiCastPlanFragment) {
            continue;
        }
        PlanFragment destFragment = params.fragment.getDestFragment();
        if (destFragment == null) {
            // root fragment: nothing downstream to send to
            continue;
        }
        FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
        // Locate the exchange node in the destination fragment that this sink feeds.
        DataSink sink = params.fragment.getSink();
        PlanNodeId exchId = sink.getExchNodeId();
        PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
        Preconditions.checkState(exchNode != null, "exchNode is null");
        Preconditions.checkState(exchNode instanceof ExchangeNode,
                "exchNode is not ExchangeNode" + exchNode.getId().toString());
        // Accumulate the sender count on the receiving exchange (several sending
        // fragments may target the same exchange id).
        if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
            destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
        } else {
            destParams.perExchNumSenders.put(exchId.asInt(),
                    params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
        }
        if (sink.getOutputPartition() != null
                && sink.getOutputPartition().isBucketShuffleHashPartition()) {
            // Bucket-shuffle: one destination entry per bucket sequence, routed to
            // the receiving instance that owns that bucket; unowned buckets get a
            // dummy address so the layout stays positional.
            Preconditions.checkState(bucketShuffleJoinController
                    .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                    + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
            int bucketSeq = 0;
            int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
            // Single-instance receiver with no explicit bucket ownership: collapse
            // to one bucket owned by that instance.
            if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                    || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                bucketNum = 1;
                destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
            }
            TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
            while (bucketSeq < bucketNum) {
                TPlanFragmentDestination dest = new TPlanFragmentDestination();
                dest.fragment_instance_id = new TUniqueId(-1, -1);
                dest.server = dummyServer;
                dest.setBrpcServer(dummyServer);
                for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                    if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                        dest.fragment_instance_id = instanceExecParams.instanceId;
                        dest.server = toRpcHost(instanceExecParams.host);
                        dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                        break;
                    }
                }
                bucketSeq++;
                params.destinations.add(dest);
            }
        } else {
            if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                    && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
                // Shared hash table for broadcast join: send to only ONE instance
                // per host; that instance builds the hash table and the co-located
                // instances share it.
                Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                destParams.instanceExecParams.forEach(param -> {
                    if (destHosts.containsKey(param.host)) {
                        destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                    } else {
                        destHosts.put(param.host, param);
                        param.buildHashTableForBroadcastJoin = true;
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = param.instanceId;
                        try {
                            dest.server = toRpcHost(param.host);
                            dest.setBrpcServer(toBrpcHost(param.host));
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                        params.destinations.add(dest);
                    }
                });
            } else {
                // Plain exchange: one destination per receiving instance.
                for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                    dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                    dest.setBrpcServer(toBrpcHost(destParams.instanceExecParams.get(j).host));
                    params.destinations.add(dest);
                }
            }
        }
    }
}
/**
 * Like the exchange wiring in computeFragmentExecParams(), but for multi-cast
 * fragments: one MultiCastDataSink fans out to several destination fragments,
 * so sender counts and destination lists are computed per destination index.
 */
private void computeMultiCastFragmentParams() throws Exception {
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (!(params.fragment instanceof MultiCastPlanFragment)) {
            continue;
        }
        MultiCastPlanFragment multi = (MultiCastPlanFragment) params.fragment;
        Preconditions.checkState(multi.getSink() instanceof MultiCastDataSink);
        MultiCastDataSink multiSink = (MultiCastDataSink) multi.getSink();
        // One stream sink (and one destination list) per destination fragment.
        for (int i = 0; i < multi.getDestFragmentList().size(); i++) {
            PlanFragment destFragment = multi.getDestFragmentList().get(i);
            DataStreamSink sink = multiSink.getDataStreamSinks().get(i);
            if (destFragment == null) {
                continue;
            }
            FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
            multi.getDestFragmentList().get(i).setOutputPartition(params.fragment.getOutputPartition());
            // Locate the receiving exchange node for this particular sink.
            PlanNodeId exchId = sink.getExchNodeId();
            PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
            // MultiCastSink only send to itself, destination exchange should not
            // have been registered by any other sender yet.
            Preconditions.checkState(!destParams.perExchNumSenders.containsKey(exchId.asInt()));
            Preconditions.checkState(exchNode != null, "exchNode is null");
            Preconditions.checkState(exchNode instanceof ExchangeNode,
                    "exchNode is not ExchangeNode" + exchNode.getId().toString());
            if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
                destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
            } else {
                destParams.perExchNumSenders.put(exchId.asInt(),
                        params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
            }
            List<TPlanFragmentDestination> destinations = multiSink.getDestinations().get(i);
            if (sink.getOutputPartition() != null
                    && sink.getOutputPartition().isBucketShuffleHashPartition()) {
                // Bucket-shuffle layout: one destination per bucket sequence, routed
                // to the receiving instance owning that bucket (dummy address for
                // unowned buckets keeps the list positional).
                Preconditions.checkState(bucketShuffleJoinController
                        .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                        + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
                int bucketSeq = 0;
                int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
                if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                        || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                    bucketNum = 1;
                    destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
                }
                TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
                while (bucketSeq < bucketNum) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = new TUniqueId(-1, -1);
                    dest.server = dummyServer;
                    dest.setBrpcServer(dummyServer);
                    for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                        if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                            dest.fragment_instance_id = instanceExecParams.instanceId;
                            dest.server = toRpcHost(instanceExecParams.host);
                            dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                            break;
                        }
                    }
                    bucketSeq++;
                    destinations.add(dest);
                }
            } else if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                    && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
                // Shared hash table for broadcast join: send to only one instance
                // per host; co-located instances share the built table.
                Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                destParams.instanceExecParams.forEach(param -> {
                    if (destHosts.containsKey(param.host)) {
                        destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                    } else {
                        destHosts.put(param.host, param);
                        param.buildHashTableForBroadcastJoin = true;
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = param.instanceId;
                        try {
                            dest.server = toRpcHost(param.host);
                            dest.setBrpcServer(toBrpcHost(param.host));
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                        destinations.add(dest);
                    }
                });
            } else {
                // Plain exchange: one destination per receiving instance.
                for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                    dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                    // NOTE(review): direct field write instead of setBrpcServer() as
                    // used elsewhere — presumably equivalent for this thrift struct.
                    dest.brpc_server = toBrpcHost(destParams.instanceExecParams.get(j).host);
                    destinations.add(dest);
                }
            }
        }
    }
}
/**
 * Resolves a BE's heartbeat address (host + be_port) to its thrift RPC address.
 *
 * @throws UserException if no alive backend matches the given address
 */
private TNetworkAddress toRpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo().getBackendWithBePort(
            host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
    }
    return new TNetworkAddress(backend.getHost(), backend.getBeRpcPort());
}
/**
 * Resolves a BE's heartbeat address (host + be_port) to its brpc address.
 * Returns null when the backend reports a negative brpc port.
 *
 * @throws UserException if no alive backend matches the given address
 */
private TNetworkAddress toBrpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo().getBackendWithBePort(
            host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_BACKEND_LOAD_AVAILABLE_MSG);
    }
    return backend.getBrpcPort() < 0
            ? null
            : new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
}
/**
 * Returns true if the plan subtree rooted at {@code node} contains a UnionNode,
 * without crossing fragment boundaries (ExchangeNode children are skipped).
 *
 * Fix: the previous version returned the recursion result of the FIRST
 * non-exchange child, so a UnionNode under any later sibling was never found.
 * Now every child is examined.
 */
private boolean containsUnionNode(PlanNode node) {
    if (node instanceof UnionNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // stay within this fragment
            continue;
        }
        // covers both a direct UnionNode child and deeper descendants
        if (containsUnionNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if the plan subtree rooted at {@code node} contains an
 * IntersectNode, without crossing fragment boundaries (ExchangeNode children
 * are skipped).
 *
 * Fix: the previous version returned the recursion result of the FIRST
 * non-exchange child, so an IntersectNode under any later sibling was never
 * found. Now every child is examined.
 */
private boolean containsIntersectNode(PlanNode node) {
    if (node instanceof IntersectNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // stay within this fragment
            continue;
        }
        // covers both a direct IntersectNode child and deeper descendants
        if (containsIntersectNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true when {@code node}'s fragment-local subtree contains an
 * ExceptNode. Children behind an ExchangeNode are not searched.
 */
private boolean containsExceptNode(PlanNode node) {
    if (node instanceof ExceptNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Do not cross fragment boundaries.
            continue;
        }
        // BUG FIX: previously the loop returned the recursion result of the
        // first non-exchange child, skipping all remaining siblings.
        if (containsExceptNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true when {@code node}'s fragment-local subtree contains any
 * SetOperationNode (union/intersect/except). Children behind an
 * ExchangeNode are not searched.
 */
private boolean containsSetOperationNode(PlanNode node) {
    if (node instanceof SetOperationNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        if (child instanceof ExchangeNode) {
            // Do not cross fragment boundaries.
            continue;
        }
        // BUG FIX: previously the loop returned the recursion result of the
        // first non-exchange child, skipping all remaining siblings.
        if (containsSetOperationNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Decides, for every fragment, which backend hosts run its instances and how
 * many instances each fragment gets, filling each fragment's
 * {@code FragmentExecParams.instanceExecParams}.
 *
 * <p>Fragments are walked bottom-up (children before parents) so a parent can
 * mirror the placement of its already-scheduled input fragments.
 *
 * @throws UserException when no suitable backend is available
 */
private void computeFragmentHosts() throws Exception {
    for (int i = fragments.size() - 1; i >= 0; --i) {
        PlanFragment fragment = fragments.get(i);
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        if (fragment.getDataPartition() == DataPartition.UNPARTITIONED) {
            // Unpartitioned fragment: exactly one instance on a single backend.
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            if (((ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()) || (isAllExternalScan
                    && Config.prefer_compute_node_for_external_table)) && !addressToBackendID.isEmpty()) {
                // Resource tags / compute-node preference restrict candidates to
                // backends already chosen for this query.
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                LOG.warn("DataPartition UNPARTITIONED, no scanNode Backend available");
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                // getRef() stays null on the getHostByCurrentBackend() path above.
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport,
                    0, params);
            params.instanceExecParams.add(instanceParam);
            continue;
        }
        Pair<PlanNode, PlanNode> pairNodes = findLeftmostNode(fragment.getPlanRoot());
        PlanNode fatherNode = pairNodes.first;
        PlanNode leftMostNode = pairNodes.second;
        /*
         * Case A:
         * if the left most is ScanNode, which means there is no child fragment,
         * we should assign fragment instances on every scan node hosts.
         * Case B:
         * if not, there should be exchange nodes to collect all data from child fragments(input fragments),
         * so we should assign fragment instances corresponding to the child fragments' host
         */
        if (!(leftMostNode instanceof ScanNode)) {
            // Case B: mirror the hosts of the child fragment with the highest
            // parallelism.
            int inputFragmentIndex = 0;
            int maxParallelism = 0;
            int childrenCount = (fatherNode != null) ? fatherNode.getChildren().size() : 1;
            for (int j = 0; j < childrenCount; j++) {
                int currentChildFragmentParallelism
                        = fragmentExecParamsMap.get(fragment.getChild(j).getFragmentId()).instanceExecParams.size();
                if (currentChildFragmentParallelism > maxParallelism) {
                    maxParallelism = currentChildFragmentParallelism;
                    inputFragmentIndex = j;
                }
            }
            PlanFragmentId inputFragmentId = fragment.getChild(inputFragmentIndex).getFragmentId();
            // -1 means "no session cap" on exchange parallelism.
            int exchangeInstances = -1;
            if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable() != null) {
                exchangeInstances = ConnectContext.get().getSessionVariable().getExchangeInstanceParallel();
            }
            if (leftMostNode.getNumInstances() == 1) {
                exchangeInstances = 1;
            }
            if (exchangeInstances > 0 && fragmentExecParamsMap.get(inputFragmentId)
                    .instanceExecParams.size() > exchangeInstances) {
                // Session capped the parallelism below the child's instance count:
                // spread the capped number of instances round-robin over the
                // child's (shuffled) distinct hosts.
                Set<TNetworkAddress> hostSet = Sets.newHashSet();
                for (FInstanceExecParam execParams :
                        fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    hostSet.add(execParams.host);
                }
                List<TNetworkAddress> hosts = Lists.newArrayList(hostSet);
                Collections.shuffle(hosts, instanceRandom);
                for (int index = 0; index < exchangeInstances; index++) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null,
                            hosts.get(index % hosts.size()), 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            } else {
                // One instance per child instance, colocated on the same host.
                for (FInstanceExecParam execParams
                        : fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null, execParams.host, 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            }
            Collections.shuffle(params.instanceExecParams, instanceRandom);
            continue;
        }
        // Case A: leftmost node is a ScanNode; place instances on the scan hosts.
        int parallelExecInstanceNum = fragment.getParallelExecNum();
        if ((isColocateFragment(fragment, fragment.getPlanRoot())
                && fragmentIdToSeqToAddressMap.containsKey(fragment.getFragmentId())
                && fragmentIdToSeqToAddressMap.get(fragment.getFragmentId()).size() > 0)) {
            computeColocateJoinInstanceParam(fragment.getFragmentId(), parallelExecInstanceNum, params);
        } else if (bucketShuffleJoinController.isBucketShuffleJoin(fragment.getFragmentId().asInt())) {
            bucketShuffleJoinController.computeInstanceParam(fragment.getFragmentId(),
                    parallelExecInstanceNum, params);
        } else {
            // Plain case: per host, split that host's scan ranges across up to
            // parallelExecInstanceNum instances.
            for (Entry<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> entry : fragmentExecParamsMap.get(
                    fragment.getFragmentId()).scanRangeAssignment.entrySet()) {
                TNetworkAddress key = entry.getKey();
                Map<Integer, List<TScanRangeParams>> value = entry.getValue();
                for (Integer planNodeId : value.keySet()) {
                    List<TScanRangeParams> perNodeScanRanges = value.get(planNodeId);
                    List<List<TScanRangeParams>> perInstanceScanRanges = Lists.newArrayList();
                    List<Boolean> sharedScanOpts = Lists.newArrayList();
                    Optional<ScanNode> node = scanNodes.stream().filter(scanNode -> {
                        return scanNode.getId().asInt() == planNodeId;
                    }).findFirst();
                    if (!enablePipelineEngine || (node.isPresent() && node.get().getShouldColoScan())
                            || (node.isPresent() && node.get() instanceof FileScanNode)
                            || (node.isPresent() && node.get().shouldDisableSharedScan())) {
                        // Non-shared scan: partition the ranges among the instances.
                        int expectedInstanceNum = 1;
                        if (parallelExecInstanceNum > 1) {
                            expectedInstanceNum = Math.min(perNodeScanRanges.size(), parallelExecInstanceNum);
                        }
                        if (node.isPresent() && node.get().shouldUseOneInstance(ConnectContext.get())) {
                            expectedInstanceNum = 1;
                        }
                        perInstanceScanRanges = ListUtil.splitBySize(perNodeScanRanges,
                                expectedInstanceNum);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), false);
                    } else {
                        // Shared scan (pipeline engine): every instance receives the
                        // full range list; perNodeSharedScans marks it as shared.
                        int expectedInstanceNum = Math.min(parallelExecInstanceNum,
                                leftMostNode.getNumInstances());
                        expectedInstanceNum = Math.max(expectedInstanceNum, 1);
                        if (node.isPresent() && node.get().shouldUseOneInstance(ConnectContext.get())) {
                            expectedInstanceNum = 1;
                        }
                        perInstanceScanRanges = Collections.nCopies(expectedInstanceNum, perNodeScanRanges);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), true);
                    }
                    LOG.debug("scan range number per instance is: {}", perInstanceScanRanges.size());
                    for (int j = 0; j < perInstanceScanRanges.size(); j++) {
                        List<TScanRangeParams> scanRangeParams = perInstanceScanRanges.get(j);
                        boolean sharedScan = sharedScanOpts.get(j);
                        FInstanceExecParam instanceParam = new FInstanceExecParam(null, key, 0, params);
                        instanceParam.perNodeScanRanges.put(planNodeId, scanRangeParams);
                        instanceParam.perNodeSharedScans.put(planNodeId, sharedScan);
                        params.instanceExecParams.add(instanceParam);
                    }
                }
            }
        }
        if (params.instanceExecParams.isEmpty()) {
            // No scan ranges were assigned (e.g. empty table): still schedule one
            // instance so the fragment produces an (empty) result.
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            if (ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()
                    && !addressToBackendID.isEmpty()) {
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport, 0, params);
            params.instanceExecParams.add(instanceParam);
        }
    }
}
/**
 * Wires up runtime-filter routing: records, per filter id, every instance
 * that consumes the filter, counts the instances that build it, and picks
 * the merge point (first instance of the topmost fragment).
 *
 * @throws Exception when a backend's brpc address cannot be resolved
 */
private void assignRuntimeFilterAddr() throws Exception {
    for (PlanFragment fragment : fragments) {
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        for (RuntimeFilterId rid : fragment.getTargetRuntimeFilterIds()) {
            List<FRuntimeFilterTargetParam> targets = ridToTargetParam.computeIfAbsent(rid,
                    unused -> new ArrayList<>());
            for (final FInstanceExecParam instance : params.instanceExecParams) {
                targets.add(new FRuntimeFilterTargetParam(instance.instanceId, toBrpcHost(instance.host)));
            }
        }
        for (RuntimeFilterId rid : fragment.getBuilderRuntimeFilterIds()) {
            // Each instance of this fragment builds the filter once.
            ridToBuilderNum.merge(rid, params.instanceExecParams.size(), Integer::sum);
        }
    }
    // The merge point lives on the first instance of the topmost fragment.
    FInstanceExecParam mergeInstance =
            fragmentExecParamsMap.get(fragments.get(0).getFragmentId()).instanceExecParams.get(0);
    runtimeFilterMergeAddr = toBrpcHost(mergeInstance.host);
    runtimeFilterMergeInstanceId = mergeInstance.instanceId;
}
/**
 * Returns true when the fragment should be scheduled as a colocate plan.
 * Caches positive decisions in {@code colocateFragmentIds}.
 */
private boolean isColocateFragment(PlanFragment planFragment, PlanNode node) {
    // Colocate planning can be switched off per session.
    ConnectContext connectContext = ConnectContext.get();
    if (connectContext != null && connectContext.getSessionVariable().isDisableColocatePlan()) {
        return false;
    }
    if (colocateFragmentIds.contains(node.getFragmentId().asInt())) {
        return true;
    }
    if (planFragment.hasColocatePlanNode()) {
        // Remember the decision so later lookups hit the cache above.
        colocateFragmentIds.add(planFragment.getId().asInt());
        return true;
    }
    return false;
}
/**
 * Walks down the leftmost child chain of {@code plan} until reaching a leaf
 * or an ExchangeNode.
 *
 * @return (parent of the stop node, stop node); parent is null when the
 *         root itself is the stop node
 */
private Pair<PlanNode, PlanNode> findLeftmostNode(PlanNode plan) {
    PlanNode parent = null;
    PlanNode current = plan;
    while (!current.getChildren().isEmpty() && !(current instanceof ExchangeNode)) {
        parent = current;
        current = current.getChild(0);
    }
    return Pair.of(parent, current);
}
/**
 * Returns the value mapped to {@code key}, inserting and returning
 * {@code defaultVal} when the key is absent (or mapped to null).
 * Callers never pass a null {@code defaultVal}.
 */
private <K, V> V findOrInsert(Map<K, V> m, final K key, final V defaultVal) {
    // Idiomatic replacement for the manual get/null-check/put sequence.
    return m.computeIfAbsent(key, k -> defaultVal);
}
/**
 * Returns the scan-range list mapped to {@code key}, inserting and returning
 * {@code defaultVal} when the key is absent (or mapped to null).
 * Callers never pass a null {@code defaultVal}.
 */
private List<TScanRangeParams> findOrInsert(Map<Integer, List<TScanRangeParams>> m, Integer key,
        ArrayList<TScanRangeParams> defaultVal) {
    // Idiomatic replacement for the manual get/null-check/put sequence.
    return m.computeIfAbsent(key, k -> defaultVal);
}
/**
 * Builds fragment instances for a colocate-join fragment: buckets already
 * pinned to a host (see fragmentIdToSeqToAddressMap) are grouped per host,
 * then each host's buckets are split across up to
 * {@code parallelExecInstanceNum} instances.
 */
private void computeColocateJoinInstanceParam(PlanFragmentId fragmentId,
        int parallelExecInstanceNum, FragmentExecParams params) {
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(fragmentId);
    Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
    // host -> list of (bucketSeq, scanNodeId -> ranges) pairs for that host.
    Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
            = Maps.newHashMap();
    for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges : bucketSeqToScanRange.entrySet()) {
        TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
        Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
        // Keep only ranges for scan nodes that belong to this fragment.
        Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
        for (Integer scanNodeId : nodeScanRanges.keySet()) {
            if (scanNodeIds.contains(scanNodeId)) {
                filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
            }
        }
        Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
        if (!addressToScanRanges.containsKey(address)) {
            addressToScanRanges.put(address, Lists.newArrayList());
        }
        addressToScanRanges.get(address).add(filteredScanRanges);
    }
    FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
    for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
            : addressToScanRanges.entrySet()) {
        List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
        Map<Integer, List<TScanRangeParams>> range
                = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
        // At most one instance per bucket on this host.
        int expectedInstanceNum = 1;
        if (parallelExecInstanceNum > 1) {
            expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
        }
        List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                = ListUtil.splitBySize(scanRange, expectedInstanceNum);
        for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                : perInstanceScanRanges) {
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, addressScanRange.getKey(), 0, params);
            for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                instanceParam.bucketSeqSet.add(nodeScanRangeMap.first);
                for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                        : nodeScanRangeMap.second.entrySet()) {
                    if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                        // Mirror the ranges into both the per-host assignment and
                        // the instance's own per-node map.
                        range.put(nodeScanRange.getKey(), Lists.newArrayList());
                        instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                    }
                    range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                    instanceParam.perNodeScanRanges.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                }
            }
            params.instanceExecParams.add(instanceParam);
        }
    }
}
/**
 * Counts, for every backend host, how many scan-range replicas it serves
 * across all scan nodes of this query. Used as a tie-breaker during
 * backend selection.
 */
private Map<TNetworkAddress, Long> getReplicaNumPerHostForOlapTable() {
    Map<TNetworkAddress, Long> replicaNumPerHost = Maps.newHashMap();
    for (ScanNode scanNode : scanNodes) {
        List<TScanRangeLocations> locationsList = scanNode.getScanRangeLocations(0);
        // Guard against null, consistent with computeScanRangeAssignment().
        if (locationsList == null) {
            continue;
        }
        for (TScanRangeLocations locations : locationsList) {
            for (TScanRangeLocation location : locations.locations) {
                // merge() replaces the manual containsKey/get/put sequence.
                replicaNumPerHost.merge(location.server, 1L, Long::sum);
            }
        }
    }
    return replicaNumPerHost;
}
/**
 * Assigns every scan node's scan ranges to backend hosts, dispatching to the
 * colocate, bucket-shuffle, or plain scheduler strategy per fragment.
 * Also records which scan-node ids belong to which fragment and collects
 * file-scan params for FileQueryScanNodes.
 */
private void computeScanRangeAssignment() throws Exception {
    Map<TNetworkAddress, Long> assignedBytesPerHost = Maps.newHashMap();
    Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHostForOlapTable();
    // Shuffle to avoid systematically favoring the same hosts across queries.
    Collections.shuffle(scanNodes);
    for (ScanNode scanNode : scanNodes) {
        if (!(scanNode instanceof ExternalScanNode)) {
            isAllExternalScan = false;
        }
        List<TScanRangeLocations> locations;
        locations = scanNode.getScanRangeLocations(0);
        if (locations == null) {
            // Scan node has no concrete ranges (e.g. schema scan); skip it.
            continue;
        }
        Collections.shuffle(locations);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.computeIfAbsent(scanNode.getFragmentId(),
                k -> Sets.newHashSet());
        scanNodeIds.add(scanNode.getId().asInt());
        if (scanNode instanceof FileQueryScanNode) {
            fileScanRangeParamsMap.put(
                    scanNode.getId().asInt(), ((FileQueryScanNode) scanNode).getFileScanRangeParams());
        }
        FragmentScanRangeAssignment assignment
                = fragmentExecParamsMap.get(scanNode.getFragmentId()).scanRangeAssignment;
        boolean fragmentContainsColocateJoin = isColocateFragment(scanNode.getFragment(),
                scanNode.getFragment().getPlanRoot());
        boolean fragmentContainsBucketShuffleJoin = bucketShuffleJoinController
                .isBucketShuffleJoin(scanNode.getFragmentId().asInt(), scanNode.getFragment().getPlanRoot());
        if (fragmentContainsColocateJoin) {
            computeScanRangeAssignmentByColocate((OlapScanNode) scanNode, assignedBytesPerHost, replicaNumPerHost);
        }
        if (fragmentContainsBucketShuffleJoin) {
            bucketShuffleJoinController.computeScanRangeAssignmentByBucket((OlapScanNode) scanNode,
                    idToBackend, addressToBackendID, replicaNumPerHost);
        }
        if (!(fragmentContainsColocateJoin || fragmentContainsBucketShuffleJoin)) {
            computeScanRangeAssignmentByScheduler(scanNode, locations, assignment, assignedBytesPerHost,
                    replicaNumPerHost);
        }
    }
}
/**
 * Colocate strategy: pins every bucket sequence of the scan node to one
 * host (chosen round-robin the first time a bucket is seen) and records the
 * bucket's scan ranges under that bucket sequence.
 */
private void computeScanRangeAssignmentByColocate(
        final OlapScanNode scanNode, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
        // First scan node of this fragment: initialize its bucket bookkeeping.
        fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
        fragmentIdTobucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
    }
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(scanNode.getFragmentId());
    for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
        List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
        if (!bucketSeqToAddress.containsKey(bucketSeq)) {
            // Pin this bucket to a host once; later scan nodes reuse the pin.
            getExecHostPortForFragmentIDAndBucketSeq(locations.get(0),
                    scanNode.getFragmentId(), bucketSeq, assignedBytesPerHost, replicaNumPerHost);
        }
        for (TScanRangeLocations location : locations) {
            Map<Integer, List<TScanRangeParams>> scanRanges =
                    findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
            List<TScanRangeParams> scanRangeParamsList =
                    findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
            TScanRangeParams scanRangeParams = new TScanRangeParams();
            scanRangeParams.scan_range = location.scan_range;
            scanRangeParamsList.add(scanRangeParams);
            updateScanRangeNumByScanRange(scanRangeParams);
        }
    }
}
/**
 * Chooses a backend for one bucket of a colocate fragment (round-robin over
 * the replica hosts) and records the bucketSeq -> host mapping.
 */
private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
        PlanFragmentId fragmentId, Integer bucketSeq, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost)
        throws Exception {
    Reference<Long> backendIdRef = new Reference<>();
    selectBackendsByRoundRobin(seqLocation, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    Backend chosenBackend = this.idToBackend.get(backendIdRef.getRef());
    TNetworkAddress execHostPort = new TNetworkAddress(chosenBackend.getHost(), chosenBackend.getBePort());
    this.addressToBackendID.put(execHostPort, backendIdRef.getRef());
    this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
}
/**
 * Round-robin backend selection for one scan range. When local replica
 * selection is enabled, replicas on this FE's host are preferred; remote
 * replicas are used only as a fallback (if configured).
 *
 * @throws UserException when no candidate replica can be scheduled
 */
public TScanRangeLocation selectBackendsByRoundRobin(TScanRangeLocations seqLocation,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    if (!Config.enable_local_replica_selection) {
        return selectBackendsByRoundRobin(seqLocation.getLocations(), assignedBytesPerHost, replicaNumPerHost,
                backendIdRef);
    }
    // Split replicas into those hosted on this FE's machine and the rest.
    long localBeId = Env.getCurrentSystemInfo().getBackendIdByHost(FrontendOptions.getLocalHostAddress());
    List<TScanRangeLocation> localLocations = new ArrayList<>();
    List<TScanRangeLocation> nonlocalLocations = new ArrayList<>();
    for (final TScanRangeLocation location : seqLocation.getLocations()) {
        if (location.backend_id == localBeId) {
            localLocations.add(location);
        } else {
            nonlocalLocations.add(location);
        }
    }
    try {
        return selectBackendsByRoundRobin(localLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    } catch (UserException ue) {
        if (!Config.enable_local_replica_selection_fallback) {
            throw ue;
        }
        // Local selection failed; retry with the remote replicas.
        return selectBackendsByRoundRobin(nonlocalLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    }
}
/**
 * Picks the location whose host currently has the fewest assigned work units
 * (ties broken by fewer remaining replicas on the host), decrements every
 * candidate host's replica counter, and charges one unit to the winner.
 *
 * @throws UserException when SimpleScheduler cannot place the chosen location
 */
public TScanRangeLocation selectBackendsByRoundRobin(List<TScanRangeLocation> locations,
        Map<TNetworkAddress, Long> assignedBytesPerHost, Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    // Primitives instead of boxed Long: same semantics as the previous
    // Long.equals/auto-unboxing code, without repeated boxing in the loop.
    long minAssignedBytes = Long.MAX_VALUE;
    long minReplicaNum = Long.MAX_VALUE;
    TScanRangeLocation minLocation = null;
    final long step = 1L;
    for (final TScanRangeLocation location : locations) {
        long assignedBytes = findOrInsert(assignedBytesPerHost, location.server, 0L);
        if (assignedBytes < minAssignedBytes || (assignedBytes == minAssignedBytes
                && replicaNumPerHost.get(location.server) < minReplicaNum)) {
            minAssignedBytes = assignedBytes;
            minReplicaNum = replicaNumPerHost.get(location.server);
            minLocation = location;
        }
    }
    // Every candidate host now has one fewer replica left to place.
    for (TScanRangeLocation location : locations) {
        replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
    }
    TScanRangeLocation location = SimpleScheduler.getLocation(minLocation, locations,
            this.idToBackend, backendIdRef);
    assignedBytesPerHost.put(location.server, assignedBytesPerHost.get(location.server) + step);
    return location;
}
/**
 * Plain (non-colocate, non-bucket-shuffle) strategy: assigns each scan range
 * to a backend chosen round-robin by current load, recording the range under
 * (host, scanNodeId) in {@code assignment}.
 */
private void computeScanRangeAssignmentByScheduler(
        final ScanNode scanNode,
        final List<TScanRangeLocations> locations,
        FragmentScanRangeAssignment assignment,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    for (TScanRangeLocations scanRangeLocations : locations) {
        Reference<Long> backendIdRef = new Reference<Long>();
        TScanRangeLocation minLocation = selectBackendsByRoundRobin(scanRangeLocations,
                assignedBytesPerHost, replicaNumPerHost, backendIdRef);
        Backend backend = this.idToBackend.get(backendIdRef.getRef());
        TNetworkAddress execHostPort = new TNetworkAddress(backend.getHost(), backend.getBePort());
        this.addressToBackendID.put(execHostPort, backendIdRef.getRef());
        Map<Integer, List<TScanRangeParams>> scanRanges = findOrInsert(assignment, execHostPort,
                new HashMap<Integer, List<TScanRangeParams>>());
        List<TScanRangeParams> scanRangeParamsList = findOrInsert(scanRanges, scanNode.getId().asInt(),
                new ArrayList<TScanRangeParams>());
        TScanRangeParams scanRangeParams = new TScanRangeParams();
        scanRangeParams.scan_range = scanRangeLocations.scan_range;
        // Keep the volume id of the replica we preferred for this range.
        scanRangeParams.setVolumeId(minLocation.volume_id);
        scanRangeParamsList.add(scanRangeParams);
        updateScanRangeNumByScanRange(scanRangeParams);
    }
}
/**
 * Accumulates the number of concrete ranges carried by {@code param} into
 * the query-level {@code scanRangeNum} counter.
 */
private void updateScanRangeNumByScanRange(TScanRangeParams param) {
    TScanRange scanRange = param.getScanRange();
    if (scanRange == null) {
        return;
    }
    TBrokerScanRange brokerScanRange = scanRange.getBrokerScanRange();
    if (brokerScanRange != null) {
        scanRangeNum += brokerScanRange.getRanges().size();
    }
    TExternalScanRange externalScanRange = scanRange.getExtScanRange();
    if (externalScanRange != null) {
        TFileScanRange fileScanRange = externalScanRange.getFileScanRange();
        if (fileScanRange != null) {
            scanRangeNum += fileScanRange.getRanges().size();
        }
    }
    if (scanRange.getPaloScanRange() != null) {
        // A palo scan range counts as a single range.
        scanRangeNum += 1;
    }
}
/**
 * Handles an execution-status report from one backend instance: merges its
 * profile, propagates a failure status, and — once the instance is done —
 * collects load-related side outputs (delta urls, counters, tracking url,
 * export files, commit infos, error tablets) and marks the instance done.
 *
 * NOTE: the pipeline and non-pipeline branches below are intentionally
 * near-identical; they differ only in how the per-instance state is looked up.
 */
public void updateFragmentExecStatus(TReportExecStatusParams params) {
    if (enablePipelineEngine) {
        PipelineExecContext ctx = pipelineExecContexts.get(Pair.of(params.getFragmentId(), params.getBackendId()));
        if (!ctx.updateProfile(params)) {
            // Stale or duplicate report; ignore it.
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            ctx.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        Status status = new Status(params.status);
        // A cancel after all results were returned is expected, not an error.
        if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
            LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
                    DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
                    status.getErrorMsg());
            updateStatus(status, params.getFragmentInstanceId());
        }
        if (ctx.fragmentInstancesMap.get(params.fragment_instance_id).getIsDone() && params.isDone()) {
            if (params.isSetDeltaUrls()) {
                updateDeltas(params.getDeltaUrls());
            }
            if (params.isSetLoadCounters()) {
                updateLoadCounters(params.getLoadCounters());
            }
            if (params.isSetTrackingUrl()) {
                trackingUrl = params.getTrackingUrl();
            }
            if (params.isSetExportFiles()) {
                updateExportFiles(params.getExportFiles());
            }
            if (params.isSetCommitInfos()) {
                updateCommitInfos(params.getCommitInfos());
            }
            if (params.isSetErrorTabletInfos()) {
                updateErrorTabletInfos(params.getErrorTabletInfos());
            }
            executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
        }
    } else {
        if (params.backend_num >= backendExecStates.size()) {
            // Defensive: report references an instance we never launched.
            LOG.warn("unknown backend number: {}, expected less than: {}",
                    params.backend_num, backendExecStates.size());
            return;
        }
        BackendExecState execState = backendExecStates.get(params.backend_num);
        if (!execState.updateProfile(params)) {
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            execState.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        Status status = new Status(params.status);
        if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
            LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
                    DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
                    status.getErrorMsg());
            updateStatus(status, params.getFragmentInstanceId());
        }
        if (execState.done && params.isDone()) {
            if (params.isSetDeltaUrls()) {
                updateDeltas(params.getDeltaUrls());
            }
            if (params.isSetLoadCounters()) {
                updateLoadCounters(params.getLoadCounters());
            }
            if (params.isSetTrackingUrl()) {
                trackingUrl = params.getTrackingUrl();
            }
            if (params.isSetExportFiles()) {
                updateExportFiles(params.getExportFiles());
            }
            if (params.isSetCommitInfos()) {
                updateCommitInfos(params.getCommitInfos());
            }
            if (params.isSetErrorTabletInfos()) {
                updateErrorTabletInfos(params.getErrorTabletInfos());
            }
            executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
        }
    }
    // Progress reporting for load jobs (jobId == -1 means "not a load job").
    if (params.isSetLoadedRows() && jobId != -1) {
        Env.getCurrentEnv().getLoadManager().updateJobProgress(
                jobId, params.getBackendId(), params.getQueryId(), params.getFragmentInstanceId(),
                params.getLoadedRows(), params.getLoadedBytes(), params.isDone());
        Env.getCurrentEnv().getProgressManager().updateProgress(String.valueOf(jobId),
                params.getQueryId(), params.getFragmentInstanceId(), params.getFinishedScanRanges());
    }
}
/*
* Waiting the coordinator finish executing.
* return false if waiting timeout.
* return true otherwise.
* NOTICE: return true does not mean that coordinator executed success,
* the caller should check queryStatus for result.
*
* We divide the entire waiting process into multiple rounds,
* with a maximum of 30 seconds per round. And after each round of waiting,
* check the status of the BE. If the BE status is abnormal, the wait is ended
* and the result is returned. Otherwise, continue to the next round of waiting.
* This method mainly avoids the problem that the Coordinator waits for a long time
     * after some BE can no longer return the result due to some exception, such as BE is down.
*/
/**
 * Waits up to {@code timeoutS} seconds for all instances to finish, in
 * bounded rounds (max 30s each) with a backend-health check between rounds.
 *
 * @return true when execution finished or a backend died (caller must check
 *         queryStatus); false when the overall timeout elapsed
 */
public boolean join(int timeoutS) {
    final long fixedMaxWaitTime = 30;
    long leftTimeoutS = timeoutS;
    while (leftTimeoutS > 0) {
        long waitTime = Math.min(leftTimeoutS, fixedMaxWaitTime);
        boolean awaitRes = false;
        try {
            awaitRes = executionProfile.awaitAllInstancesDone(waitTime);
        } catch (InterruptedException e) {
            // BUG FIX: the interruption was silently swallowed. Restore the
            // interrupt flag so callers can observe and react to it.
            Thread.currentThread().interrupt();
        }
        if (awaitRes) {
            return true;
        }
        // Stop waiting early when a backend went down; the caller inspects
        // queryStatus for the failure detail.
        if (!checkBackendState()) {
            return true;
        }
        leftTimeoutS -= waitTime;
    }
    return false;
}
/*
* Check the state of backends in needCheckBackendExecStates.
* return true if all of them are OK. Otherwise, return false.
*/
/**
 * Probes every tracked execution context; the first unhealthy backend fails
 * the whole query with an INTERNAL_ERROR status.
 *
 * @return true when every tracked backend is healthy
 */
private boolean checkBackendState() {
    if (enablePipelineEngine) {
        for (PipelineExecContext ctx : needCheckPipelineExecContexts) {
            if (ctx.isBackendStateHealthy()) {
                continue;
            }
            queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
                    + ctx.backend.getId() + " is down");
            return false;
        }
        return true;
    }
    for (BackendExecState state : needCheckBackendExecStates) {
        if (state.isBackendStateHealthy()) {
            continue;
        }
        queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
                + state.backend.getId() + " is down");
        return false;
    }
    return true;
}
// True once every fragment instance has reported completion.
public boolean isDone() {
    return executionProfile.isAllInstancesDone();
}
// Per-fragment mapping: backend host -> (scan node id -> scan ranges
// assigned to that host for that node).
class FragmentScanRangeAssignment
        extends HashMap<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> {
}
// Mapping: bucket sequence -> (scan node id -> scan ranges of that bucket).
class BucketSeqToScanRange extends HashMap<Integer, Map<Integer, List<TScanRangeParams>>> {
}
class BucketShuffleJoinController {
        // fragment id -> (bucket seq -> scan ranges of that bucket)
        private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdBucketSeqToScanRangeMap = Maps.newHashMap();
        // fragment id -> (bucket seq -> backend host pinned for that bucket)
        private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap
                = Maps.newHashMap();
        // fragment id -> (backend id -> number of buckets already pinned there);
        // used to balance buckets across backends.
        private final Map<PlanFragmentId, Map<Long, Integer>> fragmentIdToBuckendIdBucketCountMap = Maps.newHashMap();
        // fragment id -> total bucket count of the fragment's table
        private final Map<PlanFragmentId, Integer> fragmentIdToBucketNumMap = Maps.newHashMap();
        // fragment ids already identified as bucket-shuffle joins (decision cache)
        private final Set<Integer> bucketShuffleFragmentIds = new HashSet<>();
        // fragment id -> ids of the scan nodes belonging to that fragment
        private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds;
        // Constructor: shares the coordinator's fragment -> scan-node-id map.
        public BucketShuffleJoinController(Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds) {
            this.fragmentIdToScanNodeIds = fragmentIdToScanNodeIds;
        }
        /**
         * Returns true when the given fragment performs a bucket-shuffle join,
         * searching {@code node}'s subtree for a HashJoinNode marked as
         * bucket-shuffle. Positive decisions are cached in
         * {@code bucketShuffleFragmentIds}.
         */
        private boolean isBucketShuffleJoin(int fragmentId, PlanNode node) {
            if (ConnectContext.get() != null) {
                // Disabled unless the session enables bucket-shuffle join or the
                // Nereids planner is in use.
                if (!ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin()
                        && !ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                    return false;
                }
            }
            if (fragmentId != node.getFragmentId().asInt()) {
                // Only inspect nodes that belong to this fragment.
                return false;
            }
            if (bucketShuffleFragmentIds.contains(fragmentId)) {
                return true;
            }
            if (node instanceof HashJoinNode) {
                HashJoinNode joinNode = (HashJoinNode) node;
                if (joinNode.isBucketShuffle()) {
                    bucketShuffleFragmentIds.add(joinNode.getFragmentId().asInt());
                    return true;
                }
            }
            for (PlanNode childNode : node.getChildren()) {
                if (isBucketShuffleJoin(fragmentId, childNode)) {
                    return true;
                }
            }
            return false;
        }
        // Cache-only lookup; valid after isBucketShuffleJoin(int, PlanNode) ran.
        private boolean isBucketShuffleJoin(int fragmentId) {
            return bucketShuffleFragmentIds.contains(fragmentId);
        }
        // Total bucket count recorded for the fragment; the fragment must have
        // been registered by computeScanRangeAssignmentByBucket() first.
        private int getFragmentBucketNum(PlanFragmentId fragmentId) {
            return fragmentIdToBucketNumMap.get(fragmentId);
        }
        /**
         * Pins one bucket of a bucket-shuffle fragment to a backend: prefers
         * the replica backend with the fewest buckets already pinned (ties
         * broken by fewer remaining replicas on the host), then records the
         * bucketSeq -> host mapping and updates the per-backend bucket counts.
         */
        private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
                PlanFragmentId fragmentId, Integer bucketSeq, ImmutableMap<Long, Backend> idToBackend,
                Map<TNetworkAddress, Long> addressToBackendID,
                Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
            Map<Long, Integer> buckendIdToBucketCountMap = fragmentIdToBuckendIdBucketCountMap.get(fragmentId);
            int maxBucketNum = Integer.MAX_VALUE;
            long buckendId = Long.MAX_VALUE;
            Long minReplicaNum = Long.MAX_VALUE;
            for (TScanRangeLocation location : seqLocation.locations) {
                // Fewest already-pinned buckets wins; on a tie, fewer remaining
                // replicas on the host wins.
                if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) < maxBucketNum) {
                    maxBucketNum = buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0);
                    buckendId = location.backend_id;
                    minReplicaNum = replicaNumPerHost.get(location.server);
                } else if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) == maxBucketNum
                        && replicaNumPerHost.get(location.server) < minReplicaNum) {
                    buckendId = location.backend_id;
                    minReplicaNum = replicaNumPerHost.get(location.server);
                }
            }
            Reference<Long> backendIdRef = new Reference<>();
            // The scheduler may substitute another live backend for the one we
            // preferred; count the bucket against whichever backend was used.
            TNetworkAddress execHostPort = SimpleScheduler.getHost(buckendId,
                    seqLocation.locations, idToBackend, backendIdRef);
            if (backendIdRef.getRef() != buckendId) {
                buckendIdToBucketCountMap.put(backendIdRef.getRef(),
                        buckendIdToBucketCountMap.getOrDefault(backendIdRef.getRef(), 0) + 1);
            } else {
                buckendIdToBucketCountMap.put(buckendId, buckendIdToBucketCountMap.getOrDefault(buckendId, 0) + 1);
            }
            for (TScanRangeLocation location : seqLocation.locations) {
                replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
            }
            addressToBackendID.put(execHostPort, backendIdRef.getRef());
            this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
        }
        /**
         * Bucket-shuffle strategy: on first sight of the fragment, records its
         * total bucket count and initializes the bucket bookkeeping; then pins
         * every bucket to a backend (once) and collects the bucket's scan
         * ranges under its bucket sequence.
         */
        private void computeScanRangeAssignmentByBucket(
                final OlapScanNode scanNode, ImmutableMap<Long, Backend> idToBackend,
                Map<TNetworkAddress, Long> addressToBackendID,
                Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
            if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
                int bucketNum = 0;
                if (scanNode.getOlapTable().isColocateTable()) {
                    // Colocate tables share the distribution's bucket count.
                    bucketNum = scanNode.getOlapTable().getDefaultDistributionInfo().getBucketNum();
                } else {
                    bucketNum = (int) (scanNode.getTotalTabletsNum());
                }
                fragmentIdToBucketNumMap.put(scanNode.getFragmentId(), bucketNum);
                fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
                fragmentIdBucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
                fragmentIdToBuckendIdBucketCountMap.put(scanNode.getFragmentId(), new HashMap<>());
            }
            Map<Integer, TNetworkAddress> bucketSeqToAddress
                    = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
            BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(scanNode.getFragmentId());
            for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
                List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
                if (!bucketSeqToAddress.containsKey(bucketSeq)) {
                    // Pin this bucket to a host once; later scan nodes reuse it.
                    getExecHostPortForFragmentIDAndBucketSeq(locations.get(0), scanNode.getFragmentId(),
                            bucketSeq, idToBackend, addressToBackendID, replicaNumPerHost);
                }
                for (TScanRangeLocations location : locations) {
                    Map<Integer, List<TScanRangeParams>> scanRanges =
                            findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
                    List<TScanRangeParams> scanRangeParamsList =
                            findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
                    TScanRangeParams scanRangeParams = new TScanRangeParams();
                    scanRangeParams.scan_range = location.scan_range;
                    scanRangeParamsList.add(scanRangeParams);
                    updateScanRangeNumByScanRange(scanRangeParams);
                }
            }
        }
/**
 * Builds the fragment instances for a bucket-assigned fragment. Buckets are first
 * grouped by the backend address previously chosen for them, then each host's
 * buckets are split evenly over at most {@code parallelExecInstanceNum} instances.
 * Both {@code params.scanRangeAssignment} (per host) and each instance's
 * {@code perNodeScanRanges} are populated.
 *
 * @param fragmentId              fragment whose instances are being created
 * @param parallelExecInstanceNum upper bound on instances per host (min'd with
 *                                the number of buckets on that host)
 * @param params                  output: instance exec params and scan range assignment
 */
private void computeInstanceParam(PlanFragmentId fragmentId,
        int parallelExecInstanceNum, FragmentExecParams params) {
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(fragmentId);
    Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);

    // Group (bucketSeq -> scan ranges) pairs by the backend address chosen for the bucket.
    Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
            = Maps.newHashMap();
    for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges
            : bucketSeqToScanRange.entrySet()) {
        TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
        Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
        // Keep only scan ranges that belong to scan nodes of THIS fragment; the shared
        // bucket map may also hold ranges for other fragments' scan nodes.
        Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
        for (Integer scanNodeId : nodeScanRanges.keySet()) {
            if (scanNodeIds.contains(scanNodeId)) {
                filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
            }
        }
        Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);

        if (!addressToScanRanges.containsKey(address)) {
            addressToScanRanges.put(address, Lists.newArrayList());
        }
        addressToScanRanges.get(address).add(filteredScanRanges);
    }
    FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
    for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
            : addressToScanRanges.entrySet()) {
        List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
        Map<Integer, List<TScanRangeParams>> range
                = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
        // Never create more instances than there are buckets on this host.
        int expectedInstanceNum = 1;
        if (parallelExecInstanceNum > 1) {
            expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
        }
        // Split this host's buckets into near-equal chunks, one chunk per instance.
        List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                = ListUtil.splitBySize(scanRange, expectedInstanceNum);

        for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                : perInstanceScanRanges) {
            FInstanceExecParam instanceParam = new FInstanceExecParam(
                    null, addressScanRange.getKey(), 0, params);

            for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                instanceParam.addBucketSeq(nodeScanRangeMap.first);
                for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                        : nodeScanRangeMap.second.entrySet()) {
                    // Accumulate into both the per-host assignment and the per-instance
                    // ranges; the empty-list seeding is keyed off the instance-local map.
                    if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                        range.put(nodeScanRange.getKey(), Lists.newArrayList());
                        instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                    }
                    range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                    instanceParam.perNodeScanRanges.get(nodeScanRange.getKey())
                            .addAll(nodeScanRange.getValue());
                }
            }
            params.instanceExecParams.add(instanceParam);
        }
    }
}
}
// Scan ranges grouped by bucket sequence, per fragment.
// NOTE(review): this field does not appear to be read in the visible code (the bucket
// logic above uses fragmentIdBucketSeqToScanRangeMap) -- confirm it is still needed.
private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdTobucketSeqToScanRangeMap = Maps.newHashMap();
// Exec address chosen for each (fragment, bucket sequence) pair.
private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap = Maps.newHashMap();
// Ids of the scan nodes belonging to each fragment.
private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds = Maps.newHashMap();
// Fragment ids that participate in colocate join (used to scale down per-instance memory).
private final Set<Integer> colocateFragmentIds = new HashSet<>();
// Controller handling bucket-shuffle-join host/bucket assignment; shares the scan-node map above.
private final BucketShuffleJoinController bucketShuffleJoinController
        = new BucketShuffleJoinController(fragmentIdToScanNodeIds);
/**
 * Execution state of one plan fragment instance on one backend (non-pipeline engine).
 * Tracks the thrift RPC parameters, the runtime profile reported back by the BE,
 * and initiation/completion/cancellation status. Methods that can race with
 * status-report callbacks are individually synchronized.
 */
public class BackendExecState {
    TExecPlanFragmentParams rpcParams;
    PlanFragmentId fragmentId;
    // True once the exec RPC has been handed to the proxy; cancel is a no-op before that.
    boolean initiated;
    // Written by the report-handling thread, read by others -- hence volatile.
    volatile boolean done;
    boolean hasCanceled;
    int profileFragmentId;
    RuntimeProfile instanceProfile;
    RuntimeProfile loadChannelProfile;
    TNetworkAddress brpcAddress;
    TNetworkAddress address;
    Backend backend;
    // Backend's missing-heartbeat timestamp captured at construction; a later value
    // plus a dead backend means the BE went down while this query was running.
    long lastMissingHeartbeatTime = -1;
    TUniqueId instanceId;

    /**
     * @param instanceId index into the fragment's instanceExecParams list (not the
     *                   TUniqueId; that is looked up from the exec params)
     */
    public BackendExecState(PlanFragmentId fragmentId, int instanceId, int profileFragmentId,
            TExecPlanFragmentParams rpcParams, Map<TNetworkAddress, Long> addressToBackendID,
            RuntimeProfile loadChannelProfile) {
        this.profileFragmentId = profileFragmentId;
        this.fragmentId = fragmentId;
        this.rpcParams = rpcParams;
        this.initiated = false;
        this.done = false;
        FInstanceExecParam fi = fragmentExecParamsMap.get(fragmentId).instanceExecParams.get(instanceId);
        this.instanceId = fi.instanceId;
        this.address = fi.host;
        this.backend = idToBackend.get(addressToBackendID.get(address));
        this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());

        String name = "Instance " + DebugUtil.printId(fi.instanceId) + " (host=" + address + ")";
        this.loadChannelProfile = loadChannelProfile;
        this.instanceProfile = new RuntimeProfile(name);
        this.hasCanceled = false;
        this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
    }

    /**
     * Some information common to all Fragments does not need to be sent repeatedly.
     * Therefore, when we confirm that a certain BE has accepted the information,
     * we will delete the information in the subsequent Fragment to avoid repeated sending.
     * This information can be obtained from the cache of BE.
     */
    public void unsetFields() {
        this.rpcParams.unsetDescTbl();
        this.rpcParams.unsetFileScanParams();
        this.rpcParams.unsetCoord();
        this.rpcParams.unsetQueryGlobals();
        this.rpcParams.unsetResourceInfo();
        this.rpcParams.setIsSimplifiedParam(true);
    }

    /**
     * Merges a status report from the BE into the local profiles.
     *
     * @return false if this instance already finished (stale report), true otherwise
     */
    public synchronized boolean updateProfile(TReportExecStatusParams params) {
        if (this.done) {
            // Duplicate or late report after completion; ignore it.
            return false;
        }

        if (params.isSetProfile()) {
            instanceProfile.update(params.profile);
        }
        if (params.isSetLoadChannelProfile()) {
            loadChannelProfile.update(params.loadChannelProfile);
        }
        this.done = params.done;
        if (statsErrorEstimator != null) {
            statsErrorEstimator.updateExactReturnedRows(params);
        }
        return true;
    }

    /** Pretty-prints this instance's profile into {@code builder}. */
    public synchronized void printProfile(StringBuilder builder) {
        this.instanceProfile.computeTimeInProfile();
        this.instanceProfile.prettyPrint(builder, "");
    }

    /**
     * Sends a cancel RPC for this fragment instance.
     * No-op (returns false) if the RPC was never sent, the instance already
     * finished, or it was already cancelled.
     *
     * @return true if a cancel was issued (hasCanceled is set even when the
     *         cancel RPC itself fails; the BE is then blacklisted)
     */
    public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
                    + " fragment instance id={}, reason: {}",
                    this.initiated, this.done, this.hasCanceled, backend.getId(),
                    DebugUtil.printId(fragmentInstanceId()), cancelReason.name());
        }
        try {
            if (!this.initiated) {
                return false;
            }
            // don't cancel if it is already finished
            if (this.done) {
                return false;
            }
            if (this.hasCanceled) {
                return false;
            }

            Span span = ConnectContext.get() != null
                    ? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
                    .setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
                    : Telemetry.getNoopSpan();
            try (Scope scope = span.makeCurrent()) {
                BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
                        fragmentInstanceId(), cancelReason);
            } catch (RpcException e) {
                span.recordException(e);
                LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
                        brpcAddress.getPort());
                // A BE we cannot reach for cancel is likely unhealthy; blacklist it.
                SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
            } finally {
                span.end();
            }

            this.hasCanceled = true;
        } catch (Exception e) {
            LOG.warn("catch a exception", e);
            return false;
        }
        return true;
    }

    /**
     * Computes timing info in the profile after validating the fragment id range.
     * NOTE(review): the log message says [0, maxFragmentId) but the check only rejects
     * values strictly greater than maxFragmentId -- confirm whether == is intended.
     */
    public synchronized boolean computeTimeInProfile(int maxFragmentId) {
        if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
            LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
            return false;
        }
        instanceProfile.computeTimeInProfile();
        return true;
    }

    /** @return false if the backend missed a heartbeat since this state was created and is now dead */
    public boolean isBackendStateHealthy() {
        if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
            LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                    backend.getId(), jobId);
            return false;
        }
        return true;
    }

    /** Builds the (id, fragment, address) summary shown in query statistics. */
    public FragmentInstanceInfo buildFragmentInstanceInfo() {
        return new QueryStatisticsItem.FragmentInstanceInfo.Builder().instanceId(fragmentInstanceId())
                .fragmentId(String.valueOf(fragmentId)).address(this.address).build();
    }

    private TUniqueId fragmentInstanceId() {
        return this.rpcParams.params.getFragmentInstanceId();
    }
}
/**
 * Execution state of one plan fragment on one backend for the pipeline engine.
 * Unlike {@link BackendExecState}, a single context covers ALL instances of the
 * fragment on that backend ({@code rpcParams.local_params}, one entry per instance);
 * the context is "done" once every instance has reported done.
 */
public class PipelineExecContext {
    TPipelineFragmentParams rpcParams;
    PlanFragmentId fragmentId;
    // True once the exec RPC has been handed to the proxy; cancel is a no-op before that.
    boolean initiated;
    // Written by the report-handling thread, read by others -- hence volatile.
    volatile boolean done;
    boolean hasCanceled;
    // Shared map: fragment instance id -> its runtime profile.
    Map<TUniqueId, RuntimeProfile> fragmentInstancesMap;
    RuntimeProfile loadChannelProfile;
    int cancelProgress = 0;
    int profileFragmentId;
    TNetworkAddress brpcAddress;
    TNetworkAddress address;
    Backend backend;
    // Backend's missing-heartbeat timestamp at construction; used to detect a BE
    // that died after joining this query.
    long lastMissingHeartbeatTime = -1;
    // Number of instances that have reported done so far.
    long profileReportProgress = 0;
    private final int numInstances;

    public PipelineExecContext(PlanFragmentId fragmentId, int profileFragmentId,
            TPipelineFragmentParams rpcParams, Long backendId,
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap,
            RuntimeProfile loadChannelProfile) {
        this.profileFragmentId = profileFragmentId;
        this.fragmentId = fragmentId;
        this.rpcParams = rpcParams;
        this.numInstances = rpcParams.local_params.size();
        this.fragmentInstancesMap = fragmentInstancesMap;
        this.loadChannelProfile = loadChannelProfile;

        this.initiated = false;
        this.done = false;

        this.backend = idToBackend.get(backendId);
        this.address = new TNetworkAddress(backend.getHost(), backend.getBePort());
        this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());

        this.hasCanceled = false;
        this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
    }

    /**
     * Some information common to all Fragments does not need to be sent repeatedly.
     * Therefore, when we confirm that a certain BE has accepted the information,
     * we will delete the information in the subsequent Fragment to avoid repeated sending.
     * This information can be obtained from the cache of BE.
     */
    public void unsetFields() {
        this.rpcParams.unsetDescTbl();
        this.rpcParams.unsetFileScanParams();
        this.rpcParams.unsetCoord();
        this.rpcParams.unsetQueryGlobals();
        this.rpcParams.unsetResourceInfo();
        this.rpcParams.setIsSimplifiedParam(true);
    }

    /**
     * Merges a per-instance status report into the instance's profile and advances
     * the done-counter; marks the whole context done once every instance reported done.
     * NOTE(review): assumes params.fragment_instance_id is always present in
     * fragmentInstancesMap -- an unknown id would NPE here; confirm the caller
     * guarantees this.
     *
     * @return false if the instance already reported done (stale report)
     */
    public synchronized boolean updateProfile(TReportExecStatusParams params) {
        RuntimeProfile profile = fragmentInstancesMap.get(params.fragment_instance_id);
        if (params.done && profile.getIsDone()) {
            // duplicate packet
            return false;
        }

        if (params.isSetProfile()) {
            profile.update(params.profile);
        }
        if (params.isSetLoadChannelProfile()) {
            loadChannelProfile.update(params.loadChannelProfile);
        }
        if (params.done) {
            profile.setIsDone(true);
            profileReportProgress++;
        }
        if (profileReportProgress == numInstances) {
            this.done = true;
        }
        return true;
    }

    /** Pretty-prints every instance profile of this context into {@code builder}. */
    public synchronized void printProfile(StringBuilder builder) {
        this.fragmentInstancesMap.values().stream().forEach(p -> {
            p.computeTimeInProfile();
            p.prettyPrint(builder, "");
        });
    }

    /**
     * Sends a cancel RPC for every not-yet-finished instance of this context.
     * No-op (returns false) if the RPC was never sent, the context already
     * finished, or it was already cancelled.
     *
     * @return true if at least one instance was cancelled; on success all
     *         instance profiles are marked cancelled
     */
    public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
        if (!this.initiated) {
            return false;
        }
        // don't cancel if it is already finished
        if (this.done) {
            return false;
        }
        if (this.hasCanceled) {
            return false;
        }
        for (TPipelineInstanceParams localParam : rpcParams.local_params) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
                        + " fragment instance id={}, reason: {}",
                        this.initiated, this.done, this.hasCanceled, backend.getId(),
                        DebugUtil.printId(localParam.fragment_instance_id), cancelReason.name());
            }

            RuntimeProfile profile = fragmentInstancesMap.get(localParam.fragment_instance_id);
            if (profile.getIsDone() || profile.getIsCancel()) {
                // Skip instances that already finished or were cancelled individually.
                continue;
            }

            // Set as soon as the first cancellable instance is found; also serves as
            // the "did we cancel anything" flag checked after the loop.
            this.hasCanceled = true;
            try {
                Span span = ConnectContext.get() != null
                        ? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
                        .setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
                        : Telemetry.getNoopSpan();
                try (Scope scope = span.makeCurrent()) {
                    BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
                            localParam.fragment_instance_id, cancelReason);
                } catch (RpcException e) {
                    span.recordException(e);
                    LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
                            brpcAddress.getPort());
                    // A BE we cannot reach for cancel is likely unhealthy; blacklist it.
                    SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
                } finally {
                    span.end();
                }
            } catch (Exception e) {
                LOG.warn("catch a exception", e);
                return false;
            }
        }
        if (!this.hasCanceled) {
            return false;
        }

        for (int i = 0; i < this.numInstances; i++) {
            fragmentInstancesMap.get(rpcParams.local_params.get(i).fragment_instance_id).setIsCancel(true);
        }
        cancelProgress = numInstances;
        return true;
    }

    /**
     * Validates the profile fragment id range.
     * NOTE(review): the log message says [0, maxFragmentId) but the check only rejects
     * values strictly greater than maxFragmentId -- confirm whether == is intended.
     */
    public synchronized boolean computeTimeInProfile(int maxFragmentId) {
        if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
            LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
            return false;
        }
        return true;
    }

    /** @return false if the backend missed a heartbeat since this context was created and is now dead */
    public boolean isBackendStateHealthy() {
        if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
            LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                    backend.getId(), jobId);
            return false;
        }
        return true;
    }

    /** Builds one (id, fragment, address) summary per instance for query statistics. */
    public List<QueryStatisticsItem.FragmentInstanceInfo> buildFragmentInstanceInfo() {
        return this.rpcParams.local_params.stream().map(it -> new FragmentInstanceInfo.Builder()
                .instanceId(it.fragment_instance_id).fragmentId(String.valueOf(fragmentId))
                .address(this.address).build()).collect(Collectors.toList());
    }
}
/**
 * A set of {@link BackendExecState} that are all sent to the SAME backend,
 * so they can share one exec RPC and the common (de-duplicated) fields.
 */
public class BackendExecStates {
    long beId;
    TNetworkAddress brpcAddr;
    List<BackendExecState> states = Lists.newArrayList();
    // When true the BE waits for an explicit "start" RPC before executing.
    boolean twoPhaseExecution = false;
    ScopedSpan scopedSpan = new ScopedSpan();

    public BackendExecStates(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
    }

    public void addState(BackendExecState state) {
        this.states.add(state);
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (BackendExecState state : states) {
            if (first) {
                first = false;
                continue;
            }
            state.unsetFields();
        }
    }

    /**
     * Sends all fragments of this backend in one batched async RPC.
     * Marks every state initiated BEFORE sending so a later cancel is not skipped.
     * An RpcException is converted into an already-completed future carrying a
     * THRIFT_RPC_ERROR status, so callers handle send failures uniformly.
     */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TExecPlanFragmentParamsList paramsList = new TExecPlanFragmentParamsList();
            for (BackendExecState state : states) {
                state.initiated = true;
                paramsList.addToParamsList(state.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            // DO NOT throw exception here, return a complete future with error code,
            // so that the following logic will cancel the fragment.
            return futureWithException(e);
        }
    }

    /** Second phase of two-phase execution: tells the BE to start the prepared fragments. */
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    /** Wraps an RpcException into an already-done future with a THRIFT_RPC_ERROR status. */
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                return get();
            }
        };
    }
}
/**
 * A set of {@link PipelineExecContext} that are all sent to the SAME backend,
 * so they can share one exec RPC and the common (de-duplicated) fields.
 * Pipeline-engine counterpart of {@code BackendExecStates}.
 */
public class PipelineExecContexts {
    long beId;
    TNetworkAddress brpcAddr;
    List<PipelineExecContext> ctxs = Lists.newArrayList();
    // When true the BE waits for an explicit "start" RPC before executing.
    boolean twoPhaseExecution = false;
    ScopedSpan scopedSpan = new ScopedSpan();
    // Total number of fragment instances carried by this backend.
    int instanceNumber;

    public PipelineExecContexts(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution,
            int instanceNumber) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
        this.instanceNumber = instanceNumber;
    }

    public void addContext(PipelineExecContext ctx) {
        this.ctxs.add(ctx);
    }

    public int getInstanceNumber() {
        return instanceNumber;
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (PipelineExecContext ctx : ctxs) {
            if (first) {
                first = false;
                continue;
            }
            ctx.unsetFields();
        }
    }

    /**
     * Sends all fragments of this backend in one batched async RPC.
     * Marks every context initiated BEFORE sending so a later cancel is not skipped.
     * An RpcException is converted into an already-completed future carrying a
     * THRIFT_RPC_ERROR status, so callers handle send failures uniformly.
     */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TPipelineFragmentParamsList paramsList = new TPipelineFragmentParamsList();
            for (PipelineExecContext cts : ctxs) {
                cts.initiated = true;
                paramsList.addToParamsList(cts.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            // DO NOT throw exception here, return a complete future with error code,
            // so that the following logic will cancel the fragment.
            return futureWithException(e);
        }
    }

    /** Second phase of two-phase execution: tells the BE to start the prepared fragments. */
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    /** Wraps an RpcException into an already-done future with a THRIFT_RPC_ERROR status. */
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                return get();
            }
        };
    }
}
/**
 * Execution parameters shared by all instances of one plan fragment:
 * destinations, per-exchange sender counts, the per-instance exec params, and
 * the scan range assignment. Provides conversion to the thrift request shapes
 * of both the classic engine ({@link #toThrift}) and the pipeline engine
 * ({@link #toTPipelineParams}).
 */
protected class FragmentExecParams {
    public PlanFragment fragment;
    public List<TPlanFragmentDestination> destinations = Lists.newArrayList();
    public Map<Integer, Integer> perExchNumSenders = Maps.newHashMap();

    public List<PlanFragmentId> inputFragments = Lists.newArrayList();
    public List<FInstanceExecParam> instanceExecParams = Lists.newArrayList();
    public FragmentScanRangeAssignment scanRangeAssignment = new FragmentScanRangeAssignment();

    public FragmentExecParams(PlanFragment fragment) {
        this.fragment = fragment;
    }

    /**
     * Builds one TExecPlanFragmentParams per instance for the non-pipeline engine.
     *
     * @param backendNum starting backend sequence number; incremented per instance
     * @return one request per entry in {@code instanceExecParams}, in order
     */
    List<TExecPlanFragmentParams> toThrift(int backendNum) {
        List<TExecPlanFragmentParams> paramsList = Lists.newArrayList();

        for (int i = 0; i < instanceExecParams.size(); ++i) {
            final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
            TExecPlanFragmentParams params = new TExecPlanFragmentParams();
            params.setProtocolVersion(PaloInternalServiceVersion.V1);
            params.setFragment(fragment.toThrift());
            params.setDescTbl(descTable);
            params.setParams(new TPlanFragmentExecParams());
            params.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
            params.params.setQueryId(queryId);
            params.params.setFragmentInstanceId(instanceExecParam.instanceId);
            Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
            if (scanRanges == null) {
                // Fragments without scan nodes still need a (possibly empty) map.
                scanRanges = Maps.newHashMap();
            }

            params.params.setPerNodeScanRanges(scanRanges);
            params.params.setPerExchNumSenders(perExchNumSenders);
            params.params.setDestinations(destinations);
            params.params.setSenderId(i);
            params.params.setNumSenders(instanceExecParams.size());
            params.setCoord(coordAddress);
            params.setBackendNum(backendNum++);
            params.setQueryGlobals(queryGlobals);
            params.setQueryOptions(queryOptions);
            params.query_options.setEnablePipelineEngine(false);
            params.params.setSendQueryStatisticsWithEveryBatch(
                    fragment.isTransferQueryStatisticsWithEveryBatch());
            params.params.setRuntimeFilterParams(new TRuntimeFilterParams());
            params.params.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
            // Only the designated merge instance carries the full runtime-filter
            // routing information (targets, builder counts, filter definitions).
            if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    if (!ridToTargetParam.containsKey(rf.getFilterId())) {
                        continue;
                    }
                    List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
                    rf.computeUseRemoteRfOpt();
                    if (rf.getUseRemoteRfOpt()) {
                        // V2: group target instance ids by their address.
                        Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            } else {
                                targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
                                        new TRuntimeFilterTargetParamsV2());
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_addr
                                        = targetParam.targetFragmentInstanceAddr;
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        = new ArrayList<>();
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            }
                        }

                        params.params.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
                                new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
                    } else {
                        List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
                                    targetParam.targetFragmentInstanceAddr));
                        }
                        params.params.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
                                targetParams);
                    }
                }
                for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
                    params.params.runtime_filter_params.putToRuntimeFilterBuilderNum(
                            entry.getKey().asInt(), entry.getValue());
                }
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    params.params.runtime_filter_params.putToRidToRuntimeFilter(
                            rf.getFilterId().asInt(), rf.toThrift());
                }
            }
            params.setFileScanParams(fileScanRangeParamsMap);
            paramsList.add(params);
        }
        return paramsList;
    }

    /**
     * Builds one TPipelineFragmentParams per HOST for the pipeline engine; all
     * instances on the same host are attached as local_params of that request.
     *
     * @param backendNum starting backend sequence number; incremented per instance
     * @return request per host, keyed by the host's exec address
     */
    Map<TNetworkAddress, TPipelineFragmentParams> toTPipelineParams(int backendNum) {
        long memLimit = queryOptions.getMemLimit();
        // Colocate fragments run several instances on one node; scale the limit down.
        if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
            int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceExecParams.size());
            memLimit = queryOptions.getMemLimit() / rate;
        }

        // Fix: use the diamond operator instead of a raw HashMap (unchecked warning).
        Map<TNetworkAddress, TPipelineFragmentParams> res = new HashMap<>();
        for (int i = 0; i < instanceExecParams.size(); ++i) {
            final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
            // Create the host-level request lazily on first instance for the host.
            if (!res.containsKey(instanceExecParam.host)) {
                TPipelineFragmentParams params = new TPipelineFragmentParams();

                params.setProtocolVersion(PaloInternalServiceVersion.V1);
                params.setDescTbl(descTable);
                params.setQueryId(queryId);
                params.setPerExchNumSenders(perExchNumSenders);
                params.setDestinations(destinations);
                params.setNumSenders(instanceExecParams.size());
                params.setCoord(coordAddress);
                params.setQueryGlobals(queryGlobals);
                params.setQueryOptions(queryOptions);
                params.query_options.setEnablePipelineEngine(true);
                params.query_options.setMemLimit(memLimit);
                params.setSendQueryStatisticsWithEveryBatch(
                        fragment.isTransferQueryStatisticsWithEveryBatch());
                params.setFragment(fragment.toThrift());
                params.setLocalParams(Lists.newArrayList());
                if (tWorkloadGroups != null) {
                    params.setWorkloadGroups(tWorkloadGroups);
                }

                params.setFileScanParams(fileScanRangeParamsMap);
                res.put(instanceExecParam.host, params);
            }
            TPipelineFragmentParams params = res.get(instanceExecParam.host);
            TPipelineInstanceParams localParams = new TPipelineInstanceParams();

            localParams.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
            localParams.setFragmentInstanceId(instanceExecParam.instanceId);
            Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
            Map<Integer, Boolean> perNodeSharedScans = instanceExecParam.perNodeSharedScans;
            if (scanRanges == null) {
                // Fragments without scan nodes still need (possibly empty) maps.
                scanRanges = Maps.newHashMap();
                perNodeSharedScans = Maps.newHashMap();
            }
            localParams.setPerNodeScanRanges(scanRanges);
            localParams.setPerNodeSharedScans(perNodeSharedScans);
            localParams.setSenderId(i);
            localParams.setBackendNum(backendNum++);
            localParams.setRuntimeFilterParams(new TRuntimeFilterParams());
            localParams.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
            // Only the designated merge instance carries the full runtime-filter
            // routing information (targets, builder counts, filter definitions).
            if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    if (!ridToTargetParam.containsKey(rf.getFilterId())) {
                        continue;
                    }
                    List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
                    rf.computeUseRemoteRfOpt();
                    if (rf.getUseRemoteRfOpt()) {
                        // V2: group target instance ids by their address.
                        Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            } else {
                                targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
                                        new TRuntimeFilterTargetParamsV2());
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_addr
                                        = targetParam.targetFragmentInstanceAddr;
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        = new ArrayList<>();
                                targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
                                        .target_fragment_instance_ids
                                        .add(targetParam.targetFragmentInstanceId);
                            }
                        }

                        localParams.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
                                new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
                    } else {
                        List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
                        for (FRuntimeFilterTargetParam targetParam : fParams) {
                            targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
                                    targetParam.targetFragmentInstanceAddr));
                        }
                        localParams.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
                                targetParams);
                    }
                }
                for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
                    localParams.runtime_filter_params.putToRuntimeFilterBuilderNum(
                            entry.getKey().asInt(), entry.getValue());
                }
                for (RuntimeFilter rf : assignedRuntimeFilters) {
                    localParams.runtime_filter_params.putToRidToRuntimeFilter(
                            rf.getFilterId().asInt(), rf.toThrift());
                }
            }
            params.getLocalParams().add(localParams);
        }

        return res;
    }

    /** Appends a compact, human-readable rendering of scan ranges to {@code sb} (debug/trace). */
    public void appendScanRange(StringBuilder sb, List<TScanRangeParams> params) {
        sb.append("range=[");
        int idx = 0;
        for (TScanRangeParams range : params) {
            TPaloScanRange paloScanRange = range.getScanRange().getPaloScanRange();
            if (paloScanRange != null) {
                if (idx++ != 0) {
                    sb.append(",");
                }
                sb.append("{tid=").append(paloScanRange.getTabletId())
                        .append(",ver=").append(paloScanRange.getVersion()).append("}");
            }
            TEsScanRange esScanRange = range.getScanRange().getEsScanRange();
            if (esScanRange != null) {
                sb.append("{ index=").append(esScanRange.getIndex())
                        .append(", shardid=").append(esScanRange.getShardId())
                        .append("}");
            }
        }
        sb.append("]");
    }

    /** Appends a debug summary of the fragment plan and all of its instances to {@code sb}. */
    public void appendTo(StringBuilder sb) {
        // append fragment
        sb.append("{plan=");
        fragment.getPlanRoot().appendTrace(sb);
        sb.append(",instance=[");
        // append instance
        for (int i = 0; i < instanceExecParams.size(); ++i) {
            if (i != 0) {
                sb.append(",");
            }
            TNetworkAddress address = instanceExecParams.get(i).host;
            Map<Integer, List<TScanRangeParams>> scanRanges =
                    scanRangeAssignment.get(address);
            sb.append("{");
            sb.append("id=").append(DebugUtil.printId(instanceExecParams.get(i).instanceId));
            sb.append(",host=").append(instanceExecParams.get(i).host);
            if (scanRanges == null) {
                sb.append("}");
                continue;
            }
            sb.append(",range=[");
            int eIdx = 0;
            for (Map.Entry<Integer, List<TScanRangeParams>> entry : scanRanges.entrySet()) {
                if (eIdx++ != 0) {
                    sb.append(",");
                }
                sb.append("id").append(entry.getKey()).append(",");
                appendScanRange(sb, entry.getValue());
            }
            sb.append("]");
            sb.append("}");
        }
        sb.append("]");
        sb.append("}");
    }
}
|
```suggestion
assertThatFuture(allocatedFuture).eventuallySucceeds();
```
The test currently succeeds even when the production code change is not applied. We reach the right code path in this test, but we are not asserting the returned future correctly as far as I can see. (Hint: you have to statically import `FlinkAssertions.assertThatFuture` for the above code change to work.)
|
/**
 * Verifies that the completion of a pending slot request is ignored (and does not
 * fail the allocation future) when the slot's task manager was removed while the
 * request was still in flight.
 */
void testAllocationUpdatesIgnoredIfSlotRemoved() throws Exception {
    final FineGrainedTaskManagerTracker taskManagerTracker =
            new FineGrainedTaskManagerTracker();
    final CompletableFuture<
                    Tuple6<
                            SlotID,
                            JobID,
                            AllocationID,
                            ResourceProfile,
                            String,
                            ResourceManagerId>>
            requestFuture = new CompletableFuture<>();
    final CompletableFuture<Acknowledge> responseFuture = new CompletableFuture<>();
    // Gateway that captures the slot request and lets the test control the response.
    final TestingTaskExecutorGateway taskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setRequestSlotFunction(
                            tuple6 -> {
                                requestFuture.complete(tuple6);
                                return responseFuture;
                            })
                    .createTestingTaskExecutorGateway();
    final TaskExecutorConnection taskExecutorConnection =
            new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
    taskManagerTracker.addTaskManager(
            taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
    final ResourceTracker resourceTracker = new DefaultResourceTracker();
    final JobID jobId = new JobID();
    final SlotStatusSyncer slotStatusSyncer =
            new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
    slotStatusSyncer.initialize(
            taskManagerTracker,
            resourceTracker,
            ResourceManagerId.generate(),
            EXECUTOR_RESOURCE.getExecutor());

    final CompletableFuture<Void> allocatedFuture =
            slotStatusSyncer.allocateSlot(
                    taskExecutorConnection.getInstanceID(),
                    jobId,
                    "address",
                    ResourceProfile.ANY);
    final AllocationID allocationId = requestFuture.get().f2;
    assertThat(resourceTracker.getAcquiredResources(jobId))
            .contains(ResourceRequirement.create(ResourceProfile.ANY, 1));
    assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
            .hasValueSatisfying(
                    slot -> {
                        assertThat(slot.getJobId()).isEqualTo(jobId);
                        assertThat(slot.getState()).isEqualTo(SlotState.PENDING);
                    });

    // Removing the task manager must also drop the pending slot.
    taskManagerTracker.removeTaskManager(taskExecutorConnection.getInstanceID());
    assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId)).isEmpty();

    responseFuture.complete(Acknowledge.get());
    // Fix (per review): isNotCompletedExceptionally() passes even for a still-pending
    // future, so it cannot detect a missing/failed completion. Assert the future
    // actually (eventually) succeeds. Prefer a static import of
    // FlinkAssertions.assertThatFuture once imports are available.
    org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture(allocatedFuture)
            .eventuallySucceeds();
}
|
assertThat(allocatedFuture).isNotCompletedExceptionally();
|
/**
 * Verifies that a pending allocation's completion is ignored when the task manager
 * (and thus the slot) has already been removed. The shared allocation setup and
 * final future assertion live in the testSlotAllocation helper.
 */
void testAllocationUpdatesIgnoredIfSlotRemoved() throws Exception {
    testSlotAllocation(
            (slotStatusSyncer, taskManagerTracker, instanceID, allocationId) -> {
                // Removing the TM must also drop the pending slot before the
                // slot-request response arrives.
                taskManagerTracker.removeTaskManager(instanceID);
                assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
                        .isEmpty();
            });
}
|
class DefaultSlotStatusSyncerTest {
// Timeout applied to slot requests sent to the task executor.
private static final Time TASK_MANAGER_REQUEST_TIMEOUT = Time.seconds(10);
// Connection stub for tests that do not care about gateway behavior.
private static final TaskExecutorConnection TASK_EXECUTOR_CONNECTION =
        new TaskExecutorConnection(
                ResourceID.generate(),
                new TestingTaskExecutorGatewayBuilder().createTestingTaskExecutorGateway());

// Shared executor handed to the syncer as its main-thread executor.
@RegisterExtension
static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
        TestingUtils.defaultExecutorExtension();
/**
 * Verifies the happy path of slot allocation: the request reaches the task
 * executor, the resource tracker and task manager tracker record the pending
 * slot, and the allocation future completes once the TM acknowledges.
 */
@Test
void testAllocateSlot() throws Exception {
    final FineGrainedTaskManagerTracker taskManagerTracker =
            new FineGrainedTaskManagerTracker();
    final CompletableFuture<
                    Tuple6<
                            SlotID,
                            JobID,
                            AllocationID,
                            ResourceProfile,
                            String,
                            ResourceManagerId>>
            requestFuture = new CompletableFuture<>();
    final CompletableFuture<Acknowledge> responseFuture = new CompletableFuture<>();
    // Gateway that captures the slot request and lets the test control the response.
    final TestingTaskExecutorGateway taskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setRequestSlotFunction(
                            tuple6 -> {
                                requestFuture.complete(tuple6);
                                return responseFuture;
                            })
                    .createTestingTaskExecutorGateway();
    final TaskExecutorConnection taskExecutorConnection =
            new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
    taskManagerTracker.addTaskManager(
            taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
    final ResourceTracker resourceTracker = new DefaultResourceTracker();
    final JobID jobId = new JobID();
    final SlotStatusSyncer slotStatusSyncer =
            new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
    slotStatusSyncer.initialize(
            taskManagerTracker,
            resourceTracker,
            ResourceManagerId.generate(),
            EXECUTOR_RESOURCE.getExecutor());

    final CompletableFuture<Void> allocatedFuture =
            slotStatusSyncer.allocateSlot(
                    taskExecutorConnection.getInstanceID(),
                    jobId,
                    "address",
                    ResourceProfile.ANY);
    final AllocationID allocationId = requestFuture.get().f2;
    assertThat(resourceTracker.getAcquiredResources(jobId))
            .contains(ResourceRequirement.create(ResourceProfile.ANY, 1));
    assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
            .hasValueSatisfying(
                    slot -> {
                        assertThat(slot.getJobId()).isEqualTo(jobId);
                        assertThat(slot.getState()).isEqualTo(SlotState.PENDING);
                    });

    responseFuture.complete(Acknowledge.get());
    // Fix (same issue flagged by review on the sibling test): isNotCompletedExceptionally()
    // also passes for a still-pending future; assert actual (eventual) success instead.
    // Prefer a static import of FlinkAssertions.assertThatFuture once imports are available.
    org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture(allocatedFuture)
            .eventuallySucceeds();
}
@Test
void testAllocationUpdatesIgnoredIfSlotFreed() throws Exception {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final CompletableFuture<
Tuple6<
SlotID,
JobID,
AllocationID,
ResourceProfile,
String,
ResourceManagerId>>
requestFuture = new CompletableFuture<>();
final CompletableFuture<Acknowledge> responseFuture = new CompletableFuture<>();
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(
tuple6 -> {
requestFuture.complete(tuple6);
return responseFuture;
})
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
taskManagerTracker.addTaskManager(
taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final CompletableFuture<Void> allocatedFuture =
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(),
jobId,
"address",
ResourceProfile.ANY);
final AllocationID allocationId = requestFuture.get().f2;
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(ResourceProfile.ANY, 1));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
.hasValueSatisfying(
slot -> {
assertThat(slot.getJobId()).isEqualTo(jobId);
assertThat(slot.getState()).isEqualTo(SlotState.PENDING);
});
slotStatusSyncer.freeSlot(allocationId);
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId)).isEmpty();
responseFuture.complete(Acknowledge.get());
assertThat(allocatedFuture).isNotCompletedExceptionally();
}
@Test
void testAllocateSlotFailsWithException() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(
ignored ->
FutureUtils.completedExceptionally(
new TimeoutException("timeout")))
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
taskManagerTracker.addTaskManager(
taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final CompletableFuture<Void> allocatedFuture =
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(),
jobId,
"address",
ResourceProfile.ANY);
assertThatThrownBy(allocatedFuture::get).hasCauseInstanceOf(TimeoutException.class);
assertThat(resourceTracker.getAcquiredResources(jobId)).isEmpty();
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
}
@Test
@Test
void testFreeSlot() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final AllocationID allocationId = new AllocationID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
taskManagerTracker.addTaskManager(
TASK_EXECUTOR_CONNECTION, ResourceProfile.ANY, ResourceProfile.ANY);
taskManagerTracker.notifySlotStatus(
allocationId,
jobId,
TASK_EXECUTOR_CONNECTION.getInstanceID(),
ResourceProfile.ANY,
SlotState.ALLOCATED);
resourceTracker.notifyAcquiredResource(jobId, ResourceProfile.ANY);
slotStatusSyncer.freeSlot(new AllocationID());
assertThat(resourceTracker.getAcquiredResources(jobId))
.containsExactly(ResourceRequirement.create(ResourceProfile.ANY, 1));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId)).isPresent();
slotStatusSyncer.freeSlot(allocationId);
assertThat(resourceTracker.getAcquiredResources(jobId)).isEmpty();
assertThat(
taskManagerTracker.getRegisteredTaskManager(
TASK_EXECUTOR_CONNECTION.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
}
@Test
void testSlotStatusProcessing() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(ignored -> new CompletableFuture<>())
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
final JobID jobId = new JobID();
final AllocationID allocationId1 = new AllocationID();
final AllocationID allocationId2 = new AllocationID();
final SlotID slotId1 = new SlotID(taskExecutorConnection.getResourceID(), 0);
final SlotID slotId2 = new SlotID(taskExecutorConnection.getResourceID(), 1);
final SlotID slotId3 = new SlotID(taskExecutorConnection.getResourceID(), 2);
final ResourceProfile totalResource = ResourceProfile.fromResources(5, 20);
final ResourceProfile resource = ResourceProfile.fromResources(1, 4);
final SlotReport slotReport1 =
new SlotReport(
Arrays.asList(
new SlotStatus(slotId1, totalResource),
new SlotStatus(slotId2, resource, jobId, allocationId1),
new SlotStatus(slotId3, resource, jobId, allocationId2)));
final SlotReport slotReport2 =
new SlotReport(
Arrays.asList(
new SlotStatus(slotId3, resource),
new SlotStatus(slotId2, resource, jobId, allocationId1)));
taskManagerTracker.addTaskManager(taskExecutorConnection, totalResource, totalResource);
slotStatusSyncer.reportSlotStatus(taskExecutorConnection.getInstanceID(), slotReport1);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 2));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(3, 12)));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId1)).isPresent();
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId2)).isPresent();
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(), jobId, "address", resource);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 3));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(2, 8)));
final AllocationID allocationId3 =
taskManagerTracker.getRegisteredTaskManager(taskExecutorConnection.getInstanceID())
.get().getAllocatedSlots().keySet().stream()
.filter(
allocationId ->
!allocationId.equals(allocationId1)
&& !allocationId.equals(allocationId2))
.findAny()
.get();
slotStatusSyncer.reportSlotStatus(taskExecutorConnection.getInstanceID(), slotReport2);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 2));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(3, 12)));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId2)).isNotPresent();
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId1))
.hasValueSatisfying(
slot -> assertThat(slot.getState()).isEqualTo(SlotState.ALLOCATED));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId3))
.hasValueSatisfying(
slot -> assertThat(slot.getState()).isEqualTo(SlotState.PENDING));
}
}
|
class DefaultSlotStatusSyncerTest {
private static final Time TASK_MANAGER_REQUEST_TIMEOUT = Time.seconds(10);
private static final TaskExecutorConnection TASK_EXECUTOR_CONNECTION =
new TaskExecutorConnection(
ResourceID.generate(),
new TestingTaskExecutorGatewayBuilder().createTestingTaskExecutorGateway());
@RegisterExtension
static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
TestingUtils.defaultExecutorExtension();
@Test
void testSlotAllocationSucceeds() throws Exception {
testSlotAllocation((ignored0, ignored1, ignored2, ignored3) -> {});
}
@Test
void testAllocationUpdatesIgnoredIfSlotFreed() throws Exception {
testSlotAllocation(
(slotStatusSyncer, taskManagerTracker, ignored, allocationId) -> {
slotStatusSyncer.freeSlot(allocationId);
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
.isEmpty();
});
}
@Test
void testAllocateSlotFailsWithException() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(
ignored ->
FutureUtils.completedExceptionally(
new TimeoutException("timeout")))
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
taskManagerTracker.addTaskManager(
taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final CompletableFuture<Void> allocatedFuture =
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(),
jobId,
"address",
ResourceProfile.ANY);
assertThatThrownBy(allocatedFuture::get).hasCauseInstanceOf(TimeoutException.class);
assertThat(resourceTracker.getAcquiredResources(jobId)).isEmpty();
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
}
@Test
@Test
void testFreeSlot() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final AllocationID allocationId = new AllocationID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
taskManagerTracker.addTaskManager(
TASK_EXECUTOR_CONNECTION, ResourceProfile.ANY, ResourceProfile.ANY);
taskManagerTracker.notifySlotStatus(
allocationId,
jobId,
TASK_EXECUTOR_CONNECTION.getInstanceID(),
ResourceProfile.ANY,
SlotState.ALLOCATED);
resourceTracker.notifyAcquiredResource(jobId, ResourceProfile.ANY);
slotStatusSyncer.freeSlot(new AllocationID());
assertThat(resourceTracker.getAcquiredResources(jobId))
.containsExactly(ResourceRequirement.create(ResourceProfile.ANY, 1));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId)).isPresent();
slotStatusSyncer.freeSlot(allocationId);
assertThat(resourceTracker.getAcquiredResources(jobId)).isEmpty();
assertThat(
taskManagerTracker.getRegisteredTaskManager(
TASK_EXECUTOR_CONNECTION.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAllocatedSlots()).isEmpty());
}
@Test
void testSlotStatusProcessing() {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(ignored -> new CompletableFuture<>())
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
final JobID jobId = new JobID();
final AllocationID allocationId1 = new AllocationID();
final AllocationID allocationId2 = new AllocationID();
final SlotID slotId1 = new SlotID(taskExecutorConnection.getResourceID(), 0);
final SlotID slotId2 = new SlotID(taskExecutorConnection.getResourceID(), 1);
final SlotID slotId3 = new SlotID(taskExecutorConnection.getResourceID(), 2);
final ResourceProfile totalResource = ResourceProfile.fromResources(5, 20);
final ResourceProfile resource = ResourceProfile.fromResources(1, 4);
final SlotReport slotReport1 =
new SlotReport(
Arrays.asList(
new SlotStatus(slotId1, totalResource),
new SlotStatus(slotId2, resource, jobId, allocationId1),
new SlotStatus(slotId3, resource, jobId, allocationId2)));
final SlotReport slotReport2 =
new SlotReport(
Arrays.asList(
new SlotStatus(slotId3, resource),
new SlotStatus(slotId2, resource, jobId, allocationId1)));
taskManagerTracker.addTaskManager(taskExecutorConnection, totalResource, totalResource);
slotStatusSyncer.reportSlotStatus(taskExecutorConnection.getInstanceID(), slotReport1);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 2));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(3, 12)));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId1)).isPresent();
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId2)).isPresent();
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(), jobId, "address", resource);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 3));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(2, 8)));
final AllocationID allocationId3 =
taskManagerTracker.getRegisteredTaskManager(taskExecutorConnection.getInstanceID())
.get().getAllocatedSlots().keySet().stream()
.filter(
allocationId ->
!allocationId.equals(allocationId1)
&& !allocationId.equals(allocationId2))
.findAny()
.get();
slotStatusSyncer.reportSlotStatus(taskExecutorConnection.getInstanceID(), slotReport2);
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(resource, 2));
assertThat(
taskManagerTracker.getRegisteredTaskManager(
taskExecutorConnection.getInstanceID()))
.hasValueSatisfying(
taskManagerInfo ->
assertThat(taskManagerInfo.getAvailableResource())
.isEqualTo(ResourceProfile.fromResources(3, 12)));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId2)).isNotPresent();
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId1))
.hasValueSatisfying(
slot -> assertThat(slot.getState()).isEqualTo(SlotState.ALLOCATED));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId3))
.hasValueSatisfying(
slot -> assertThat(slot.getState()).isEqualTo(SlotState.PENDING));
}
private static void testSlotAllocation(
QuadConsumer<SlotStatusSyncer, TaskManagerTracker, InstanceID, AllocationID>
beforeCompletingSlotRequestCallback)
throws ExecutionException, InterruptedException {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
final CompletableFuture<AllocationID> requestFuture = new CompletableFuture<>();
final CompletableFuture<Acknowledge> responseFuture = new CompletableFuture<>();
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setRequestSlotFunction(
tuple6 -> {
requestFuture.complete(tuple6.f2);
return responseFuture;
})
.createTestingTaskExecutorGateway();
final TaskExecutorConnection taskExecutorConnection =
new TaskExecutorConnection(ResourceID.generate(), taskExecutorGateway);
taskManagerTracker.addTaskManager(
taskExecutorConnection, ResourceProfile.ANY, ResourceProfile.ANY);
final ResourceTracker resourceTracker = new DefaultResourceTracker();
final JobID jobId = new JobID();
final SlotStatusSyncer slotStatusSyncer =
new DefaultSlotStatusSyncer(TASK_MANAGER_REQUEST_TIMEOUT);
slotStatusSyncer.initialize(
taskManagerTracker,
resourceTracker,
ResourceManagerId.generate(),
EXECUTOR_RESOURCE.getExecutor());
final CompletableFuture<Void> allocatedFuture =
slotStatusSyncer.allocateSlot(
taskExecutorConnection.getInstanceID(),
jobId,
"address",
ResourceProfile.ANY);
final AllocationID allocationId = requestFuture.get();
assertThat(resourceTracker.getAcquiredResources(jobId))
.contains(ResourceRequirement.create(ResourceProfile.ANY, 1));
assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId))
.hasValueSatisfying(
slot -> {
assertThat(slot.getJobId()).isEqualTo(jobId);
assertThat(slot.getState()).isEqualTo(SlotState.PENDING);
});
beforeCompletingSlotRequestCallback.accept(
slotStatusSyncer,
taskManagerTracker,
taskExecutorConnection.getInstanceID(),
allocationId);
responseFuture.complete(Acknowledge.get());
assertThatFuture(allocatedFuture).eventuallySucceeds();
}
}
|
Here a trailing whitespace is not required, as far as I understand.
|
public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {
    // Format the children strictly in source order; every piece gets zero
    // trailing whitespace, and only the closing semicolon ends the line.
    Token importKeyword = formatToken(importDeclarationNode.importKeyword(), 0, 0);
    if (importDeclarationNode.orgName().isPresent()) {
        ImportOrgNameNode formattedOrgName = formatNode(importDeclarationNode.orgName().get(), 0, 0);
        importDeclarationNode = importDeclarationNode.modify().withOrgName(formattedOrgName).apply();
    }
    SeparatedNodeList<IdentifierToken> formattedModuleNames =
            formatSeparatedNodeList(importDeclarationNode.moduleName(), 0, 0, 0, 0, 0, 0);
    if (importDeclarationNode.version().isPresent()) {
        ImportVersionNode formattedVersion = formatNode(importDeclarationNode.version().get(), 0, 0);
        importDeclarationNode = importDeclarationNode.modify().withVersion(formattedVersion).apply();
    }
    if (importDeclarationNode.prefix().isPresent()) {
        ImportPrefixNode formattedPrefix = formatNode(importDeclarationNode.prefix().get(), 0, 0);
        importDeclarationNode = importDeclarationNode.modify().withPrefix(formattedPrefix).apply();
    }
    Token formattedSemicolon = formatToken(importDeclarationNode.semicolon(), 0, 1);
    return importDeclarationNode.modify()
            .withImportKeyword(importKeyword)
            .withModuleName(formattedModuleNames)
            .withSemicolon(formattedSemicolon)
            .apply();
}
|
ImportPrefixNode prefix = formatNode(importDeclarationNode.prefix().get(), 0, 0);
|
public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {
    // `import` is always followed by a single space.
    Token importKeyword = formatToken(importDeclarationNode.importKeyword(), 1, 0);
    boolean orgNamePresent = importDeclarationNode.orgName().isPresent();
    boolean versionPresent = importDeclarationNode.version().isPresent();
    boolean prefixPresent = importDeclarationNode.prefix().isPresent();
    // A space follows the module name only when a version or prefix clause
    // comes next; likewise a space follows the version only before a prefix.
    int moduleNameTrailingWS = (versionPresent || prefixPresent) ? 1 : 0;
    int versionTrailingWS = prefixPresent ? 1 : 0;
    if (orgNamePresent) {
        ImportOrgNameNode formattedOrgName = formatNode(importDeclarationNode.orgName().get(), 0, 0);
        importDeclarationNode = importDeclarationNode.modify().withOrgName(formattedOrgName).apply();
    }
    SeparatedNodeList<IdentifierToken> formattedModuleNames = formatSeparatedNodeList(
            importDeclarationNode.moduleName(), 0, 0, 0, 0, moduleNameTrailingWS, 0);
    if (versionPresent) {
        ImportVersionNode formattedVersion =
                formatNode(importDeclarationNode.version().get(), versionTrailingWS, 0);
        importDeclarationNode = importDeclarationNode.modify().withVersion(formattedVersion).apply();
    }
    if (prefixPresent) {
        ImportPrefixNode formattedPrefix = formatNode(importDeclarationNode.prefix().get(), 0, 0);
        importDeclarationNode = importDeclarationNode.modify().withPrefix(formattedPrefix).apply();
    }
    Token formattedSemicolon =
            formatToken(importDeclarationNode.semicolon(), this.trailingWS, this.trailingNL);
    return importDeclarationNode.modify()
            .withImportKeyword(importKeyword)
            .withModuleName(formattedModuleNames)
            .withSemicolon(formattedSemicolon)
            .apply();
}
|
class NewFormattingTreeModifier extends FormattingTreeModifier {
    /**
     * Number of whitespace characters to be used as the indentation for the current line.
     */
    private int indentation = 0;
    /**
     * Number of leading newlines to be added to the currently processing node.
     */
    private int leadingNL = 0;
    /**
     * Number of trailing newlines to be added to the currently processing node.
     */
    private int trailingNL = 0;
    /**
     * Number of trailing whitespace characters to be added to the currently processing node.
     */
    private int trailingWS = 0;
    /**
     * Flag indicating whether the currently formatting token is the first token of the current line.
     */
    private boolean hasNewline = true;
    /**
     * Number of whitespace characters to be used for a single indentation.
     */
    private static final int DEFAULT_INDENTATION = 4;
    /**
     * Maximum length of a line. Any line that goes past this limit will be wrapped.
     */
    private static final int COLUMN_LIMIT = 80;
    /**
     * Length of the currently formatting line.
     */
    private int lineLength = 0;
    /**
     * Creates a tree modifier that rewrites whitespace trivia of the syntax tree.
     *
     * @param options formatting options controlling the output style
     * @param lineRange range of lines to be formatted
     */
    public NewFormattingTreeModifier(FormattingOptions options, LineRange lineRange) {
        super(options, lineRange);
    }
@Override
public ModulePartNode transform(ModulePartNode modulePartNode) {
NodeList<ImportDeclarationNode> imports = formatNodeList(modulePartNode.imports(), 0, 1, 0, 2);
NodeList<ModuleMemberDeclarationNode> members = formatNodeList(modulePartNode.members(), 0, 2, 0, 1);
Token eofToken = formatToken(modulePartNode.eofToken(), 0, 0);
return modulePartNode.modify(imports, members, eofToken);
}
    @Override
    public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {
        // Children are formatted in source order: metadata (if any), qualifiers,
        // `function` keyword, name, signature, body. The order of these calls
        // mirrors token order in the output, so it must not be changed.
        if (functionDefinitionNode.metadata().isPresent()) {
            MetadataNode metadata = formatNode(functionDefinitionNode.metadata().get(), 1, 0);
            functionDefinitionNode = functionDefinitionNode.modify().withMetadata(metadata).apply();
        }
        NodeList<Token> qualifierList = formatNodeList(functionDefinitionNode.qualifierList(), 1, 0, 1, 0);
        Token functionKeyword = formatToken(functionDefinitionNode.functionKeyword(), 1, 0);
        IdentifierToken functionName = formatToken(functionDefinitionNode.functionName(), 0, 0);
        FunctionSignatureNode functionSignatureNode = formatNode(functionDefinitionNode.functionSignature(), 1, 0);
        // The body takes over the trailing trivia requested for the whole
        // definition.
        FunctionBodyNode functionBodyNode = formatNode(functionDefinitionNode.functionBody(),
                this.trailingWS, this.trailingNL);
        return functionDefinitionNode.modify()
                .withFunctionKeyword(functionKeyword)
                .withFunctionName(functionName)
                .withFunctionSignature(functionSignatureNode).withQualifierList(qualifierList)
                .withFunctionBody(functionBodyNode)
                .apply();
    }
    @Override
    public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {
        Token openPara = formatToken(functionSignatureNode.openParenToken(), 0, 0);
        // Temporarily pin the indentation to the current line length so that
        // wrapped parameters align under the open paren, then restore the
        // previous indentation once the parameter list is done.
        int currentIndentation = this.indentation;
        setIndentation(this.lineLength);
        SeparatedNodeList<ParameterNode> parameters =
                formatSeparatedNodeList(functionSignatureNode.parameters(), 0, 0, 0, 0);
        setIndentation(currentIndentation);
        Token closePara = formatToken(functionSignatureNode.closeParenToken(), 1, 0);
        if (functionSignatureNode.returnTypeDesc().isPresent()) {
            // The return type descriptor, when present, terminates the
            // signature and takes over the requested trailing trivia.
            ReturnTypeDescriptorNode returnTypeDesc =
                    formatNode(functionSignatureNode.returnTypeDesc().get(), this.trailingWS, this.trailingNL);
            functionSignatureNode = functionSignatureNode.modify().withReturnTypeDesc(returnTypeDesc).apply();
        }
        return functionSignatureNode.modify()
                .withOpenParenToken(openPara)
                .withCloseParenToken(closePara)
                .withParameters(parameters)
                .apply();
    }
@Override
public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) {
NodeList<AnnotationNode> annotations = formatNodeList(requiredParameterNode.annotations(), 0, 1, 0, 0);
Node typeName = formatNode(requiredParameterNode.typeName(), 1, 0);
if (requiredParameterNode.paramName().isPresent()) {
Token paramName = formatToken(requiredParameterNode.paramName().get(), 0, 0);
return requiredParameterNode.modify()
.withAnnotations(annotations)
.withTypeName(typeName)
.withParamName(paramName)
.apply();
} else {
return requiredParameterNode.modify()
.withAnnotations(annotations)
.withTypeName(typeName)
.apply();
}
}
@Override
public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
Token openBrace = formatToken(functionBodyBlockNode.openBraceToken(), 0, 1);
indent();
NodeList<StatementNode> statements = formatNodeList(functionBodyBlockNode.statements(), 0, 1, 0, 1);
unindent();
Token closeBrace = formatToken(functionBodyBlockNode.closeBraceToken(), this.trailingWS, this.trailingNL);
return functionBodyBlockNode.modify()
.withOpenBraceToken(openBrace)
.withCloseBraceToken(closeBrace)
.withStatements(statements)
.apply();
}
    @Override
    public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) {
        // Children are formatted in source order: annotations, optional `final`,
        // the typed binding pattern, and (if initialized) `=` plus the
        // initializer, ending with the semicolon.
        NodeList<AnnotationNode> annotationNodes = formatNodeList(variableDeclarationNode.annotations(), 0, 1, 0, 1);
        if (variableDeclarationNode.finalKeyword().isPresent()) {
            Token finalToken = formatToken(variableDeclarationNode.finalKeyword().get(), 1, 0);
            variableDeclarationNode = variableDeclarationNode.modify().withFinalKeyword(finalToken).apply();
        }
        TypedBindingPatternNode typedBindingPatternNode;
        if (variableDeclarationNode.equalsToken().isPresent()) {
            // Initialized declaration: spaces surround `=`.
            // NOTE(review): initializer() is unwrapped with get() on the
            // assumption that an equals token implies an initializer — confirm.
            typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 1, 0);
            Token equalToken = formatToken(variableDeclarationNode.equalsToken().get(), 1, 0);
            ExpressionNode initializer = formatNode(variableDeclarationNode.initializer().get(), 0, 0);
            Token semicolonToken = formatToken(variableDeclarationNode.semicolonToken(),
                    this.trailingWS, this.trailingNL);
            return variableDeclarationNode.modify()
                    .withAnnotations(annotationNodes)
                    .withTypedBindingPattern(typedBindingPatternNode)
                    .withEqualsToken(equalToken)
                    .withInitializer(initializer)
                    .withSemicolonToken(semicolonToken)
                    .apply();
        } else {
            // Uninitialized declaration: the binding pattern is followed
            // directly by the semicolon.
            typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 0, 0);
            Token semicolonToken = formatToken(variableDeclarationNode.semicolonToken(),
                    this.trailingWS, this.trailingNL);
            return variableDeclarationNode.modify()
                    .withAnnotations(annotationNodes)
                    .withTypedBindingPattern(typedBindingPatternNode)
                    .withSemicolonToken(semicolonToken)
                    .apply();
        }
    }
@Override
public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) {
TypeDescriptorNode typeDescriptorNode = formatNode(typedBindingPatternNode.typeDescriptor(), 1, 0);
BindingPatternNode bindingPatternNode =
formatNode(typedBindingPatternNode.bindingPattern(), this.trailingWS, this.trailingNL);
return typedBindingPatternNode.modify()
.withTypeDescriptor(typeDescriptorNode)
.withBindingPattern(bindingPatternNode)
.apply();
}
@Override
public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {
Token name = formatToken(builtinSimpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
return builtinSimpleNameReferenceNode.modify().withName(name).apply();
}
@Override
public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) {
Token literalToken = formatToken(basicLiteralNode.literalToken(), this.trailingWS, this.trailingNL);
return basicLiteralNode.modify().withLiteralToken(literalToken).apply();
}
@Override
public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
Token variableName = formatToken(captureBindingPatternNode.variableName(), this.trailingWS, this.trailingNL);
return captureBindingPatternNode.modify().withVariableName(variableName).apply();
}
    @Override
    public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) {
        Token ifKeyword = formatToken(ifElseStatementNode.ifKeyword(), 1, 0);
        ExpressionNode condition = formatNode(ifElseStatementNode.condition(), 1, 0);
        BlockStatementNode ifBody;
        if (ifElseStatementNode.elseBody().isPresent()) {
            // With an `else`, the if-body is followed by a single space and the
            // else branch takes over the statement's trailing trivia.
            ifBody = formatNode(ifElseStatementNode.ifBody(), 1, 0);
            Node elseBody = formatNode(ifElseStatementNode.elseBody().get(), this.trailingWS, this.trailingNL);
            ifElseStatementNode = ifElseStatementNode.modify().withElseBody(elseBody).apply();
        } else {
            // Without an `else`, the if-body itself terminates the statement.
            ifBody = formatNode(ifElseStatementNode.ifBody(), this.trailingWS, this.trailingNL);
        }
        return ifElseStatementNode.modify()
                .withIfKeyword(ifKeyword)
                .withIfBody(ifBody)
                .withCondition(condition)
                .apply();
    }
@Override
public ElseBlockNode transform(ElseBlockNode elseBlockNode) {
Token elseKeyword = formatToken(elseBlockNode.elseKeyword(), 1, 0);
StatementNode elseBody = formatNode(elseBlockNode.elseBody(), this.trailingWS, this.trailingNL);
return elseBlockNode.modify()
.withElseKeyword(elseKeyword)
.withElseBody(elseBody)
.apply();
}
@Override
public BlockStatementNode transform(BlockStatementNode blockStatementNode) {
Token openBrace = formatToken(blockStatementNode.openBraceToken(), 0, 1);
indent();
NodeList<StatementNode> statements = formatNodeList(blockStatementNode.statements(), 0, 1, 0, 1);
unindent();
Token closeBrace = formatToken(blockStatementNode.closeBraceToken(), this.trailingWS, this.trailingNL);
return blockStatementNode.modify()
.withOpenBraceToken(openBrace)
.withStatements(statements)
.withCloseBraceToken(closeBrace)
.apply();
}
/**
 * Formats a record type descriptor. Fields are laid out either on one line or one
 * per line, depending on {@link #shouldExpand(RecordTypeDescriptorNode)}. While the
 * fields are formatted, the indentation is pinned to the column of the {@code record}
 * keyword plus one level, and restored afterwards.
 */
@Override
public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDesc) {
    Token recordKeyword = formatNode(recordTypeDesc.recordKeyword(), 1, 0);
    int fieldTrailingWS = 0;
    int fieldTrailingNL = 0;
    if (shouldExpand(recordTypeDesc)) {
        // Expanded form: each field on its own line.
        fieldTrailingNL++;
    } else {
        // Inline form: fields separated by a single space.
        fieldTrailingWS++;
    }
    Token bodyStartDelimiter = formatToken(recordTypeDesc.bodyStartDelimiter(), fieldTrailingWS, fieldTrailingNL);
    // Pin indentation relative to where the `record` keyword starts, not the
    // enclosing statement, so nested records align under their own keyword.
    int prevIndentation = this.indentation;
    setIndentation(recordKeyword.location().lineRange().startLine().offset() + DEFAULT_INDENTATION);
    NodeList<Node> fields = formatNodeList(recordTypeDesc.fields(), fieldTrailingWS, fieldTrailingNL,
            fieldTrailingWS, fieldTrailingNL);
    if (recordTypeDesc.recordRestDescriptor().isPresent()) {
        RecordRestDescriptorNode recordRestDescriptor =
                formatNode(recordTypeDesc.recordRestDescriptor().get(), fieldTrailingWS, fieldTrailingNL);
        recordTypeDesc = recordTypeDesc.modify().withRecordRestDescriptor(recordRestDescriptor).apply();
    }
    setIndentation(prevIndentation);
    Token bodyEndDelimiter = formatToken(recordTypeDesc.bodyEndDelimiter(), this.trailingWS, this.trailingNL);
    return recordTypeDesc.modify()
            .withRecordKeyword(recordKeyword)
            .withBodyStartDelimiter(bodyStartDelimiter)
            .withFields(fields)
            .withBodyEndDelimiter(bodyEndDelimiter)
            .apply();
}
/**
 * Formats a record field without a default value: optional metadata on its own
 * line, optional {@code readonly}, type, field name, optional {@code ?}, and the
 * terminating semicolon carrying the field's trailing trivia.
 */
@Override
public RecordFieldNode transform(RecordFieldNode recordField) {
    if (recordField.metadata().isPresent()) {
        MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
        recordField = recordField.modify().withMetadata(metadata).apply();
    }
    if (recordField.readonlyKeyword().isPresent()) {
        Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
        recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
    }
    Node typeName = formatNode(recordField.typeName(), 1, 0);
    Token fieldName = formatToken(recordField.fieldName(), 0, 0);
    if (recordField.questionMarkToken().isPresent()) {
        Token questionMarkToken = formatToken(recordField.questionMarkToken().get(), 0, 1);
        recordField = recordField.modify().withQuestionMarkToken(questionMarkToken).apply();
    }
    Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);
    return recordField.modify()
            .withTypeName(typeName)
            .withFieldName(fieldName)
            .withSemicolonToken(semicolonToken)
            .apply();
}
/**
 * Formats a record field with a default value:
 * {@code [metadata] [readonly] type name = expr;}. The semicolon carries the
 * field's trailing trivia.
 */
@Override
public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordField) {
    if (recordField.metadata().isPresent()) {
        MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
        recordField = recordField.modify().withMetadata(metadata).apply();
    }
    if (recordField.readonlyKeyword().isPresent()) {
        Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
        recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
    }
    Node typeName = formatNode(recordField.typeName(), 1, 0);
    Token fieldName = formatToken(recordField.fieldName(), 1, 0);
    Token equalsToken = formatToken(recordField.equalsToken(), 1, 0);
    ExpressionNode expression = formatNode(recordField.expression(), 0, 0);
    Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);
    return recordField.modify()
            .withTypeName(typeName)
            .withFieldName(fieldName)
            .withEqualsToken(equalsToken)
            .withExpression(expression)
            .withSemicolonToken(semicolonToken)
            .apply();
}
/**
 * Formats a service declaration:
 * {@code [metadata] service name on expr, ... { ... }}. The service body carries
 * the declaration's trailing trivia.
 *
 * <p>Fix: the original carried a duplicated {@code @Override} annotation, which is
 * a compile error in Java (an annotation may not be repeated unless it is declared
 * {@code @Repeatable}).
 */
@Override
public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) {
    if (serviceDeclarationNode.metadata().isPresent()) {
        MetadataNode metadata = formatNode(serviceDeclarationNode.metadata().get(), 1, 0);
        serviceDeclarationNode = serviceDeclarationNode.modify().withMetadata(metadata).apply();
    }
    Token serviceKeyword = formatToken(serviceDeclarationNode.serviceKeyword(), 1, 0);
    IdentifierToken serviceName = formatToken(serviceDeclarationNode.serviceName(), 1, 0);
    Token onKeyword = formatToken(serviceDeclarationNode.onKeyword(), 1, 0);
    SeparatedNodeList<ExpressionNode> expressions =
            formatSeparatedNodeList(serviceDeclarationNode.expressions(), 0, 0, 0, 0);
    Node serviceBody = formatNode(serviceDeclarationNode.serviceBody(), this.trailingWS, this.trailingNL);
    return serviceDeclarationNode.modify()
            .withServiceKeyword(serviceKeyword)
            .withServiceName(serviceName)
            .withOnKeyword(onKeyword)
            .withExpressions(expressions)
            .withServiceBody(serviceBody)
            .apply();
}
/**
 * Formats an explicit {@code new T(...)} expression: a space after {@code new},
 * no space between the type and its argument list.
 */
@Override
public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) {
    Token formattedNewKeyword = formatToken(explicitNewExpressionNode.newKeyword(), 1, 0);
    TypeDescriptorNode formattedTypeDesc = formatNode(explicitNewExpressionNode.typeDescriptor(), 0, 0);
    ParenthesizedArgList formattedArgList =
            formatNode(explicitNewExpressionNode.parenthesizedArgList(), 0, 0);
    return explicitNewExpressionNode.modify()
            .withNewKeyword(formattedNewKeyword)
            .withTypeDescriptor(formattedTypeDesc)
            .withParenthesizedArgList(formattedArgList)
            .apply();
}
/**
 * Formats a parenthesized argument list: {@code (a, b, c)} with a trailing space
 * after the closing parenthesis.
 */
@Override
public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) {
    Token formattedOpenParen = formatToken(parenthesizedArgList.openParenToken(), 0, 0);
    SeparatedNodeList<FunctionArgumentNode> formattedArgs =
            formatSeparatedNodeList(parenthesizedArgList.arguments(), 0, 0, 0, 0);
    Token formattedCloseParen = formatToken(parenthesizedArgList.closeParenToken(), 1, 0);
    return parenthesizedArgList.modify()
            .withOpenParenToken(formattedOpenParen)
            .withArguments(formattedArgs)
            .withCloseParenToken(formattedCloseParen)
            .apply();
}
/**
 * Formats a service body: newline after the open brace, resources indented one
 * level, pending trailing trivia after the close brace.
 */
@Override
public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) {
    Token formattedOpenBrace = formatToken(serviceBodyNode.openBraceToken(), 0, 1);
    indent();
    NodeList<Node> formattedResources = formatNodeList(serviceBodyNode.resources(), 0, 1, 0, 1);
    unindent();
    Token formattedCloseBrace =
            formatToken(serviceBodyNode.closeBraceToken(), this.trailingWS, this.trailingNL);
    return serviceBodyNode.modify()
            .withOpenBraceToken(formattedOpenBrace)
            .withResources(formattedResources)
            .withCloseBraceToken(formattedCloseBrace)
            .apply();
}
/**
 * Formats a qualified name reference ({@code module:identifier}) with no internal
 * spacing; the identifier carries the pending trailing trivia.
 */
@Override
public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    Token formattedPrefix = formatToken(qualifiedNameReferenceNode.modulePrefix(), 0, 0);
    Token formattedColon = formatToken((Token) qualifiedNameReferenceNode.colon(), 0, 0);
    IdentifierToken formattedIdentifier =
            formatToken(qualifiedNameReferenceNode.identifier(), this.trailingWS, this.trailingNL);
    return qualifiedNameReferenceNode.modify()
            .withModulePrefix(formattedPrefix)
            .withColon(formattedColon)
            .withIdentifier(formattedIdentifier)
            .apply();
}
/**
 * Formats a return type descriptor: {@code returns [annotations] type } with
 * single spaces between the parts.
 */
@Override
public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {
    Token formattedReturnsKeyword = formatToken(returnTypeDescriptorNode.returnsKeyword(), 1, 0);
    NodeList<AnnotationNode> formattedAnnotations =
            formatNodeList(returnTypeDescriptorNode.annotations(), 0, 0, 1, 0);
    Node formattedType = formatNode(returnTypeDescriptorNode.type(), 1, 0);
    return returnTypeDescriptorNode.modify()
            .withReturnsKeyword(formattedReturnsKeyword)
            .withAnnotations(formattedAnnotations)
            .withType(formattedType)
            .apply();
}
/**
 * Formats an optional type descriptor ({@code T?}): no gap before the question
 * mark, one space after it.
 */
@Override
public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {
    Node formattedTypeDesc = formatNode(optionalTypeDescriptorNode.typeDescriptor(), 0, 0);
    Token formattedQuestionMark = formatToken(optionalTypeDescriptorNode.questionMarkToken(), 1, 0);
    return optionalTypeDescriptorNode.modify()
            .withTypeDescriptor(formattedTypeDesc)
            .withQuestionMarkToken(formattedQuestionMark)
            .apply();
}
/**
 * Formats an expression statement ({@code expr;}).
 *
 * <p>Fix: the expression was previously formatted with the statement-level
 * trailing trivia even though the semicolon still follows it, which would insert
 * the statement's trailing whitespace/newlines *between* the expression and its
 * semicolon. The expression now gets no trailing trivia (matching
 * {@code AssignmentStatementNode}); the semicolon carries the statement's
 * trailing trivia.
 */
@Override
public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) {
    ExpressionNode expression = formatNode(expressionStatementNode.expression(), 0, 0);
    Token semicolonToken = formatToken(expressionStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return expressionStatementNode.modify()
            .withExpression(expression)
            .withSemicolonToken(semicolonToken)
            .apply();
}
/**
 * Formats a {@code check}/{@code checkpanic} expression: a space after the
 * keyword; the inner expression carries the pending trailing trivia.
 */
@Override
public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) {
    Token formattedCheckKeyword = formatToken(checkExpressionNode.checkKeyword(), 1, 0);
    ExpressionNode formattedExpression =
            formatNode(checkExpressionNode.expression(), this.trailingWS, this.trailingNL);
    return checkExpressionNode.modify()
            .withCheckKeyword(formattedCheckKeyword)
            .withExpression(formattedExpression)
            .apply();
}
/**
 * Formats a remote method call action: {@code expr->method(args)} with no spacing
 * around the arrow or the parentheses.
 */
@Override
public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    ExpressionNode expression = formatNode(remoteMethodCallActionNode.expression(), 0, 0);
    Token rightArrowToken = formatToken(remoteMethodCallActionNode.rightArrowToken(), 0, 0);
    SimpleNameReferenceNode methodName = formatNode(remoteMethodCallActionNode.methodName(), 0, 0);
    Token openParenToken = formatToken(remoteMethodCallActionNode.openParenToken(), 0, 0);
    // NOTE(review): itemTrailingWS is 1 here while the ordinary function-call
    // transform uses 0 — confirm this asymmetry is intentional.
    SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(remoteMethodCallActionNode
            .arguments(), 1, 0, 0, 0);
    Token closeParenToken = formatToken(remoteMethodCallActionNode.closeParenToken(), 0, 0);
    return remoteMethodCallActionNode.modify()
            .withExpression(expression)
            .withRightArrowToken(rightArrowToken)
            .withMethodName(methodName)
            .withOpenParenToken(openParenToken)
            .withArguments(arguments)
            .withCloseParenToken(closeParenToken)
            .apply();
}
/**
 * Formats a simple name reference, applying the pending trailing trivia to its
 * single name token.
 *
 * <p>Fix: the trailing-newline count was previously hard-coded to {@code 0},
 * silently dropping any newline a parent requested after this node; it now
 * honors {@code this.trailingNL} like every other terminal transform in this
 * class.
 */
@Override
public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) {
    Token name = formatToken(simpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
    return simpleNameReferenceNode.modify()
            .withName(name)
            .apply();
}
/**
 * Formats a type definition:
 * {@code [metadata] [public] type Name TypeDesc;}. The semicolon carries the
 * definition's trailing trivia.
 */
@Override
public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) {
    if (typeDefinitionNode.metadata().isPresent()) {
        MetadataNode metadata = formatNode(typeDefinitionNode.metadata().get(), 1, 0);
        typeDefinitionNode = typeDefinitionNode.modify().withMetadata(metadata).apply();
    }
    if (typeDefinitionNode.visibilityQualifier().isPresent()) {
        Token visibilityQualifier = formatToken(typeDefinitionNode.visibilityQualifier().get(), 1, 0);
        typeDefinitionNode = typeDefinitionNode.modify().withVisibilityQualifier(visibilityQualifier).apply();
    }
    Token typeKeyword = formatToken(typeDefinitionNode.typeKeyword(), 1, 0);
    Token typeName = formatToken(typeDefinitionNode.typeName(), 1, 0);
    Node typeDescriptor = formatNode(typeDefinitionNode.typeDescriptor(), 1, 0);
    Token semicolonToken = formatToken(typeDefinitionNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return typeDefinitionNode.modify()
            .withTypeKeyword(typeKeyword)
            .withTypeName(typeName)
            .withTypeDescriptor(typeDescriptor)
            .withSemicolonToken(semicolonToken)
            .apply();
}
/**
 * Formats a singleton type descriptor.
 * NOTE(review): the trailing trivia is hard-coded to (1, 0) here instead of the
 * pending this.trailingWS/this.trailingNL used by the other terminal transforms —
 * confirm this is intentional.
 */
@Override
public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    ExpressionNode simpleContExprNode = formatNode(singletonTypeDescriptorNode.simpleContExprNode(), 1, 0);
    return singletonTypeDescriptorNode.modify()
            .withSimpleContExprNode(simpleContExprNode)
            .apply();
}
/**
 * Formats a while statement: {@code while cond { ... } [on fail ...]}. The body
 * (or the on-fail clause, when present) carries the statement's trailing trivia.
 */
@Override
public WhileStatementNode transform(WhileStatementNode whileStatementNode) {
    Token whileKeyword = formatToken(whileStatementNode.whileKeyword(), 1, 0);
    ExpressionNode condition = formatNode(whileStatementNode.condition(), 1, 0);
    BlockStatementNode whileBody = formatNode(whileStatementNode.whileBody(), this.trailingWS, this.trailingNL);
    if (whileStatementNode.onFailClause().isPresent()) {
        OnFailClauseNode onFailClause = formatNode(whileStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        whileStatementNode = whileStatementNode.modify().withOnFailClause(onFailClause).apply();
    }
    return whileStatementNode.modify()
            .withWhileKeyword(whileKeyword)
            .withCondition(condition)
            .withWhileBody(whileBody)
            .apply();
}
/**
 * Formats a parenthesized expression {@code (expr)}.
 * NOTE(review): the closing parenthesis takes a hard-coded trailing space (1, 0)
 * rather than the pending this.trailingWS/this.trailingNL — confirm this is
 * intentional.
 */
@Override
public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) {
    Token openParen = formatToken(bracedExpressionNode.openParen(), 0, 0);
    ExpressionNode expression = formatNode(bracedExpressionNode.expression(), 0, 0);
    Token closeParen = formatToken(bracedExpressionNode.closeParen(), 1, 0);
    return bracedExpressionNode.modify()
            .withOpenParen(openParen)
            .withExpression(expression)
            .withCloseParen(closeParen)
            .apply();
}
/**
 * Formats an assignment statement: {@code lhs = rhs;}. The semicolon carries the
 * statement's trailing trivia.
 */
@Override
public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) {
    Node formattedVarRef = formatNode(assignmentStatementNode.varRef(), 1, 0);
    Token formattedEquals = formatToken(assignmentStatementNode.equalsToken(), 1, 0);
    ExpressionNode formattedExpression = formatNode(assignmentStatementNode.expression(), 0, 0);
    Token formattedSemicolon =
            formatToken(assignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return assignmentStatementNode.modify()
            .withVarRef(formattedVarRef)
            .withEqualsToken(formattedEquals)
            .withExpression(formattedExpression)
            .withSemicolonToken(formattedSemicolon)
            .apply();
}
/**
 * Formats a compound assignment ({@code lhs op= rhs;}): no gap between the binary
 * operator and the equals sign; the semicolon carries the statement's trailing trivia.
 */
@Override
public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) {
    ExpressionNode formattedLhs = formatNode(compoundAssignmentStatementNode.lhsExpression(), 1, 0);
    Token formattedOperator = formatToken(compoundAssignmentStatementNode.binaryOperator(), 0, 0);
    Token formattedEquals = formatToken(compoundAssignmentStatementNode.equalsToken(), 1, 0);
    ExpressionNode formattedRhs = formatNode(compoundAssignmentStatementNode.rhsExpression(), 0, 0);
    Token formattedSemicolon = formatToken(compoundAssignmentStatementNode.semicolonToken(),
            this.trailingWS, this.trailingNL);
    return compoundAssignmentStatementNode.modify()
            .withLhsExpression(formattedLhs)
            .withBinaryOperator(formattedOperator)
            .withEqualsToken(formattedEquals)
            .withRhsExpression(formattedRhs)
            .withSemicolonToken(formattedSemicolon)
            .apply();
}
/**
 * Formats a do statement: {@code do { ... } [on fail ...]}. The block (or the
 * on-fail clause, when present) carries the statement's trailing trivia.
 */
@Override
public DoStatementNode transform(DoStatementNode doStatementNode) {
    Token doKeyword = formatToken(doStatementNode.doKeyword(), 1, 0);
    BlockStatementNode blockStatement = formatNode(doStatementNode.blockStatement(),
            this.trailingWS, this.trailingNL);
    if (doStatementNode.onFailClause().isPresent()) {
        OnFailClauseNode onFailClause = formatNode(doStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        doStatementNode = doStatementNode.modify().withOnFailClause(onFailClause).apply();
    }
    return doStatementNode.modify()
            .withDoKeyword(doKeyword)
            .withBlockStatement(blockStatement)
            .apply();
}
/**
 * Formats a foreach statement:
 * {@code foreach T item in expr { ... } [on fail ...]}. The block (or the on-fail
 * clause, when present) carries the statement's trailing trivia.
 */
@Override
public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) {
    Token forEachKeyword = formatToken(forEachStatementNode.forEachKeyword(), 1, 0);
    TypedBindingPatternNode typedBindingPattern = formatNode(forEachStatementNode.typedBindingPattern(), 0, 0);
    Token inKeyword = formatToken(forEachStatementNode.inKeyword(), 1, 0);
    Node actionOrExpressionNode = formatNode(forEachStatementNode.actionOrExpressionNode(), 1, 0);
    StatementNode blockStatement = formatNode(forEachStatementNode.blockStatement(),
            this.trailingWS, this.trailingNL);
    if (forEachStatementNode.onFailClause().isPresent()) {
        OnFailClauseNode onFailClause = formatNode(forEachStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        forEachStatementNode = forEachStatementNode.modify().withOnFailClause(onFailClause).apply();
    }
    return forEachStatementNode.modify()
            .withForEachKeyword(forEachKeyword)
            .withTypedBindingPattern(typedBindingPattern)
            .withInKeyword(inKeyword)
            .withActionOrExpressionNode(actionOrExpressionNode)
            .withBlockStatement(blockStatement)
            .apply();
}
/**
 * Formats a binary expression with single spaces around the operator:
 * {@code lhs op rhs}.
 */
@Override
public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) {
    Node formattedLhs = formatNode(binaryExpressionNode.lhsExpr(), 1, 0);
    Token formattedOperator = formatToken(binaryExpressionNode.operator(), 1, 0);
    Node formattedRhs = formatNode(binaryExpressionNode.rhsExpr(), 0, 0);
    return binaryExpressionNode.modify()
            .withLhsExpr(formattedLhs)
            .withOperator(formattedOperator)
            .withRhsExpr(formattedRhs)
            .apply();
}
/**
 * Formats an on-fail clause: {@code on fail T err { ... }}. The block is followed
 * by a single newline.
 */
@Override
public OnFailClauseNode transform(OnFailClauseNode onFailClauseNode) {
    Token onKeyword = formatToken(onFailClauseNode.onKeyword(), 1, 0);
    Token failKeyword = formatToken(onFailClauseNode.failKeyword(), 1, 0);
    TypeDescriptorNode typeDescriptor = formatNode(onFailClauseNode.typeDescriptor(), 1, 0);
    IdentifierToken failErrorName = formatToken(onFailClauseNode.failErrorName(), 1, 0);
    BlockStatementNode blockStatement = formatNode(onFailClauseNode.blockStatement(),
            0, 1);
    return onFailClauseNode.modify()
            .withOnKeyword(onKeyword)
            .withFailKeyword(failKeyword)
            .withTypeDescriptor(typeDescriptor)
            .withFailErrorName(failErrorName)
            .withBlockStatement(blockStatement)
            .apply();
}
/**
 * Formats a return statement: {@code return [expr];}. The semicolon carries the
 * statement's trailing trivia.
 *
 * <p>Fix: this formatter is stateful (line length, pending newlines), so tokens
 * must be formatted in document order. The original formatted the semicolon
 * before the optional expression; the expression (when present) is now formatted
 * first, then the semicolon.
 */
@Override
public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) {
    // A space after `return` only when an expression follows it.
    Token returnKeyword = formatToken(returnStatementNode.returnKeyword(),
            returnStatementNode.expression().isPresent() ? 1 : 0, 0);
    if (returnStatementNode.expression().isPresent()) {
        ExpressionNode expressionNode = formatNode(returnStatementNode.expression().get(), 0, 0);
        returnStatementNode = returnStatementNode.modify()
                .withExpression(expressionNode).apply();
    }
    Token semicolonToken = formatToken(returnStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return returnStatementNode.modify()
            .withReturnKeyword(returnKeyword)
            .withSemicolonToken(semicolonToken)
            .apply();
}
/**
 * Formats a function call expression: {@code name(args)}. The closing parenthesis
 * carries the call's trailing trivia.
 *
 * <p>Fix: the function name was previously formatted with a trailing space,
 * producing {@code foo (x)}; it now gets no trailing trivia so the open
 * parenthesis hugs the name, consistent with the remote-method-call transform.
 */
@Override
public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) {
    NameReferenceNode functionName = formatNode(functionCallExpressionNode.functionName(), 0, 0);
    Token functionCallOpenPara = formatToken(functionCallExpressionNode.openParenToken(), 0, 0);
    SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(functionCallExpressionNode
            .arguments(), 0, 0, 0, 0);
    Token functionCallClosePara = formatToken(functionCallExpressionNode.closeParenToken(),
            this.trailingWS, this.trailingNL);
    return functionCallExpressionNode.modify()
            .withFunctionName(functionName)
            .withOpenParenToken(functionCallOpenPara)
            .withCloseParenToken(functionCallClosePara)
            .withArguments(arguments)
            .apply();
}
/**
 * Terminal case: formats a bare identifier with whatever trailing trivia the
 * parent node requested.
 */
@Override
public IdentifierToken transform(IdentifierToken identifier) {
    return formatToken(identifier, this.trailingWS, this.trailingNL);
}
/**
 * Terminal case: formats any token not handled by a more specific transform,
 * applying the pending trailing trivia.
 */
@Override
public Token transform(Token token) {
    return formatToken(token, this.trailingWS, this.trailingNL);
}
/**
 * Format a node.
 * <p>
 * Saves and restores the pending trailing-trivia counters around the visit so
 * that nested {@code formatNode}/{@code formatToken} calls made while visiting
 * the node do not leak into the caller's state. If formatting the node pushed
 * the current line past {@link #COLUMN_LIMIT}, the node is re-formatted on a
 * fresh line via {@link #wrap(Node)}.
 *
 * @param <T> Type of the node
 * @param node Node to be formatted (may be {@code null}, returned as-is)
 * @param trailingWS Number of single-length spaces to be added after the node
 * @param trailingNL Number of newlines to be added after the node
 * @return Formatted node
 */
@SuppressWarnings("unchecked")
private <T extends Node> T formatNode(T node, int trailingWS, int trailingNL) {
    if (node == null) {
        return node;
    }
    if (!isInLineRange(node, lineRange)) {
        // Outside the requested range: leave untouched, but still record whether
        // a newline already follows so subsequent leading trivia stays correct.
        checkForNewline(node);
        return node;
    }
    int prevTrailingNL = this.trailingNL;
    int prevTrailingWS = this.trailingWS;
    this.trailingNL = trailingNL;
    this.trailingWS = trailingWS;
    node = (T) node.apply(this);
    if (this.lineLength > COLUMN_LIMIT) {
        // Line overflow: move the whole node to the next line and re-format.
        node = wrap(node);
    }
    this.trailingNL = prevTrailingNL;
    this.trailingWS = prevTrailingWS;
    return node;
}
/**
 * Wrap the node: equivalent to inserting a newline before it and re-formatting
 * it from scratch. The wrapped content starts at the current indentation level.
 *
 * @param <T> Node type
 * @param node Node to be wrapped
 * @return Wrapped node
 */
@SuppressWarnings("unchecked")
private <T extends Node> T wrap(T node) {
    // Independent state updates: start a fresh line before re-visiting.
    this.hasNewline = true;
    this.lineLength = 0;
    this.leadingNL++;
    return (T) node.apply(this);
}
/**
 * Format a token.
 * <p>
 * At most one newline is attached directly to the token itself; any additional
 * requested newlines are carried over in {@link #leadingNL} and emitted as
 * leading trivia of the NEXT formatted token. The pending trailing counters are
 * saved and restored around the call.
 *
 * @param <T> Type of the token
 * @param token Token to be formatted (may be {@code null}, returned as-is)
 * @param trailingWS Number of single-length spaces to be added after the token
 * @param trailingNL Number of newlines to be added after the token
 * @return Formatted token
 */
private <T extends Token> T formatToken(T token, int trailingWS, int trailingNL) {
    if (token == null) {
        return token;
    }
    if (!isInLineRange(token, lineRange)) {
        checkForNewline(token);
        return token;
    }
    int prevTrailingNL = this.trailingNL;
    int prevTrailingWS = this.trailingWS;
    // Clamp: only one newline goes on this token; the rest become leading trivia.
    this.trailingNL = trailingNL > 0 ? 1 : 0;
    this.trailingWS = trailingWS;
    token = formatTokenInternal(token);
    // Surplus newlines (and the newline flag) are deferred to the next token.
    this.leadingNL = trailingNL > 0 ? trailingNL - 1 : 0;
    this.hasNewline = trailingNL > 0;
    this.trailingNL = prevTrailingNL;
    this.trailingWS = prevTrailingWS;
    return token;
}
/**
 * Records that a newline already follows {@code node} when any of its trailing
 * minutiae is an end-of-line token. Used for nodes outside the formatting range
 * so subsequent leading trivia stays consistent.
 */
private <T extends Node> void checkForNewline(T node) {
    for (Minutiae minutiae : node.trailingMinutiae()) {
        if (minutiae.kind() != SyntaxKind.END_OF_LINE_MINUTIAE) {
            continue;
        }
        this.hasNewline = true;
        return;
    }
}
/**
 * Format a list of nodes. Every item except the last receives the per-item
 * trailing trivia; the last item receives the list-level trailing trivia. The
 * original list instance is returned when no item changed.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item of the list
 * @param itemTrailingNL Number of newlines to be added after each item of the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item of the list
 * @param listTrailingNL Number of newlines to be added after the last item of the list
 * @return Formatted node list
 */
@SuppressWarnings("unchecked")
protected <T extends Node> NodeList<T> formatNodeList(NodeList<T> nodeList,
                                                      int itemTrailingWS,
                                                      int itemTrailingNL,
                                                      int listTrailingWS,
                                                      int listTrailingNL) {
    if (nodeList.isEmpty()) {
        return nodeList;
    }
    boolean nodeModified = false;
    int size = nodeList.size();
    Node[] newNodes = new Node[size];
    for (int index = 0; index < size; index++) {
        T oldNode = nodeList.get(index);
        T newNode;
        if (index == size - 1) {
            // Last item: carries the trivia that follows the list as a whole.
            newNode = formatNode(oldNode, listTrailingWS, listTrailingNL);
        } else {
            newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL);
        }
        if (oldNode != newNode) {
            nodeModified = true;
        }
        newNodes[index] = newNode;
    }
    if (!nodeModified) {
        // Identity-preserving: avoid rebuilding an unchanged list.
        return nodeList;
    }
    return (NodeList<T>) NodeFactory.createNodeList(newNodes);
}
/**
 * Format a delimited list of nodes. Convenience overload that assumes each
 * separator is followed by exactly one space and no newline; delegates to the
 * seven-argument overload.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL Number of newlines to be added after each item in the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL Number of newlines to be added after the last item in the list
 * @return Formatted node list
 */
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    return formatSeparatedNodeList(nodeList, itemTrailingWS, itemTrailingNL, 1, 0, listTrailingWS, listTrailingNL);
}
/**
 * Format a delimited list of nodes. The rebuilt array interleaves items and
 * separators ({@code item, sep, item, sep, ..., item}); there is one fewer
 * separator than items, so the last iteration breaks out before reading a
 * separator. The original list instance is returned when nothing changed.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL Number of newlines to be added after each item in the list
 * @param separatorTrailingWS Number of single-length spaces to be added after each separator in the list
 * @param separatorTrailingNL Number of newlines to be added after each separator in the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL Number of newlines to be added after the last item in the list
 * @return Formatted node list
 */
@SuppressWarnings("unchecked")
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int separatorTrailingWS,
                                                                        int separatorTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    if (nodeList.isEmpty()) {
        return nodeList;
    }
    boolean nodeModified = false;
    int size = nodeList.size();
    // size items plus (size - 1) separators, interleaved.
    Node[] newNodes = new Node[size * 2 - 1];
    for (int index = 0; index < size; index++) {
        T oldNode = nodeList.get(index);
        T newNode;
        if (index == size - 1) {
            // Last item: carries the trivia that follows the list as a whole.
            newNode = formatNode(oldNode, listTrailingWS, listTrailingNL);
        } else {
            newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL);
        }
        newNodes[2 * index] = newNode;
        if (oldNode != newNode) {
            nodeModified = true;
        }
        if (index == nodeList.size() - 1) {
            // No separator after the final item.
            break;
        }
        Token oldSeperator = nodeList.getSeparator(index);
        Token newSeperator = formatToken(oldSeperator, separatorTrailingWS, separatorTrailingNL);
        newNodes[(2 * index) + 1] = newSeperator;
        if (oldSeperator != newSeperator) {
            nodeModified = true;
        }
    }
    if (!nodeModified) {
        return nodeList;
    }
    return (SeparatedNodeList<T>) NodeFactory.createSeparatedNodeList(newNodes);
}
/**
 * Rebuilds a token with freshly computed leading and trailing minutiae.
 * Order matters: the leading trivia is computed first, then the token text is
 * counted into the current line length, then the trailing trivia (which itself
 * updates the line length) is computed.
 *
 * @param <T> Type of the token
 * @param token Token to be formatted
 * @return Formatted token
 */
@SuppressWarnings("unchecked")
private <T extends Token> T formatTokenInternal(T token) {
    MinutiaeList leading = getLeadingMinutiae();
    this.lineLength += token.text().length();
    MinutiaeList trailing = getTrailingMinutiae();
    return (T) token.modify(leading, trailing);
}
/**
 * Builds the leading minutiae for the token being formatted: the deferred
 * newlines, then the current indentation — but only when the token starts a
 * fresh line.
 *
 * @return Leading minutiae list
 */
private MinutiaeList getLeadingMinutiae() {
    List<Minutiae> minutiae = new ArrayList<>();
    if (this.hasNewline) {
        int pendingNewlines = this.leadingNL;
        while (pendingNewlines-- > 0) {
            minutiae.add(getNewline());
        }
        if (this.indentation > 0) {
            minutiae.add(NodeFactory.createWhitespaceMinutiae(getWSContent(this.indentation)));
        }
    }
    return NodeFactory.createMinutiaeList(minutiae);
}
/**
 * Builds the trailing minutiae for the token being formatted: the requested
 * trailing whitespace (if any) followed by at most one newline.
 *
 * @return Trailing minutiae list
 */
private MinutiaeList getTrailingMinutiae() {
    List<Minutiae> minutiae = new ArrayList<>();
    if (this.trailingWS > 0) {
        minutiae.add(NodeFactory.createWhitespaceMinutiae(getWSContent(this.trailingWS)));
    }
    if (this.trailingNL > 0) {
        minutiae.add(getNewline());
    }
    return NodeFactory.createMinutiaeList(minutiae);
}
/**
 * Creates an end-of-line minutiae and resets the line-length counter, since a
 * newline starts a fresh line.
 */
private Minutiae getNewline() {
    this.lineLength = 0;
    return NodeFactory.createEndOfLineMinutiae(FormatterUtils.NEWLINE_SYMBOL);
}
/**
 * Increases the current indentation by one level ({@link #DEFAULT_INDENTATION}
 * whitespace characters).
 */
private void indent() {
    this.indentation = this.indentation + DEFAULT_INDENTATION;
}
/**
 * Decreases the current indentation by one level ({@link #DEFAULT_INDENTATION}
 * whitespace characters), clamping at zero.
 */
private void unindent() {
    this.indentation = Math.max(0, this.indentation - DEFAULT_INDENTATION);
}
/**
 * Set the indentation for the code to follow. Used to pin alignment to an
 * absolute column (e.g. under a keyword or an open parenthesis) rather than a
 * relative level.
 *
 * @param value Number of characters to set the indentation from the start of the line.
 */
private void setIndentation(int value) {
    this.indentation = value;
}
/**
 * Produces a run of {@code count} spaces and charges them to the current line
 * length.
 */
private String getWSContent(int count) {
    this.lineLength += count;
    StringBuilder whitespace = new StringBuilder(Math.max(count, 0));
    for (int remaining = count; remaining > 0; remaining--) {
        whitespace.append(' ');
    }
    return whitespace.toString();
}
/**
 * Decides whether a record type descriptor should be expanded to one field per
 * line: never for 0–1 fields (the rest descriptor counts as a field), always for
 * more than 3, and otherwise only if some field's source text is longer than 15
 * characters.
 */
private boolean shouldExpand(RecordTypeDescriptorNode recordTypeDesc) {
    int fieldCount = recordTypeDesc.fields().size();
    if (recordTypeDesc.recordRestDescriptor().isPresent()) {
        fieldCount++;
    }
    if (fieldCount <= 1) {
        return false;
    }
    if (fieldCount > 3) {
        return true;
    }
    for (Node field : recordTypeDesc.fields()) {
        TextRange range = field.textRange();
        if (range.endOffset() - range.startOffset() > 15) {
            return true;
        }
    }
    return false;
}
}
|
class NewFormattingTreeModifier extends FormattingTreeModifier {
/**
* Number of of whitespace characters to be used as the indentation for the current line.
*/
private int indentation = 0;
/**
* Number of leading newlines to be added to the currently processing node.
*/
private int leadingNL = 0;
/**
* Number of trailing newlines to be added to the currently processing node.
*/
private int trailingNL = 0;
/**
* Number of trailing whitespace characters to be added to the currently processing node.
*/
private int trailingWS = 0;
/**
* Flag indicating whether the currently formatting token is the first token of the current line.
*/
private boolean hasNewline = true;
/**
* Number of of whitespace characters to be used for a single indentation.
*/
private static final int DEFAULT_INDENTATION = 4;
/**
* Maximum length of a line. Any line that goes pass this limit will be wrapped.
*/
private static final int COLUMN_LIMIT = 80;
/**
* Length of the currently formatting line.
*/
private int lineLength = 0;
/**
 * Creates a tree modifier that formats the given line range using the given
 * options; all construction is delegated to the superclass.
 */
public NewFormattingTreeModifier(FormattingOptions options, LineRange lineRange) {
    super(options, lineRange);
}
/**
 * Formats a module part: imports one per line with a blank line after the group,
 * members separated by blank lines, and no trivia on the EOF token.
 */
@Override
public ModulePartNode transform(ModulePartNode modulePartNode) {
    NodeList<ImportDeclarationNode> formattedImports =
            formatNodeList(modulePartNode.imports(), 0, 1, 0, 2);
    NodeList<ModuleMemberDeclarationNode> formattedMembers =
            formatNodeList(modulePartNode.members(), 0, 2, 0, 1);
    Token formattedEof = formatToken(modulePartNode.eofToken(), 0, 0);
    return modulePartNode.modify(formattedImports, formattedMembers, formattedEof);
}
/**
 * Formats a function definition:
 * {@code [metadata] [qualifiers] function name signature body}. The body carries
 * the definition's trailing trivia.
 */
@Override
public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {
    if (functionDefinitionNode.metadata().isPresent()) {
        MetadataNode metadata = formatNode(functionDefinitionNode.metadata().get(), 1, 0);
        functionDefinitionNode = functionDefinitionNode.modify().withMetadata(metadata).apply();
    }
    NodeList<Token> qualifierList = formatNodeList(functionDefinitionNode.qualifierList(), 1, 0, 1, 0);
    Token functionKeyword = formatToken(functionDefinitionNode.functionKeyword(), 1, 0);
    IdentifierToken functionName = formatToken(functionDefinitionNode.functionName(), 0, 0);
    FunctionSignatureNode functionSignatureNode = formatNode(functionDefinitionNode.functionSignature(), 1, 0);
    FunctionBodyNode functionBodyNode = formatNode(functionDefinitionNode.functionBody(),
            this.trailingWS, this.trailingNL);
    return functionDefinitionNode.modify()
            .withFunctionKeyword(functionKeyword)
            .withFunctionName(functionName)
            .withFunctionSignature(functionSignatureNode).withQualifierList(qualifierList)
            .withFunctionBody(functionBodyNode)
            .apply();
}
/**
 * Formats a function signature. While the parameters are formatted, the
 * indentation is pinned to the current line length (the column right after the
 * open parenthesis) so wrapped parameters align under the first one; the
 * previous indentation is restored afterwards. When a return type descriptor is
 * present it carries the signature's trailing trivia.
 */
@Override
public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {
    Token openPara = formatToken(functionSignatureNode.openParenToken(), 0, 0);
    // Align wrapped parameters at the open-paren column.
    int currentIndentation = this.indentation;
    setIndentation(this.lineLength);
    SeparatedNodeList<ParameterNode> parameters =
            formatSeparatedNodeList(functionSignatureNode.parameters(), 0, 0, 0, 0);
    setIndentation(currentIndentation);
    Token closePara = formatToken(functionSignatureNode.closeParenToken(), 1, 0);
    if (functionSignatureNode.returnTypeDesc().isPresent()) {
        ReturnTypeDescriptorNode returnTypeDesc =
                formatNode(functionSignatureNode.returnTypeDesc().get(), this.trailingWS, this.trailingNL);
        functionSignatureNode = functionSignatureNode.modify().withReturnTypeDesc(returnTypeDesc).apply();
    }
    return functionSignatureNode.modify()
            .withOpenParenToken(openPara)
            .withCloseParenToken(closePara)
            .withParameters(parameters)
            .apply();
}
/**
 * Formats a required parameter: {@code [annotations] type [name]}. The parameter
 * name, when present, gets no trailing trivia of its own.
 */
@Override
public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) {
    NodeList<AnnotationNode> formattedAnnotations =
            formatNodeList(requiredParameterNode.annotations(), 0, 1, 0, 0);
    Node formattedTypeName = formatNode(requiredParameterNode.typeName(), 1, 0);
    if (requiredParameterNode.paramName().isPresent()) {
        Token formattedParamName = formatToken(requiredParameterNode.paramName().get(), 0, 0);
        requiredParameterNode = requiredParameterNode.modify().withParamName(formattedParamName).apply();
    }
    return requiredParameterNode.modify()
            .withAnnotations(formattedAnnotations)
            .withTypeName(formattedTypeName)
            .apply();
}
/**
 * Formats a function body block: newline after the open brace, statements
 * indented one level, pending trailing trivia after the close brace.
 */
@Override
public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    Token formattedOpenBrace = formatToken(functionBodyBlockNode.openBraceToken(), 0, 1);
    indent();
    NodeList<StatementNode> formattedStatements =
            formatNodeList(functionBodyBlockNode.statements(), 0, 1, 0, 1);
    unindent();
    Token formattedCloseBrace =
            formatToken(functionBodyBlockNode.closeBraceToken(), this.trailingWS, this.trailingNL);
    return functionBodyBlockNode.modify()
            .withOpenBraceToken(formattedOpenBrace)
            .withCloseBraceToken(formattedCloseBrace)
            .withStatements(formattedStatements)
            .apply();
}
/**
 * Formats a local variable declaration:
 * {@code [annotations] [final] T binding [= initializer];}. The semicolon carries
 * the statement's trailing trivia.
 */
@Override
public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) {
    NodeList<AnnotationNode> annotationNodes = formatNodeList(variableDeclarationNode.annotations(), 0, 1, 0, 1);
    if (variableDeclarationNode.finalKeyword().isPresent()) {
        Token finalToken = formatToken(variableDeclarationNode.finalKeyword().get(), 1, 0);
        variableDeclarationNode = variableDeclarationNode.modify().withFinalKeyword(finalToken).apply();
    }
    TypedBindingPatternNode typedBindingPatternNode;
    if (variableDeclarationNode.equalsToken().isPresent()) {
        typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 1, 0);
        Token equalToken = formatToken(variableDeclarationNode.equalsToken().get(), 1, 0);
        // assumes an initializer always accompanies the equals token — the
        // unchecked get() would throw otherwise; TODO confirm with the grammar.
        ExpressionNode initializer = formatNode(variableDeclarationNode.initializer().get(), 0, 0);
        Token semicolonToken = formatToken(variableDeclarationNode.semicolonToken(),
                this.trailingWS, this.trailingNL);
        return variableDeclarationNode.modify()
                .withAnnotations(annotationNodes)
                .withTypedBindingPattern(typedBindingPatternNode)
                .withEqualsToken(equalToken)
                .withInitializer(initializer)
                .withSemicolonToken(semicolonToken)
                .apply();
    } else {
        typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 0, 0);
        Token semicolonToken = formatToken(variableDeclarationNode.semicolonToken(),
                this.trailingWS, this.trailingNL);
        return variableDeclarationNode.modify()
                .withAnnotations(annotationNodes)
                .withTypedBindingPattern(typedBindingPatternNode)
                .withSemicolonToken(semicolonToken)
                .apply();
    }
}
@Override
public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) {
    // Single space between the type and its binding pattern; trailing trivia is the caller's.
    TypeDescriptorNode typeDesc = formatNode(typedBindingPatternNode.typeDescriptor(), 1, 0);
    BindingPatternNode bindingPattern =
            formatNode(typedBindingPatternNode.bindingPattern(), this.trailingWS, this.trailingNL);
    return typedBindingPatternNode.modify()
            .withBindingPattern(bindingPattern)
            .withTypeDescriptor(typeDesc)
            .apply();
}
@Override
public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {
    // Leaf node: just format the name token with the caller's trailing trivia.
    Token nameToken = formatToken(builtinSimpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
    return builtinSimpleNameReferenceNode.modify().withName(nameToken).apply();
}
@Override
public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) {
    // Leaf node: the literal token carries the caller's trailing trivia.
    Token literal = formatToken(basicLiteralNode.literalToken(), this.trailingWS, this.trailingNL);
    return basicLiteralNode.modify().withLiteralToken(literal).apply();
}
@Override
public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    // Leaf node: the variable-name token carries the caller's trailing trivia.
    Token varName = formatToken(captureBindingPatternNode.variableName(), this.trailingWS, this.trailingNL);
    return captureBindingPatternNode.modify().withVariableName(varName).apply();
}
@Override
public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) {
    Token ifKeyword = formatToken(ifElseStatementNode.ifKeyword(), 1, 0);
    ExpressionNode condition = formatNode(ifElseStatementNode.condition(), 1, 0);
    // The if-body only keeps a single trailing space when an else clause follows it.
    BlockStatementNode ifBody;
    if (ifElseStatementNode.elseBody().isPresent()) {
        ifBody = formatNode(ifElseStatementNode.ifBody(), 1, 0);
        Node elseBody = formatNode(ifElseStatementNode.elseBody().get(), this.trailingWS, this.trailingNL);
        ifElseStatementNode = ifElseStatementNode.modify().withElseBody(elseBody).apply();
    } else {
        ifBody = formatNode(ifElseStatementNode.ifBody(), this.trailingWS, this.trailingNL);
    }
    return ifElseStatementNode.modify()
            .withCondition(condition)
            .withIfKeyword(ifKeyword)
            .withIfBody(ifBody)
            .apply();
}
@Override
public ElseBlockNode transform(ElseBlockNode elseBlockNode) {
    // `else` keyword, one space, then the body (a block or a nested if).
    Token elseKeyword = formatToken(elseBlockNode.elseKeyword(), 1, 0);
    StatementNode body = formatNode(elseBlockNode.elseBody(), this.trailingWS, this.trailingNL);
    return elseBlockNode.modify()
            .withElseBody(body)
            .withElseKeyword(elseKeyword)
            .apply();
}
@Override
public BlockStatementNode transform(BlockStatementNode blockStatementNode) {
    // Statements inside the block are indented one level.
    Token openingBrace = formatToken(blockStatementNode.openBraceToken(), 0, 1);
    indent();
    NodeList<StatementNode> stmtList = formatNodeList(blockStatementNode.statements(), 0, 1, 0, 1);
    unindent();
    Token closingBrace = formatToken(blockStatementNode.closeBraceToken(), this.trailingWS, this.trailingNL);
    return blockStatementNode.modify()
            .withCloseBraceToken(closingBrace)
            .withOpenBraceToken(openingBrace)
            .withStatements(stmtList)
            .apply();
}
@Override
public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDesc) {
    Token recordKeyword = formatNode(recordTypeDesc.recordKeyword(), 1, 0);

    // When the record is expanded, each field sits on its own line;
    // otherwise fields are separated by a single space on one line.
    boolean expand = shouldExpand(recordTypeDesc);
    int perFieldWS = expand ? 0 : 1;
    int perFieldNL = expand ? 1 : 0;

    Token bodyStart = formatToken(recordTypeDesc.bodyStartDelimiter(), perFieldWS, perFieldNL);

    // Fields are indented relative to the `record` keyword, not the current level.
    int savedIndentation = this.indentation;
    setIndentation(recordKeyword.location().lineRange().startLine().offset() + DEFAULT_INDENTATION);
    NodeList<Node> fieldList = formatNodeList(recordTypeDesc.fields(), perFieldWS, perFieldNL,
            perFieldWS, perFieldNL);
    if (recordTypeDesc.recordRestDescriptor().isPresent()) {
        RecordRestDescriptorNode restDescriptor =
                formatNode(recordTypeDesc.recordRestDescriptor().get(), perFieldWS, perFieldNL);
        recordTypeDesc = recordTypeDesc.modify().withRecordRestDescriptor(restDescriptor).apply();
    }
    setIndentation(savedIndentation);

    Token bodyEnd = formatToken(recordTypeDesc.bodyEndDelimiter(), this.trailingWS, this.trailingNL);
    return recordTypeDesc.modify()
            .withRecordKeyword(recordKeyword)
            .withFields(fieldList)
            .withBodyStartDelimiter(bodyStart)
            .withBodyEndDelimiter(bodyEnd)
            .apply();
}
@Override
public RecordFieldNode transform(RecordFieldNode recordField) {
    // Formats an (optionally optional) record field: [metadata] [readonly] type name[?];
    if (recordField.metadata().isPresent()) {
        MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
        recordField = recordField.modify().withMetadata(metadata).apply();
    }
    if (recordField.readonlyKeyword().isPresent()) {
        Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
        recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
    }
    Node typeName = formatNode(recordField.typeName(), 1, 0);
    Token fieldName = formatToken(recordField.fieldName(), 0, 0);
    if (recordField.questionMarkToken().isPresent()) {
        // FIX: the `?` must stay glued to the following `;`. The previous trailing
        // newline (0, 1) broke the line between `?` and `;`.
        Token questionMarkToken = formatToken(recordField.questionMarkToken().get(), 0, 0);
        recordField = recordField.modify().withQuestionMarkToken(questionMarkToken).apply();
    }
    Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);
    return recordField.modify()
            .withTypeName(typeName)
            .withFieldName(fieldName)
            .withSemicolonToken(semicolonToken)
            .apply();
}
@Override
public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordField) {
    // [metadata] [readonly] type name = expr;
    if (recordField.metadata().isPresent()) {
        MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
        recordField = recordField.modify().withMetadata(metadata).apply();
    }
    if (recordField.readonlyKeyword().isPresent()) {
        Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
        recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
    }
    Node fieldType = formatNode(recordField.typeName(), 1, 0);
    Token name = formatToken(recordField.fieldName(), 1, 0);
    Token equals = formatToken(recordField.equalsToken(), 1, 0);
    ExpressionNode defaultValue = formatNode(recordField.expression(), 0, 0);
    Token semicolon = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);
    return recordField.modify()
            .withExpression(defaultValue)
            .withEqualsToken(equals)
            .withFieldName(name)
            .withTypeName(fieldType)
            .withSemicolonToken(semicolon)
            .apply();
}
// FIX: the original carried a duplicated @Override annotation on this method,
// which is a compile error in Java (an annotation type without @Repeatable may
// appear at most once per declaration).
@Override
public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) {
    // org-name '/' — no spaces around the slash.
    Token orgName = formatToken(importOrgNameNode.orgName(), 0, 0);
    Token slashToken = formatToken(importOrgNameNode.slashToken(), this.trailingWS, this.trailingNL);
    return importOrgNameNode.modify()
            .withOrgName(orgName)
            .withSlashToken(slashToken)
            .apply();
}
@Override
public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) {
    // `as <prefix>` — single space after `as`.
    Token asKeyword = formatToken(importPrefixNode.asKeyword(), 1, 0);
    Token prefixToken = formatToken(importPrefixNode.prefix(), this.trailingWS, this.trailingNL);
    return importPrefixNode.modify()
            .withPrefix(prefixToken)
            .withAsKeyword(asKeyword)
            .apply();
}
@Override
public ImportVersionNode transform(ImportVersionNode importVersionNode) {
    // `version <a>.<b>.<c>` — no spaces inside the version number.
    Token versionKeyword = formatToken(importVersionNode.versionKeyword(), 1, 0);
    SeparatedNodeList<Token> versionParts = formatSeparatedNodeList(importVersionNode.versionNumber(),
            0, 0, 0, 0, this.trailingWS, this.trailingNL);
    return importVersionNode.modify()
            .withVersionNumber(versionParts)
            .withVersionKeyword(versionKeyword)
            .apply();
}
@Override
public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) {
    // `service <name> on <listeners> { ... }`
    if (serviceDeclarationNode.metadata().isPresent()) {
        MetadataNode metadata = formatNode(serviceDeclarationNode.metadata().get(), 1, 0);
        serviceDeclarationNode = serviceDeclarationNode.modify().withMetadata(metadata).apply();
    }
    Token serviceKeyword = formatToken(serviceDeclarationNode.serviceKeyword(), 1, 0);
    IdentifierToken name = formatToken(serviceDeclarationNode.serviceName(), 1, 0);
    Token onKeyword = formatToken(serviceDeclarationNode.onKeyword(), 1, 0);
    SeparatedNodeList<ExpressionNode> listeners =
            formatSeparatedNodeList(serviceDeclarationNode.expressions(), 0, 0, 1, 0);
    Node body = formatNode(serviceDeclarationNode.serviceBody(), this.trailingWS, this.trailingNL);
    return serviceDeclarationNode.modify()
            .withServiceBody(body)
            .withExpressions(listeners)
            .withOnKeyword(onKeyword)
            .withServiceName(name)
            .withServiceKeyword(serviceKeyword)
            .apply();
}
@Override
public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) {
    // `new <Type>(<args>)` — space only after `new`.
    Token newKeyword = formatToken(explicitNewExpressionNode.newKeyword(), 1, 0);
    TypeDescriptorNode typeDesc = formatNode(explicitNewExpressionNode.typeDescriptor(), 0, 0);
    ParenthesizedArgList argList = formatNode(explicitNewExpressionNode.parenthesizedArgList(),
            this.trailingWS, this.trailingNL);
    return explicitNewExpressionNode.modify()
            .withParenthesizedArgList(argList)
            .withTypeDescriptor(typeDesc)
            .withNewKeyword(newKeyword)
            .apply();
}
@Override
public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) {
    // `(<arg>, <arg>, ...)` — no padding inside the parentheses.
    Token openParen = formatToken(parenthesizedArgList.openParenToken(), 0, 0);
    SeparatedNodeList<FunctionArgumentNode> argList =
            formatSeparatedNodeList(parenthesizedArgList.arguments(), 0, 0, 0, 0);
    Token closeParen = formatToken(parenthesizedArgList.closeParenToken(), this.trailingWS, this.trailingNL);
    return parenthesizedArgList.modify()
            .withArguments(argList)
            .withOpenParenToken(openParen)
            .withCloseParenToken(closeParen)
            .apply();
}
@Override
public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) {
    // Resources inside the service body are indented one level.
    Token openingBrace = formatToken(serviceBodyNode.openBraceToken(), 0, 1);
    indent();
    NodeList<Node> resourceList = formatNodeList(serviceBodyNode.resources(), 0, 1, 0, 1);
    unindent();
    Token closingBrace = formatToken(serviceBodyNode.closeBraceToken(), this.trailingWS, this.trailingNL);
    return serviceBodyNode.modify()
            .withResources(resourceList)
            .withCloseBraceToken(closingBrace)
            .withOpenBraceToken(openingBrace)
            .apply();
}
@Override
public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    // `<prefix>:<identifier>` — no spaces around the colon.
    Token prefixToken = formatToken(qualifiedNameReferenceNode.modulePrefix(), 0, 0);
    Token colonToken = formatToken((Token) qualifiedNameReferenceNode.colon(), 0, 0);
    IdentifierToken identifierToken = formatToken(qualifiedNameReferenceNode.identifier(),
            this.trailingWS, this.trailingNL);
    return qualifiedNameReferenceNode.modify()
            .withIdentifier(identifierToken)
            .withColon(colonToken)
            .withModulePrefix(prefixToken)
            .apply();
}
@Override
public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {
    // `returns [@annot] <type>`
    Token returnsKeyword = formatToken(returnTypeDescriptorNode.returnsKeyword(), 1, 0);
    NodeList<AnnotationNode> annotationList = formatNodeList(returnTypeDescriptorNode.annotations(), 0, 0, 1, 0);
    Node returnType = formatNode(returnTypeDescriptorNode.type(), this.trailingWS, this.trailingNL);
    return returnTypeDescriptorNode.modify()
            .withType(returnType)
            .withAnnotations(annotationList)
            .withReturnsKeyword(returnsKeyword)
            .apply();
}
@Override
public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {
    // `<type>?` — the question mark stays glued to the type.
    Node innerType = formatNode(optionalTypeDescriptorNode.typeDescriptor(), 0, 0);
    Token questionMark = formatToken(optionalTypeDescriptorNode.questionMarkToken(),
            this.trailingWS, this.trailingNL);
    return optionalTypeDescriptorNode.modify()
            .withQuestionMarkToken(questionMark)
            .withTypeDescriptor(innerType)
            .apply();
}
@Override
public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) {
    // `<expr>;` — semicolon carries the statement's trailing trivia.
    ExpressionNode expr = formatNode(expressionStatementNode.expression(), 0, 0);
    Token semicolon = formatToken(expressionStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return expressionStatementNode.modify()
            .withSemicolonToken(semicolon)
            .withExpression(expr)
            .apply();
}
@Override
public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) {
    // `check <expr>` — single space after the keyword.
    Token checkKeyword = formatToken(checkExpressionNode.checkKeyword(), 1, 0);
    ExpressionNode checkedExpr = formatNode(checkExpressionNode.expression(), this.trailingWS, this.trailingNL);
    return checkExpressionNode.modify()
            .withExpression(checkedExpr)
            .withCheckKeyword(checkKeyword)
            .apply();
}
@Override
public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    // `<expr>-><method>(<args>)`
    ExpressionNode expression = formatNode(remoteMethodCallActionNode.expression(), 0, 0);
    Token rightArrowToken = formatToken(remoteMethodCallActionNode.rightArrowToken(), 0, 0);
    SimpleNameReferenceNode methodName = formatNode(remoteMethodCallActionNode.methodName(), 0, 0);
    Token openParenToken = formatToken(remoteMethodCallActionNode.openParenToken(), 0, 0);
    // FIX: items previously got a trailing space of 1, producing `a , b`. Use 0 so
    // arguments render as `a, b`, consistent with FunctionCallExpressionNode and
    // ParenthesizedArgList elsewhere in this formatter.
    SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(remoteMethodCallActionNode
            .arguments(), 0, 0, 0, 0);
    Token closeParenToken = formatToken(remoteMethodCallActionNode.closeParenToken(),
            this.trailingWS, this.trailingNL);
    return remoteMethodCallActionNode.modify()
            .withExpression(expression)
            .withRightArrowToken(rightArrowToken)
            .withMethodName(methodName)
            .withOpenParenToken(openParenToken)
            .withArguments(arguments)
            .withCloseParenToken(closeParenToken)
            .apply();
}
@Override
public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) {
    // Leaf node: format the name with the caller's trailing trivia.
    Token nameToken = formatToken(simpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
    return simpleNameReferenceNode.modify().withName(nameToken).apply();
}
@Override
public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) {
    // `[public] type <Name> <type-descriptor>;`
    if (typeDefinitionNode.metadata().isPresent()) {
        MetadataNode metadata = formatNode(typeDefinitionNode.metadata().get(), 1, 0);
        typeDefinitionNode = typeDefinitionNode.modify().withMetadata(metadata).apply();
    }
    if (typeDefinitionNode.visibilityQualifier().isPresent()) {
        Token visibilityQualifier = formatToken(typeDefinitionNode.visibilityQualifier().get(), 1, 0);
        typeDefinitionNode = typeDefinitionNode.modify().withVisibilityQualifier(visibilityQualifier).apply();
    }
    Token typeKeyword = formatToken(typeDefinitionNode.typeKeyword(), 1, 0);
    Token typeName = formatToken(typeDefinitionNode.typeName(), 1, 0);
    // FIX: the descriptor previously kept one trailing space (1, 0), which rendered
    // `... } ;`. Use (0, 0) so the semicolon sits flush against the descriptor, as
    // every other statement terminator in this formatter does.
    Node typeDescriptor = formatNode(typeDefinitionNode.typeDescriptor(), 0, 0);
    Token semicolonToken = formatToken(typeDefinitionNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return typeDefinitionNode.modify()
            .withTypeKeyword(typeKeyword)
            .withTypeName(typeName)
            .withTypeDescriptor(typeDescriptor)
            .withSemicolonToken(semicolonToken)
            .apply();
}
@Override
public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    // FIX: the expression is this node's only child, so it must propagate the
    // caller-requested trailing trivia instead of hard-coding one trailing space
    // (the previous (1, 0) dropped any requested newline and forced a space).
    ExpressionNode simpleContExprNode =
            formatNode(singletonTypeDescriptorNode.simpleContExprNode(), this.trailingWS, this.trailingNL);
    return singletonTypeDescriptorNode.modify()
            .withSimpleContExprNode(simpleContExprNode)
            .apply();
}
@Override
public WhileStatementNode transform(WhileStatementNode whileStatementNode) {
    // `while <cond> { ... } [on fail ...]`
    Token whileKeyword = formatToken(whileStatementNode.whileKeyword(), 1, 0);
    ExpressionNode loopCondition = formatNode(whileStatementNode.condition(), 1, 0);
    BlockStatementNode loopBody;
    if (whileStatementNode.onFailClause().isPresent()) {
        // The body keeps one trailing space so the on-fail clause follows inline.
        loopBody = formatNode(whileStatementNode.whileBody(), 1, 0);
        OnFailClauseNode onFail = formatNode(whileStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        whileStatementNode = whileStatementNode.modify().withOnFailClause(onFail).apply();
    } else {
        loopBody = formatNode(whileStatementNode.whileBody(), this.trailingWS, this.trailingNL);
    }
    return whileStatementNode.modify()
            .withWhileBody(loopBody)
            .withCondition(loopCondition)
            .withWhileKeyword(whileKeyword)
            .apply();
}
@Override
public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) {
    // `(<expr>)` — no padding inside the parentheses.
    Token openingParen = formatToken(bracedExpressionNode.openParen(), 0, 0);
    ExpressionNode innerExpr = formatNode(bracedExpressionNode.expression(), 0, 0);
    Token closingParen = formatToken(bracedExpressionNode.closeParen(), this.trailingWS, this.trailingNL);
    return bracedExpressionNode.modify()
            .withCloseParen(closingParen)
            .withExpression(innerExpr)
            .withOpenParen(openingParen)
            .apply();
}
@Override
public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) {
    // `<lvalue> = <expr>;`
    Node lvalue = formatNode(assignmentStatementNode.varRef(), 1, 0);
    Token equals = formatToken(assignmentStatementNode.equalsToken(), 1, 0);
    ExpressionNode rvalue = formatNode(assignmentStatementNode.expression(), 0, 0);
    Token semicolon = formatToken(assignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return assignmentStatementNode.modify()
            .withSemicolonToken(semicolon)
            .withExpression(rvalue)
            .withEqualsToken(equals)
            .withVarRef(lvalue)
            .apply();
}
@Override
public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) {
    // `<lhs> <op>= <rhs>;` — operator and `=` stay glued together.
    ExpressionNode lhs = formatNode(compoundAssignmentStatementNode.lhsExpression(), 1, 0);
    Token operatorToken = formatToken(compoundAssignmentStatementNode.binaryOperator(), 0, 0);
    Token equals = formatToken(compoundAssignmentStatementNode.equalsToken(), 1, 0);
    ExpressionNode rhs = formatNode(compoundAssignmentStatementNode.rhsExpression(), 0, 0);
    Token semicolon = formatToken(compoundAssignmentStatementNode.semicolonToken(),
            this.trailingWS, this.trailingNL);
    return compoundAssignmentStatementNode.modify()
            .withSemicolonToken(semicolon)
            .withRhsExpression(rhs)
            .withEqualsToken(equals)
            .withBinaryOperator(operatorToken)
            .withLhsExpression(lhs)
            .apply();
}
@Override
public DoStatementNode transform(DoStatementNode doStatementNode) {
    // `do { ... } [on fail ...]`
    Token doKeyword = formatToken(doStatementNode.doKeyword(), 1, 0);
    BlockStatementNode doBody;
    if (doStatementNode.onFailClause().isPresent()) {
        doBody = formatNode(doStatementNode.blockStatement(), 1, 0);
        OnFailClauseNode onFail = formatNode(doStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        doStatementNode = doStatementNode.modify().withOnFailClause(onFail).apply();
    } else {
        doBody = formatNode(doStatementNode.blockStatement(), this.trailingWS, this.trailingNL);
    }
    return doStatementNode.modify()
            .withBlockStatement(doBody)
            .withDoKeyword(doKeyword)
            .apply();
}
@Override
public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) {
    // `foreach <binding> in <iterable> { ... } [on fail ...]`
    Token forEachKeyword = formatToken(forEachStatementNode.forEachKeyword(), 1, 0);
    TypedBindingPatternNode bindingPattern = formatNode(forEachStatementNode.typedBindingPattern(), 1, 0);
    Token inKeyword = formatToken(forEachStatementNode.inKeyword(), 1, 0);
    Node iterable = formatNode(forEachStatementNode.actionOrExpressionNode(), 1, 0);
    StatementNode loopBody;
    if (forEachStatementNode.onFailClause().isPresent()) {
        loopBody = formatNode(forEachStatementNode.blockStatement(), 1, 0);
        OnFailClauseNode onFail = formatNode(forEachStatementNode.onFailClause().get(),
                this.trailingWS, this.trailingNL);
        forEachStatementNode = forEachStatementNode.modify().withOnFailClause(onFail).apply();
    } else {
        loopBody = formatNode(forEachStatementNode.blockStatement(), this.trailingWS, this.trailingNL);
    }
    return forEachStatementNode.modify()
            .withBlockStatement(loopBody)
            .withActionOrExpressionNode(iterable)
            .withInKeyword(inKeyword)
            .withTypedBindingPattern(bindingPattern)
            .withForEachKeyword(forEachKeyword)
            .apply();
}
@Override
public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) {
    // `<lhs> <op> <rhs>` — single spaces around the operator.
    Node leftOperand = formatNode(binaryExpressionNode.lhsExpr(), 1, 0);
    Token operatorToken = formatToken(binaryExpressionNode.operator(), 1, 0);
    Node rightOperand = formatNode(binaryExpressionNode.rhsExpr(), this.trailingWS, this.trailingNL);
    return binaryExpressionNode.modify()
            .withRhsExpr(rightOperand)
            .withOperator(operatorToken)
            .withLhsExpr(leftOperand)
            .apply();
}
@Override
public OnFailClauseNode transform(OnFailClauseNode onFailClauseNode) {
    // `on fail <type> <name> { ... }`
    Token onKeyword = formatToken(onFailClauseNode.onKeyword(), 1, 0);
    Token failKeyword = formatToken(onFailClauseNode.failKeyword(), 1, 0);
    TypeDescriptorNode errorType = formatNode(onFailClauseNode.typeDescriptor(), 1, 0);
    IdentifierToken errorName = formatToken(onFailClauseNode.failErrorName(), 1, 0);
    BlockStatementNode failBody = formatNode(onFailClauseNode.blockStatement(),
            this.trailingWS, this.trailingNL);
    return onFailClauseNode.modify()
            .withBlockStatement(failBody)
            .withFailErrorName(errorName)
            .withTypeDescriptor(errorType)
            .withFailKeyword(failKeyword)
            .withOnKeyword(onKeyword)
            .apply();
}
@Override
public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) {
    // `return [<expr>];` — keyword keeps a trailing space only when an expression follows.
    boolean hasExpression = returnStatementNode.expression().isPresent();
    Token returnKeyword = formatToken(returnStatementNode.returnKeyword(), hasExpression ? 1 : 0, 0);
    if (hasExpression) {
        ExpressionNode returnedExpr = formatNode(returnStatementNode.expression().get(), 0, 0);
        returnStatementNode = returnStatementNode.modify().withExpression(returnedExpr).apply();
    }
    Token semicolon = formatToken(returnStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
    return returnStatementNode.modify()
            .withSemicolonToken(semicolon)
            .withReturnKeyword(returnKeyword)
            .apply();
}
@Override
public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) {
    // `<name>(<args>)` — no padding inside the parentheses.
    NameReferenceNode callee = formatNode(functionCallExpressionNode.functionName(), 0, 0);
    Token openParen = formatToken(functionCallExpressionNode.openParenToken(), 0, 0);
    SeparatedNodeList<FunctionArgumentNode> argList =
            formatSeparatedNodeList(functionCallExpressionNode.arguments(), 0, 0, 0, 0);
    Token closeParen = formatToken(functionCallExpressionNode.closeParenToken(),
            this.trailingWS, this.trailingNL);
    return functionCallExpressionNode.modify()
            .withArguments(argList)
            .withCloseParenToken(closeParen)
            .withOpenParenToken(openParen)
            .withFunctionName(callee)
            .apply();
}
@Override
public UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    // `<left>|<right>` — no spaces around the pipe.
    TypeDescriptorNode leftType = formatNode(unionTypeDescriptorNode.leftTypeDesc(), 0, 0);
    Token pipe = formatToken(unionTypeDescriptorNode.pipeToken(), 0, 0);
    TypeDescriptorNode rightType = formatNode(unionTypeDescriptorNode.rightTypeDesc(),
            this.trailingWS, this.trailingNL);
    return unionTypeDescriptorNode.modify()
            .withRightTypeDesc(rightType)
            .withPipeToken(pipe)
            .withLeftTypeDesc(leftType)
            .apply();
}
@Override
public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) {
    // `()` — the two parentheses stay glued together.
    Token openParen = formatToken(nilTypeDescriptorNode.openParenToken(), 0, 0);
    Token closeParen = formatToken(nilTypeDescriptorNode.closeParenToken(), this.trailingWS, this.trailingNL);
    return nilTypeDescriptorNode.modify()
            .withCloseParenToken(closeParen)
            .withOpenParenToken(openParen)
            .apply();
}
@Override
public IdentifierToken transform(IdentifierToken identifier) {
    // Fallback for bare identifiers: apply the caller's trailing trivia.
    IdentifierToken formatted = formatToken(identifier, this.trailingWS, this.trailingNL);
    return formatted;
}
@Override
public Token transform(Token token) {
    // Fallback for any other token: apply the caller's trailing trivia.
    Token formatted = formatToken(token, this.trailingWS, this.trailingNL);
    return formatted;
}
/**
 * Format a node.
 *
 * @param <T> Type of the node
 * @param node Node to be formatted
 * @param trailingWS Number of single-length spaces to be added after the node
 * @param trailingNL Number of newlines to be added after the node
 * @return Formatted node
 */
@SuppressWarnings("unchecked")
private <T extends Node> T formatNode(T node, int trailingWS, int trailingNL) {
    if (node == null) {
        return node;
    }
    // Nodes outside the requested formatting range are returned untouched, but we
    // still record whether they end with a newline so later tokens indent correctly.
    if (!isInLineRange(node, lineRange)) {
        checkForNewline(node);
        return node;
    }
    // Save and restore the trailing-trivia state: the recursive apply() below may
    // format children with different settings.
    int prevTrailingNL = this.trailingNL;
    int prevTrailingWS = this.trailingWS;
    this.trailingNL = trailingNL;
    this.trailingWS = trailingWS;
    node = (T) node.apply(this);
    // If formatting pushed the line past the column limit, re-format the node on a
    // fresh line (see wrap()).
    if (this.lineLength > COLUMN_LIMIT) {
        node = wrap(node);
    }
    this.trailingNL = prevTrailingNL;
    this.trailingWS = prevTrailingWS;
    return node;
}
/**
 * Wrap the node. This is equivalent to adding a newline before the node and
 * re-formatting the node. Wrapped content will start from the current level
 * of indentation.
 *
 * @param <T> Node type
 * @param node Node to be wrapped
 * @return Wrapped node
 */
@SuppressWarnings("unchecked")
private <T extends Node> T wrap(T node) {
    // Request one extra leading newline, reset the column tracker, and mark that a
    // newline precedes the node so getLeadingMinutiae() emits the indentation.
    this.leadingNL += 1;
    this.lineLength = 0;
    this.hasNewline = true;
    // Re-run the visitor on the same node; it re-formats with the new line state.
    return (T) node.apply(this);
}
/**
 * Format a token.
 *
 * @param <T> Type of the token
 * @param token Token to be formatted
 * @param trailingWS Number of single-length spaces to be added after the token
 * @param trailingNL Number of newlines to be added after the token
 * @return Formatted token
 */
private <T extends Token> T formatToken(T token, int trailingWS, int trailingNL) {
    if (token == null) {
        return token;
    }
    // Tokens outside the requested formatting range are returned untouched, but we
    // still record whether they end with a newline for subsequent indentation.
    if (!isInLineRange(token, lineRange)) {
        checkForNewline(token);
        return token;
    }
    int prevTrailingNL = this.trailingNL;
    int prevTrailingWS = this.trailingWS;
    // A token itself carries at most one trailing newline; any extra requested
    // newlines become leading newlines of the NEXT token (see leadingNL below).
    this.trailingNL = trailingNL > 0 ? 1 : 0;
    this.trailingWS = trailingWS;
    token = formatTokenInternal(token);
    // Carry the remaining newlines and the "line break happened" flag forward so
    // the next token gets its leading newlines and indentation.
    this.leadingNL = trailingNL > 0 ? trailingNL - 1 : 0;
    this.hasNewline = trailingNL > 0;
    this.trailingNL = prevTrailingNL;
    this.trailingWS = prevTrailingWS;
    return token;
}
// Records whether an untouched (out-of-range) node ends with a line break, so the
// formatter knows the next formatted token starts on a fresh line.
private <T extends Node> void checkForNewline(T node) {
    for (Minutiae minutiae : node.trailingMinutiae()) {
        if (minutiae.kind() != SyntaxKind.END_OF_LINE_MINUTIAE) {
            continue;
        }
        this.hasNewline = true;
        return;
    }
}
/**
 * Format a list of nodes, applying one trailing-trivia policy to every item
 * except the last, which gets the list-level policy.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item of the list
 * @param itemTrailingNL Number of newlines to be added after each item of the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item of the list
 * @param listTrailingNL Number of newlines to be added after the last item of the list
 * @return Formatted node list (the original instance if nothing changed)
 */
@SuppressWarnings("unchecked")
protected <T extends Node> NodeList<T> formatNodeList(NodeList<T> nodeList,
                                                      int itemTrailingWS,
                                                      int itemTrailingNL,
                                                      int listTrailingWS,
                                                      int listTrailingNL) {
    if (nodeList.isEmpty()) {
        return nodeList;
    }
    int count = nodeList.size();
    Node[] formattedNodes = new Node[count];
    boolean anyChange = false;
    for (int i = 0; i < count; i++) {
        T original = nodeList.get(i);
        boolean isLast = (i == count - 1);
        T formatted = isLast
                ? formatNode(original, listTrailingWS, listTrailingNL)
                : formatNode(original, itemTrailingWS, itemTrailingNL);
        anyChange |= (original != formatted);
        formattedNodes[i] = formatted;
    }
    // Preserve identity when formatting was a no-op, so callers can cheaply detect changes.
    return anyChange ? (NodeList<T>) NodeFactory.createNodeList(formattedNodes) : nodeList;
}
/**
 * Format a delimited list of nodes, assuming each separator is followed by a
 * single whitespace character and no newline.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL Number of newlines to be added after each item in the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL Number of newlines to be added after the last item in the list
 * @return Formatted node list
 */
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    // Delegate with the default separator policy: one trailing space, no newline.
    return formatSeparatedNodeList(nodeList, itemTrailingWS, itemTrailingNL, 1, 0,
            listTrailingWS, listTrailingNL);
}
/**
 * Format a delimited list of nodes. Items and separators are interleaved into a
 * single array ({@code item, sep, item, sep, ..., item}) of length {@code 2n - 1}.
 *
 * @param <T> Type of the list item
 * @param nodeList Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL Number of newlines to be added after each item in the list
 * @param separatorTrailingWS Number of single-length spaces to be added after each separator in the list
 * @param separatorTrailingNL Number of newlines to be added after each separator in the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL Number of newlines to be added after the last item in the list
 * @return Formatted node list (the original instance if nothing changed)
 */
@SuppressWarnings("unchecked")
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int separatorTrailingWS,
                                                                        int separatorTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    if (nodeList.isEmpty()) {
        return nodeList;
    }
    int itemCount = nodeList.size();
    Node[] interleaved = new Node[itemCount * 2 - 1];
    boolean anyChange = false;
    for (int i = 0; i < itemCount; i++) {
        boolean isLast = (i == itemCount - 1);
        T originalItem = nodeList.get(i);
        T formattedItem = isLast
                ? formatNode(originalItem, listTrailingWS, listTrailingNL)
                : formatNode(originalItem, itemTrailingWS, itemTrailingNL);
        interleaved[2 * i] = formattedItem;
        anyChange |= (originalItem != formattedItem);
        if (isLast) {
            // No separator after the final item.
            break;
        }
        Token originalSeparator = nodeList.getSeparator(i);
        Token formattedSeparator = formatToken(originalSeparator, separatorTrailingWS, separatorTrailingNL);
        interleaved[2 * i + 1] = formattedSeparator;
        anyChange |= (originalSeparator != formattedSeparator);
    }
    // Preserve identity when formatting was a no-op.
    return anyChange ? (SeparatedNodeList<T>) NodeFactory.createSeparatedNodeList(interleaved) : nodeList;
}
/**
 * Rebuild a token's leading and trailing minutiae from the formatter's current
 * state, and account for the token's own text in the running line length.
 *
 * @param <T> Type of the token
 * @param token Token to be formatted
 * @return Formatted token
 */
@SuppressWarnings("unchecked")
private <T extends Token> T formatTokenInternal(T token) {
    MinutiaeList leading = getLeadingMinutiae();
    // The token's text contributes to the current line's length.
    this.lineLength += token.text().length();
    MinutiaeList trailing = getTrailingMinutiae();
    return (T) token.modify(leading, trailing);
}
/**
 * Build the leading minutiae for the next token: pending newlines followed by
 * the current indentation, but only when the token starts a fresh line.
 *
 * @return Leading minutiae list
 */
private MinutiaeList getLeadingMinutiae() {
    List<Minutiae> minutiae = new ArrayList<>();
    if (this.hasNewline) {
        int pendingNewlines = this.leadingNL;
        while (pendingNewlines-- > 0) {
            minutiae.add(getNewline());
        }
        if (this.indentation > 0) {
            minutiae.add(NodeFactory.createWhitespaceMinutiae(getWSContent(this.indentation)));
        }
    }
    return NodeFactory.createMinutiaeList(minutiae);
}
/**
* Get trailing minutiae.
*
* @return Trailing minutiae list
*/
private MinutiaeList getTrailingMinutiae() {
List<Minutiae> trailingMinutiae = new ArrayList<>();
if (this.trailingWS > 0) {
String wsContent = getWSContent(this.trailingWS);
trailingMinutiae.add(NodeFactory.createWhitespaceMinutiae(wsContent));
}
if (this.trailingNL > 0) {
trailingMinutiae.add(getNewline());
}
MinutiaeList newTrailingMinutiaeList = NodeFactory.createMinutiaeList(trailingMinutiae);
return newTrailingMinutiaeList;
}
    /**
     * Create an end-of-line minutiae node.
     * Emitting a newline starts a fresh line, so the tracked line length is reset.
     *
     * @return End-of-line minutiae
     */
    private Minutiae getNewline() {
        this.lineLength = 0;
        return NodeFactory.createEndOfLineMinutiae(FormatterUtils.NEWLINE_SYMBOL);
    }
/**
* Indent the code by the 4-whitespace characters.
*/
    private void indent() {
        // Advance the indentation by one level (DEFAULT_INDENTATION columns).
        this.indentation += DEFAULT_INDENTATION;
    }
/**
* Undo the indentation of the code by the 4-whitespace characters.
*/
private void unindent() {
if (this.indentation < DEFAULT_INDENTATION) {
this.indentation = 0;
return;
}
this.indentation -= DEFAULT_INDENTATION;
}
/**
* Set the indentation for the code to follow.
*
* @param value Number of characters to set the indentation from the start of the line.
*/
    private void setIndentation(int value) {
        // Overrides the current indentation level outright (absolute, not relative).
        this.indentation = value;
    }
private String getWSContent(int count) {
this.lineLength += count;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < count; i++) {
sb.append(" ");
}
return sb.toString();
}
private boolean shouldExpand(RecordTypeDescriptorNode recordTypeDesc) {
int fieldCount = recordTypeDesc.fields().size();
fieldCount += recordTypeDesc.recordRestDescriptor().isPresent() ? 1 : 0;
if (fieldCount <= 1) {
return false;
}
if (fieldCount > 3) {
return true;
}
for (Node field : recordTypeDesc.fields()) {
TextRange textRange = field.textRange();
if ((textRange.endOffset() - textRange.startOffset()) > 15) {
return true;
}
}
return false;
}
}
|
If we can return `false` from the calling methods (i.e., `hasQualifiedIdentifier` and `hasBacktickExpr`) whenever `peek(lookahead.offset).kind == SyntaxKind.BACKTICK_CONTENT`, then we can simplify this by removing the `Lookahead` class.
|
    /**
     * Checks whether the tokens ahead form a valid backtick content sequence for
     * the given reference genre, terminated by a closing backtick token.
     *
     * @param refGenre Genre of the backtick reference being validated
     * @return {@code true} if the lookahead matches the genre and ends with a backtick token
     */
    private boolean isValidBacktickContentSequence(ReferenceGenre refGenre) {
        boolean hasMatch;
        // Mutable lookahead cursor; the has* helpers below advance its offset
        // as they consume tokens — presumably without touching the real parser
        // position (NOTE(review): confirm peek() is side-effect free).
        Lookahead lookahead = new Lookahead();
        switch (refGenre) {
            case SPECIAL_KEY:
                hasMatch = hasQualifiedIdentifier(lookahead);
                break;
            case FUNCTION_KEY:
                hasMatch = hasBacktickExpr(lookahead, true);
                break;
            case NO_KEY:
                hasMatch = hasBacktickExpr(lookahead, false);
                break;
            default:
                // Guards against new genres being added without a case here.
                throw new IllegalStateException("Unsupported backtick reference genre");
        }
        // Valid only if the genre matched AND the next token closes the backtick span.
        return hasMatch && peek(lookahead.offset).kind == SyntaxKind.BACKTICK_TOKEN;
    }
|
return hasMatch && peek(lookahead.offset).kind == SyntaxKind.BACKTICK_TOKEN;
|
    /**
     * Checks whether the tokens ahead form a valid backtick content sequence for
     * the given reference genre, terminated by a closing backtick token.
     *
     * @param refGenre Genre of the backtick reference being validated
     * @return {@code true} if the lookahead matches the genre and ends with a backtick token
     */
    private boolean isValidBacktickContentSequence(ReferenceGenre refGenre) {
        boolean hasMatch;
        // Mutable lookahead cursor; the has* helpers below advance its offset
        // as they consume tokens — presumably without touching the real parser
        // position (NOTE(review): confirm peek() is side-effect free).
        Lookahead lookahead = new Lookahead();
        switch (refGenre) {
            case SPECIAL_KEY:
                hasMatch = hasQualifiedIdentifier(lookahead);
                break;
            case FUNCTION_KEY:
                hasMatch = hasBacktickExpr(lookahead, true);
                break;
            case NO_KEY:
                hasMatch = hasBacktickExpr(lookahead, false);
                break;
            default:
                // Guards against new genres being added without a case here.
                throw new IllegalStateException("Unsupported backtick reference genre");
        }
        // Valid only if the genre matched AND the next token closes the backtick span.
        return hasMatch && peek(lookahead.offset).kind == SyntaxKind.BACKTICK_TOKEN;
    }
|
class Lookahead {
private int offset = 1;
}
|
class Lookahead {
private int offset = 1;
}
|
Please use `each` to replace `tableHintLimitedContext`, in keeping with the loop-variable naming convention used throughout this visitor.
|
public ASTNode visitWithTableHint(final WithTableHintContext ctx) {
WithTableHintSegment withTableHintSegment = new WithTableHintSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
if (null != ctx.tableHintLimited()) {
Collection<TableHintLimitedSegment> tableHintLimitedSegments = new LinkedList<>();
for (TableHintLimitedContext tableHintLimitedContext : ctx.tableHintLimited()) {
tableHintLimitedSegments.add((TableHintLimitedSegment) visit(tableHintLimitedContext));
}
withTableHintSegment.getTableHintLimitedSegments().addAll(tableHintLimitedSegments);
}
return withTableHintSegment;
}
|
for (TableHintLimitedContext tableHintLimitedContext : ctx.tableHintLimited()) {
|
    /**
     * Builds a WITH table-hint segment spanning the whole context, collecting
     * every limited table hint child (if any) into the segment.
     *
     * @param ctx WITH table-hint parse context
     * @return populated {@code WithTableHintSegment}
     */
    public ASTNode visitWithTableHint(final WithTableHintContext ctx) {
        WithTableHintSegment result = new WithTableHintSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
        if (null != ctx.tableHintLimited()) {
            Collection<TableHintLimitedSegment> tableHintLimitedSegments = new LinkedList<>();
            for (TableHintLimitedContext each : ctx.tableHintLimited()) {
                tableHintLimitedSegments.add((TableHintLimitedSegment) visit(each));
            }
            result.getTableHintLimitedSegments().addAll(tableHintLimitedSegments);
        }
        return result;
    }
|
class SQLServerStatementVisitor extends SQLServerStatementBaseVisitor<ASTNode> {
private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>();
@Override
public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) {
return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION);
}
@Override
public final ASTNode visitLiterals(final LiteralsContext ctx) {
if (null != ctx.stringLiterals()) {
return visit(ctx.stringLiterals());
}
if (null != ctx.numberLiterals()) {
return visit(ctx.numberLiterals());
}
if (null != ctx.hexadecimalLiterals()) {
return visit(ctx.hexadecimalLiterals());
}
if (null != ctx.bitValueLiterals()) {
return visit(ctx.bitValueLiterals());
}
if (null != ctx.booleanLiterals()) {
return visit(ctx.booleanLiterals());
}
if (null != ctx.nullValueLiterals()) {
return visit(ctx.nullValueLiterals());
}
throw new IllegalStateException("Literals must have string, number, dateTime, hex, bit, boolean or null.");
}
@Override
public final ASTNode visitStringLiterals(final StringLiteralsContext ctx) {
if (null != ctx.STRING_()) {
return new StringLiteralValue(ctx.getText());
} else {
return new StringLiteralValue(ctx.getText().substring(1));
}
}
@Override
public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) {
return new NumberLiteralValue(ctx.getText());
}
@Override
public final ASTNode visitHexadecimalLiterals(final HexadecimalLiteralsContext ctx) {
return new OtherLiteralValue(ctx.getText());
}
@Override
public final ASTNode visitBitValueLiterals(final BitValueLiteralsContext ctx) {
return new OtherLiteralValue(ctx.getText());
}
@Override
public final ASTNode visitBooleanLiterals(final BooleanLiteralsContext ctx) {
return new BooleanLiteralValue(ctx.getText());
}
@Override
public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) {
return new NullLiteralValue(ctx.getText());
}
@Override
public final ASTNode visitIdentifier(final IdentifierContext ctx) {
return null == ctx.regularIdentifier() ? visit(ctx.delimitedIdentifier()) : visit(ctx.regularIdentifier());
}
@Override
public final ASTNode visitRegularIdentifier(final RegularIdentifierContext ctx) {
UnreservedWordContext unreservedWord = ctx.unreservedWord();
return null == unreservedWord ? new IdentifierValue(ctx.getText()) : (IdentifierValue) visit(unreservedWord);
}
@Override
public final ASTNode visitDelimitedIdentifier(final DelimitedIdentifierContext ctx) {
return new IdentifierValue(ctx.getText());
}
@Override
public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) {
return new IdentifierValue(ctx.getText());
}
@Override
public final ASTNode visitSchemaName(final SchemaNameContext ctx) {
return visit(ctx.identifier());
}
@Override
public final ASTNode visitTableName(final TableNameContext ctx) {
SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())));
OwnerContext owner = ctx.owner();
if (null != owner) {
OwnerSegment ownerSegment = new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()));
if (null != ctx.databaseName()) {
DatabaseNameContext databaseName = ctx.databaseName();
ownerSegment.setOwner(new OwnerSegment(databaseName.getStart().getStartIndex(), databaseName.getStop().getStopIndex(), (IdentifierValue) visit(databaseName.identifier())));
}
result.setOwner(ownerSegment);
} else if (null != ctx.databaseName()) {
DatabaseNameContext databaseName = ctx.databaseName();
result.setOwner(new OwnerSegment(databaseName.getStart().getStartIndex(), databaseName.getStop().getStopIndex(), (IdentifierValue) visit(databaseName.identifier())));
}
return result;
}
@Override
public final ASTNode visitColumnName(final ColumnNameContext ctx) {
ColumnSegment result;
if (null != ctx.name()) {
result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()));
} else {
result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.scriptVariableName()));
}
OwnerContext owner = ctx.owner();
if (null != owner) {
result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
}
return result;
}
@Override
public ASTNode visitScriptVariableName(final ScriptVariableNameContext ctx) {
return new IdentifierValue(ctx.getText());
}
@Override
public final ASTNode visitIndexName(final IndexNameContext ctx) {
IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName);
}
@Override
public final ASTNode visitConstraintName(final ConstraintNameContext ctx) {
return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
}
@Override
public final ASTNode visitTableNames(final TableNamesContext ctx) {
CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
for (TableNameContext each : ctx.tableName()) {
result.getValue().add((SimpleTableSegment) visit(each));
}
return result;
}
@Override
public final ASTNode visitColumnNames(final ColumnNamesContext ctx) {
CollectionValue<ColumnSegment> result = new CollectionValue<>();
for (ColumnNameContext each : ctx.columnName()) {
result.getValue().add((ColumnSegment) visit(each));
}
return result;
}
@Override
public ASTNode visitColumnNamesWithSort(final ColumnNamesWithSortContext ctx) {
CollectionValue<ColumnSegment> result = new CollectionValue<>();
for (ColumnNameWithSortContext each : ctx.columnNameWithSort()) {
result.getValue().add((ColumnSegment) visit(each));
}
return result;
}
@Override
public final ASTNode visitExpr(final ExprContext ctx) {
if (null != ctx.booleanPrimary()) {
return visit(ctx.booleanPrimary());
}
if (null != ctx.LP_()) {
return visit(ctx.expr(0));
}
if (null != ctx.andOperator()) {
return createBinaryOperationExpression(ctx, ctx.andOperator().getText());
}
if (null != ctx.orOperator()) {
return createBinaryOperationExpression(ctx, ctx.orOperator().getText());
}
if (null != ctx.distinctFrom()) {
return createBinaryOperationExpression(ctx, ctx.distinctFrom().getText());
}
return new NotExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (ExpressionSegment) visit(ctx.expr(0)), false);
}
private ASTNode createBinaryOperationExpression(final ExprContext ctx, final String operator) {
ExpressionSegment left = (ExpressionSegment) visit(ctx.expr(0));
ExpressionSegment right = (ExpressionSegment) visit(ctx.expr(1));
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
@Override
public final ASTNode visitBooleanPrimary(final BooleanPrimaryContext ctx) {
if (null != ctx.IS()) {
String rightText = "";
if (null != ctx.NOT()) {
rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(ctx.NOT().getSymbol().getStartIndex(),
ctx.NOT().getSymbol().getStopIndex()))).concat(" ");
}
Token operatorToken = null;
if (null != ctx.NULL()) {
operatorToken = ctx.NULL().getSymbol();
}
if (null != ctx.TRUE()) {
operatorToken = ctx.TRUE().getSymbol();
}
if (null != ctx.FALSE()) {
operatorToken = ctx.FALSE().getSymbol();
}
int startIndex = null == operatorToken ? ctx.IS().getSymbol().getStopIndex() + 2 : operatorToken.getStartIndex();
rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(startIndex, ctx.stop.getStopIndex())));
ExpressionSegment right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText);
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
String operator = "IS";
return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
if (null != ctx.comparisonOperator() || null != ctx.SAFE_EQ_()) {
return createCompareSegment(ctx);
}
return visit(ctx.predicate());
}
private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) {
ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
ExpressionSegment right;
if (null != ctx.predicate()) {
right = (ExpressionSegment) visit(ctx.predicate());
} else {
right = (ExpressionSegment) visit(ctx.subquery());
}
String operator = null == ctx.SAFE_EQ_() ? ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText();
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
@Override
public final ASTNode visitPredicate(final PredicateContext ctx) {
if (null != ctx.IN()) {
return createInSegment(ctx);
}
if (null != ctx.BETWEEN()) {
return createBetweenSegment(ctx);
}
if (null != ctx.LIKE()) {
return createBinaryOperationExpressionFromLike(ctx);
}
return visit(ctx.bitExpr(0));
}
private BinaryOperationExpression createBinaryOperationExpressionFromLike(final PredicateContext ctx) {
ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
ListExpression right = new ListExpression(ctx.simpleExpr(0).start.getStartIndex(), ctx.simpleExpr().get(ctx.simpleExpr().size() - 1).stop.getStopIndex());
for (SimpleExprContext each : ctx.simpleExpr()) {
right.getItems().add((ExpressionSegment) visit(each));
}
String operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE";
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
private InExpression createInSegment(final PredicateContext ctx) {
ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
ExpressionSegment right;
if (null == ctx.subquery()) {
ListExpression listExpression = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex());
for (ExprContext each : ctx.expr()) {
listExpression.getItems().add((ExpressionSegment) visit(each));
}
right = listExpression;
} else {
right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (SQLServerSelectStatement) visit(ctx.subquery()),
getOriginalText(ctx.subquery())));
}
boolean not = null != ctx.NOT();
return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not);
}
private BetweenExpression createBetweenSegment(final PredicateContext ctx) {
ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
ExpressionSegment between = (ExpressionSegment) visit(ctx.bitExpr(1));
ExpressionSegment and = (ExpressionSegment) visit(ctx.predicate());
boolean not = null != ctx.NOT();
return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not);
}
@Override
public final ASTNode visitBitExpr(final BitExprContext ctx) {
if (null != ctx.simpleExpr()) {
return createExpressionSegment(visit(ctx.simpleExpr()), ctx);
}
ExpressionSegment left = (ExpressionSegment) visit(ctx.getChild(0));
ExpressionSegment right = (ExpressionSegment) visit(ctx.getChild(2));
String operator = ctx.getChild(1).getText();
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
private ASTNode createExpressionSegment(final ASTNode astNode, final ParserRuleContext context) {
if (astNode instanceof StringLiteralValue) {
return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((StringLiteralValue) astNode).getValue());
}
if (astNode instanceof NumberLiteralValue) {
return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((NumberLiteralValue) astNode).getValue());
}
if (astNode instanceof BooleanLiteralValue) {
return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((BooleanLiteralValue) astNode).getValue());
}
if (astNode instanceof ParameterMarkerValue) {
ParameterMarkerValue parameterMarker = (ParameterMarkerValue) astNode;
ParameterMarkerExpressionSegment segment = new ParameterMarkerExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(),
parameterMarker.getValue(), parameterMarker.getType());
parameterMarkerSegments.add(segment);
return segment;
}
if (astNode instanceof SubquerySegment) {
return new SubqueryExpressionSegment((SubquerySegment) astNode);
}
if (astNode instanceof OtherLiteralValue) {
return new CommonExpressionSegment(context.getStart().getStartIndex(), context.getStop().getStopIndex(), context.getText());
}
return astNode;
}
@Override
public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) {
int startIndex = ctx.getStart().getStartIndex();
int stopIndex = ctx.getStop().getStopIndex();
if (null != ctx.subquery()) {
return new SubquerySegment(startIndex, stopIndex, (SQLServerSelectStatement) visit(ctx.subquery()), getOriginalText(ctx.subquery()));
}
if (null != ctx.parameterMarker()) {
ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker());
ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(startIndex, stopIndex, parameterMarker.getValue(), parameterMarker.getType());
parameterMarkerSegments.add(result);
return result;
}
if (null != ctx.literals()) {
return SQLUtils.createLiteralExpression(visit(ctx.literals()), startIndex, stopIndex, ctx.literals().start.getInputStream().getText(new Interval(startIndex, stopIndex)));
}
if (null != ctx.functionCall()) {
return visit(ctx.functionCall());
}
if (null != ctx.columnName()) {
return visit(ctx.columnName());
}
return visitRemainSimpleExpr(ctx);
}
private ASTNode visitRemainSimpleExpr(final SimpleExprContext ctx) {
if (null != ctx.caseExpression()) {
visit(ctx.caseExpression());
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new OtherLiteralValue(text);
}
for (ExprContext each : ctx.expr()) {
visit(each);
}
for (SimpleExprContext each : ctx.simpleExpr()) {
visit(each);
}
String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
}
@Override
public final ASTNode visitFunctionCall(final FunctionCallContext ctx) {
if (null != ctx.aggregationFunction()) {
return visit(ctx.aggregationFunction());
}
if (null != ctx.specialFunction()) {
return visit(ctx.specialFunction());
}
if (null != ctx.regularFunction()) {
return visit(ctx.regularFunction());
}
throw new IllegalStateException("FunctionCallContext must have aggregationFunction, regularFunction or specialFunction.");
}
@Override
public final ASTNode visitAggregationFunction(final AggregationFunctionContext ctx) {
String aggregationType = ctx.aggregationFunctionName().getText();
return AggregationType.isAggregationType(aggregationType)
? createAggregationSegment(ctx, aggregationType)
: new ExpressionProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), getOriginalText(ctx));
}
private ASTNode createAggregationSegment(final AggregationFunctionContext ctx, final String aggregationType) {
AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase());
if (null != ctx.distinct()) {
AggregationDistinctProjectionSegment result =
new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, getOriginalText(ctx), getDistinctExpression(ctx));
result.getParameters().addAll(getExpressions(ctx));
return result;
}
AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, getOriginalText(ctx));
result.getParameters().addAll(getExpressions(ctx));
return result;
}
private Collection<ExpressionSegment> getExpressions(final AggregationFunctionContext ctx) {
if (null == ctx.expr()) {
return Collections.emptyList();
}
Collection<ExpressionSegment> result = new LinkedList<>();
for (ExprContext each : ctx.expr()) {
result.add((ExpressionSegment) visit(each));
}
return result;
}
private String getDistinctExpression(final AggregationFunctionContext ctx) {
StringBuilder result = new StringBuilder();
for (int i = 3; i < ctx.getChildCount() - 1; i++) {
result.append(ctx.getChild(i).getText());
}
return result.toString();
}
@Override
public final ASTNode visitSpecialFunction(final SpecialFunctionContext ctx) {
if (null != ctx.castFunction()) {
return visit(ctx.castFunction());
}
if (null != ctx.convertFunction()) {
return visit(ctx.convertFunction());
}
if (null != ctx.charFunction()) {
return visit(ctx.charFunction());
}
return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getChild(0).getText(), getOriginalText(ctx));
}
@Override
public final ASTNode visitCastFunction(final CastFunctionContext ctx) {
calculateParameterCount(Collections.singleton(ctx.expr()));
FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CAST().getText(), getOriginalText(ctx));
ASTNode exprSegment = visit(ctx.expr());
if (exprSegment instanceof ColumnSegment) {
result.getParameters().add((ColumnSegment) exprSegment);
} else if (exprSegment instanceof LiteralExpressionSegment) {
result.getParameters().add((LiteralExpressionSegment) exprSegment);
}
result.getParameters().add((DataTypeSegment) visit(ctx.dataType()));
return result;
}
@Override
public ASTNode visitConvertFunction(final ConvertFunctionContext ctx) {
FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CONVERT().getText(), getOriginalText(ctx));
result.getParameters().add((DataTypeSegment) visit(ctx.dataType()));
result.getParameters().add((ExpressionSegment) visit(ctx.expr()));
if (null != ctx.NUMBER_()) {
result.getParameters().add(new LiteralExpressionSegment(ctx.NUMBER_().getSymbol().getStartIndex(), ctx.NUMBER_().getSymbol().getStopIndex(), ctx.NUMBER_().getText()));
}
return result;
}
@Override
public final ASTNode visitCharFunction(final CharFunctionContext ctx) {
calculateParameterCount(ctx.expr());
return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CHAR().getText(), getOriginalText(ctx));
}
@Override
public final ASTNode visitRegularFunction(final RegularFunctionContext ctx) {
FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.regularFunctionName().getText(), getOriginalText(ctx));
Collection<ExpressionSegment> expressionSegments = ctx.expr().stream().map(each -> (ExpressionSegment) visit(each)).collect(Collectors.toList());
result.getParameters().addAll(expressionSegments);
return result;
}
@Override
public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) {
return new KeywordValue(ctx.getText());
}
private void calculateParameterCount(final Collection<ExprContext> exprContexts) {
for (ExprContext each : exprContexts) {
visit(each);
}
}
@Override
public final ASTNode visitOrderByItem(final OrderByItemContext ctx) {
OrderDirection orderDirection = null == ctx.DESC() ? OrderDirection.ASC : OrderDirection.DESC;
if (null != ctx.columnName()) {
ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
return new ColumnOrderByItemSegment(column, orderDirection, null);
}
if (null != ctx.numberLiterals()) {
return new IndexOrderByItemSegment(ctx.numberLiterals().getStart().getStartIndex(), ctx.numberLiterals().getStop().getStopIndex(),
SQLUtils.getExactlyNumber(ctx.numberLiterals().getText(), 10).intValue(), orderDirection, null);
}
return new ExpressionOrderByItemSegment(ctx.expr().getStart().getStartIndex(), ctx.expr().getStop().getStopIndex(), getOriginalText(ctx.expr()), orderDirection, null,
(ExpressionSegment) visit(ctx.expr()));
}
@Override
public final ASTNode visitDataType(final DataTypeContext ctx) {
DataTypeSegment result = new DataTypeSegment();
result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue());
result.setStartIndex(ctx.start.getStartIndex());
result.setStopIndex(ctx.stop.getStopIndex());
if (null != ctx.dataTypeLength()) {
DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength());
result.setDataLength(dataTypeLengthSegment);
}
return result;
}
@Override
public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) {
DataTypeLengthSegment result = new DataTypeLengthSegment();
result.setStartIndex(ctx.start.getStartIndex());
result.setStopIndex(ctx.stop.getStartIndex());
List<TerminalNode> numbers = ctx.NUMBER_();
if (numbers.size() == 1) {
result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
}
if (numbers.size() == 2) {
result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
result.setScale(Integer.parseInt(numbers.get(1).getText()));
}
return result;
}
@Override
public final ASTNode visitViewName(final ViewNameContext ctx) {
SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(),
ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())));
OwnerContext owner = ctx.owner();
if (null != owner) {
result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
}
return result;
}
@Override
public ASTNode visitSelect(final SelectContext ctx) {
SQLServerSelectStatement result = (SQLServerSelectStatement) visit(ctx.aggregationClause());
result.addParameterMarkerSegments(getParameterMarkerSegments());
return result;
}
@Override
public ASTNode visitAggregationClause(final AggregationClauseContext ctx) {
return visit(ctx.selectClause(0));
}
@Override
public ASTNode visitSelectClause(final SelectClauseContext ctx) {
SQLServerSelectStatement result = new SQLServerSelectStatement();
result.setProjections((ProjectionsSegment) visit(ctx.projections()));
if (null != ctx.selectWithClause() && null != ctx.selectWithClause().cteClauseSet()) {
Collection<CommonTableExpressionSegment> commonTableExpressionSegments = getCommonTableExpressionSegmentsUsingCteClauseSet(ctx.selectWithClause().cteClauseSet());
WithSegment withSegment = new WithSegment(ctx.selectWithClause().start.getStartIndex(), ctx.selectWithClause().stop.getStopIndex(), commonTableExpressionSegments);
result.setWithSegment(withSegment);
}
if (null != ctx.duplicateSpecification()) {
result.getProjections().setDistinctRow(isDistinct(ctx));
}
if (null != ctx.fromClause()) {
TableSegment tableSource = (TableSegment) visit(ctx.fromClause().tableReferences());
result.setFrom(tableSource);
}
if (null != ctx.whereClause()) {
result.setWhere((WhereSegment) visit(ctx.whereClause()));
}
if (null != ctx.groupByClause()) {
result.setGroupBy((GroupBySegment) visit(ctx.groupByClause()));
}
if (null != ctx.havingClause()) {
result.setHaving((HavingSegment) visit(ctx.havingClause()));
}
if (null != ctx.orderByClause()) {
visitOrderBy(result, ctx.orderByClause());
}
return result;
}
private Collection<CommonTableExpressionSegment> getCommonTableExpressionSegmentsUsingCteClauseSet(final CteClauseSetContext ctx) {
Collection<CommonTableExpressionSegment> result = new LinkedList<>();
for (CteClauseContext each : ctx.cteClause()) {
SubquerySegment subquery = new SubquerySegment(each.subquery().aggregationClause().start.getStartIndex(),
each.subquery().aggregationClause().stop.getStopIndex(), (SQLServerSelectStatement) visit(each.subquery()), getOriginalText(each.subquery()));
IdentifierValue identifier = (IdentifierValue) visit(each.identifier());
CommonTableExpressionSegment commonTableExpression = new CommonTableExpressionSegment(each.start.getStartIndex(), each.stop.getStopIndex(), identifier, subquery);
if (null != each.columnNames()) {
ColumnNamesContext columnNames = each.columnNames();
CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(columnNames);
commonTableExpression.getColumns().addAll(columns.getValue());
}
result.add(commonTableExpression);
}
return result;
}
@Override
public ASTNode visitHavingClause(final HavingClauseContext ctx) {
ExpressionSegment expr = (ExpressionSegment) visit(ctx.expr());
return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
}
private SQLServerSelectStatement visitOrderBy(final SQLServerSelectStatement selectStatement, final OrderByClauseContext ctx) {
Collection<OrderByItemSegment> items = new LinkedList<>();
int orderByStartIndex = ctx.start.getStartIndex();
int orderByStopIndex = ctx.start.getStartIndex();
for (OrderByItemContext each : ctx.orderByItem()) {
items.add((OrderByItemSegment) visit(each));
orderByStopIndex = each.stop.getStopIndex();
}
OrderBySegment orderBySegment = new OrderBySegment(orderByStartIndex, orderByStopIndex, items);
selectStatement.setOrderBy(orderBySegment);
PaginationValueSegment offset = null;
PaginationValueSegment rowcount = null;
LimitSegment limitSegment = null;
if (null != ctx.OFFSET()) {
ASTNode astNode = visit(ctx.expr(0));
if (astNode instanceof LiteralExpressionSegment && ((LiteralExpressionSegment) astNode).getLiterals() instanceof Number) {
offset = new NumberLiteralLimitValueSegment(ctx.expr(0).start.getStartIndex(), ctx.expr(0).stop.getStopIndex(),
((Number) ((LiteralExpressionSegment) astNode).getLiterals()).longValue());
} else if (astNode instanceof ParameterMarkerExpressionSegment) {
offset = new ParameterMarkerLimitValueSegment(ctx.expr(0).start.getStartIndex(), ctx.expr(0).stop.getStopIndex(), parameterMarkerSegments.size());
}
}
if (null != ctx.FETCH()) {
ASTNode astNode = visit(ctx.expr(1));
if (astNode instanceof LiteralExpressionSegment && ((LiteralExpressionSegment) astNode).getLiterals() instanceof Number) {
rowcount = new NumberLiteralLimitValueSegment(ctx.expr(1).start.getStartIndex(), ctx.expr(1).stop.getStopIndex(),
((Number) ((LiteralExpressionSegment) astNode).getLiterals()).longValue());
} else if (astNode instanceof ParameterMarkerExpressionSegment) {
rowcount = new ParameterMarkerLimitValueSegment(ctx.expr(1).start.getStartIndex(), ctx.expr(1).stop.getStopIndex(), parameterMarkerSegments.size());
}
}
if (null != offset) {
limitSegment = new LimitSegment(ctx.OFFSET().getSymbol().getStartIndex(), ctx.stop.getStopIndex(), offset, rowcount);
}
selectStatement.setLimit(limitSegment);
return selectStatement;
}
/**
 * Tells whether the select clause carries the DISTINCT duplicate specification.
 *
 * @param ctx select clause context
 * @return {@code true} when DISTINCT is present
 */
private boolean isDistinct(final SelectClauseContext ctx) {
    BooleanLiteralValue distinctFlag = (BooleanLiteralValue) visit(ctx.duplicateSpecification());
    return distinctFlag.getValue();
}
@Override
public ASTNode visitProjections(final ProjectionsContext ctx) {
// Collects the optional TOP projection first, then each regular projection, in source order.
List<ProjectionSegment> projections = new LinkedList<>();
if (null != ctx.top()) {
projections.add((ProjectionSegment) visit(ctx.top()));
}
for (ProjectionContext each : ctx.projection()) {
projections.add((ProjectionSegment) visit(each));
}
ProjectionsSegment result = new ProjectionsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
result.getProjections().addAll(projections);
return result;
}
/**
 * Visits a comma-separated table reference list, left-folding trailing references
 * into a chain of comma joins.
 *
 * @param ctx table references context
 * @return single table segment (a join segment when more than one reference exists)
 */
@Override
public ASTNode visitTableReferences(final TableReferencesContext ctx) {
    TableSegment result = (TableSegment) visit(ctx.tableReference(0));
    // The loop body never runs for a single reference, so no explicit size guard is needed.
    for (int index = 1; index < ctx.tableReference().size(); index++) {
        result = generateJoinTableSourceFromTableReference(ctx.tableReference(index), result);
    }
    return result;
}
/**
 * Wraps an already-built table segment and the next table reference into a COMMA join.
 *
 * @param ctx the right-hand table reference context
 * @param tableSegment the accumulated left-hand table segment
 * @return join segment spanning from the left segment's start to the reference's stop
 */
private JoinTableSegment generateJoinTableSourceFromTableReference(final TableReferenceContext ctx, final TableSegment tableSegment) {
JoinTableSegment result = new JoinTableSegment();
result.setStartIndex(tableSegment.getStartIndex());
result.setStopIndex(ctx.stop.getStopIndex());
result.setLeft(tableSegment);
result.setRight((TableSegment) visit(ctx));
result.setJoinType(JoinType.COMMA.name());
return result;
}
/**
 * Visits a WHERE clause and wraps its predicate expression in a where segment.
 *
 * @param ctx where clause context
 * @return where segment covering the clause span
 */
@Override
public ASTNode visitWhereClause(final WhereClauseContext ctx) {
    ExpressionSegment predicate = (ExpressionSegment) visit(ctx.expr());
    return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), predicate);
}
@Override
public ASTNode visitGroupByClause(final GroupByClauseContext ctx) {
// GROUP BY items reuse the order-by item grammar rule, hence OrderByItemSegment.
Collection<OrderByItemSegment> items = new LinkedList<>();
for (OrderByItemContext each : ctx.orderByItem()) {
items.add((OrderByItemSegment) visit(each));
}
return new GroupBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items);
}
/**
 * Get original text.
 *
 * <p>Reads the raw SQL text for the context's span straight from the underlying
 * character stream, preserving original whitespace and casing.</p>
 *
 * @param ctx context
 * @return original text
 */
protected String getOriginalText(final ParserRuleContext ctx) {
return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
}
@Override
public ASTNode visitInsert(final InsertContext ctx) {
// Exactly one of the four insert body alternatives is present; each builds the base statement.
SQLServerInsertStatement result;
if (null != ctx.insertDefaultValue()) {
result = (SQLServerInsertStatement) visit(ctx.insertDefaultValue());
} else if (null != ctx.insertValuesClause()) {
result = (SQLServerInsertStatement) visit(ctx.insertValuesClause());
} else if (null != ctx.insertExecClause()) {
result = (SQLServerInsertStatement) visit(ctx.insertExecClause());
} else {
result = (SQLServerInsertStatement) visit(ctx.insertSelectClause());
}
if (null != ctx.withClause()) {
result.setWithSegment((WithSegment) visit(ctx.withClause()));
}
if (null != ctx.withTableHint()) {
result.setWithTableHintSegment((WithTableHintSegment) visit(ctx.withTableHint()));
}
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
// Attach all parameter markers collected while visiting this statement's subtree.
result.addParameterMarkerSegments(getParameterMarkerSegments());
return result;
}
/**
 * Visits a limited table hint and captures its raw text as the hint value.
 *
 * @param ctx table hint limited context
 * @return table hint limited segment covering the hint span
 */
// Fix: the annotation was written twice ("@Override" on two consecutive lines);
// @Override is not @Repeatable, so the duplicate is a compile error.
@Override
public ASTNode visitTableHintLimited(final TableHintLimitedContext ctx) {
    TableHintLimitedSegment result = new TableHintLimitedSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    result.setValue(ctx.getText());
    return result;
}
@Override
public ASTNode visitInsertDefaultValue(final InsertDefaultValueContext ctx) {
// INSERT ... DEFAULT VALUES: only columns and an optional OUTPUT clause are populated.
SQLServerInsertStatement result = new SQLServerInsertStatement();
result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
if (null != ctx.outputClause()) {
result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
}
return result;
}
@Override
public ASTNode visitInsertExecClause(final InsertExecClauseContext ctx) {
// INSERT ... EXEC: values come from a stored procedure execution.
SQLServerInsertStatement result = new SQLServerInsertStatement();
result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
result.setExecSegment((ExecSegment) visit(ctx.exec()));
return result;
}
@Override
public ASTNode visitExec(final ExecContext ctx) {
// Builds an EXEC segment with the optional procedure name and its argument expressions.
ExecSegment result = new ExecSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.procedureName()) {
result.setProcedureName((FunctionNameSegment) visitProcedureName(ctx.procedureName()));
}
if (null != ctx.expr()) {
Collection<ExpressionSegment> items = new LinkedList<>();
for (ExprContext each : ctx.expr()) {
items.add((ExpressionSegment) visit(each));
}
result.getExpressionSegments().addAll(items);
}
return result;
}
@Override
public ASTNode visitProcedureName(final ProcedureNameContext ctx) {
// The span covers only the name part; the optional owner (schema) qualifier is attached separately.
FunctionNameSegment result = new FunctionNameSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), (IdentifierValue) visit(ctx.name()));
if (null != ctx.owner()) {
result.setOwner(new OwnerSegment(ctx.owner().start.getStartIndex(), ctx.owner().stop.getStopIndex(), (IdentifierValue) visit(ctx.owner())));
}
return result;
}
/**
 * Visits an OUTPUT clause, collecting the output column projections (with optional aliases)
 * and the optional output target table with its column list.
 *
 * @param ctx output clause context
 * @return output segment covering the clause span
 */
@Override
public ASTNode visitOutputClause(final OutputClauseContext ctx) {
    OutputSegment result = new OutputSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    if (null != ctx.outputWithColumns()) {
        OutputWithColumnsContext outputWithColumnsContext = ctx.outputWithColumns();
        List<OutputWithColumnContext> outputWithColumnContexts = outputWithColumnsContext.outputWithColumn();
        Collection<ColumnProjectionSegment> outputColumns = new LinkedList<>();
        for (OutputWithColumnContext each : outputWithColumnContexts) {
            ColumnSegment column = new ColumnSegment(each.start.getStartIndex(), each.stop.getStopIndex(), new IdentifierValue(each.name().getText()));
            ColumnProjectionSegment outputColumn = new ColumnProjectionSegment(column);
            if (null != each.alias()) {
                // Fix: the alias identifier was built from each.name() (the column name)
                // instead of the alias text, so "col AS x" recorded alias "col".
                outputColumn.setAlias(new AliasSegment(each.alias().start.getStartIndex(), each.alias().stop.getStopIndex(), new IdentifierValue(each.alias().getText())));
            }
            outputColumns.add(outputColumn);
        }
        result.getOutputColumns().addAll(outputColumns);
    }
    if (null != ctx.outputTableName()) {
        OutputTableNameContext outputTableNameContext = ctx.outputTableName();
        TableNameSegment tableName = new TableNameSegment(outputTableNameContext.start.getStartIndex(),
                outputTableNameContext.stop.getStopIndex(), new IdentifierValue(outputTableNameContext.getText()));
        result.setTableName(tableName);
        if (null != ctx.columnNames()) {
            ColumnNamesContext columnNames = ctx.columnNames();
            @SuppressWarnings("unchecked")
            CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(columnNames);
            result.getTableColumns().addAll(columns.getValue());
        }
    }
    return result;
}
@Override
public ASTNode visitInsertValuesClause(final InsertValuesClauseContext ctx) {
// INSERT ... VALUES: columns, one values segment per row tuple, and an optional OUTPUT clause.
SQLServerInsertStatement result = new SQLServerInsertStatement();
result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
result.getValues().addAll(createInsertValuesSegments(ctx.assignmentValues()));
if (null != ctx.outputClause()) {
result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
}
return result;
}
/**
 * Converts each VALUES row-tuple context into an insert values segment, preserving order.
 *
 * @param assignmentValuesContexts row-tuple contexts from the VALUES clause
 * @return insert values segments in source order
 */
private Collection<InsertValuesSegment> createInsertValuesSegments(final Collection<AssignmentValuesContext> assignmentValuesContexts) {
    Collection<InsertValuesSegment> segments = new LinkedList<>();
    for (AssignmentValuesContext valuesContext : assignmentValuesContexts) {
        InsertValuesSegment segment = (InsertValuesSegment) visit(valuesContext);
        segments.add(segment);
    }
    return segments;
}
@Override
public ASTNode visitInsertSelectClause(final InsertSelectClauseContext ctx) {
// INSERT ... SELECT: columns, the inner select as a subquery, and an optional OUTPUT clause.
SQLServerInsertStatement result = new SQLServerInsertStatement();
result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
result.setInsertSelect(createInsertSelectSegment(ctx));
if (null != ctx.outputClause()) {
result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
}
return result;
}
@SuppressWarnings("unchecked")
private InsertColumnsSegment createInsertColumns(final ColumnNamesContext columnNames, final int startIndex) {
if (null == columnNames) {
// No explicit column list: emit a zero-width segment just before the statement body.
return new InsertColumnsSegment(startIndex - 1, startIndex - 1, Collections.emptyList());
} else {
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(columnNames);
return new InsertColumnsSegment(columnNames.start.getStartIndex(), columnNames.stop.getStopIndex(), columnSegments.getValue());
}
}
// Wraps the INSERT ... SELECT inner select into a subquery segment, keeping the raw SQL text.
private SubquerySegment createInsertSelectSegment(final InsertSelectClauseContext ctx) {
SQLServerSelectStatement selectStatement = (SQLServerSelectStatement) visit(ctx.select());
return new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement, getOriginalText(ctx.select()));
}
@Override
public ASTNode visitWithClause(final WithClauseContext ctx) {
// Collects all common table expressions (CTEs) declared in the WITH clause.
Collection<CommonTableExpressionSegment> commonTableExpressionSegments = getCommonTableExpressionSegmentsUsingCteClauseSet(ctx.cteClauseSet());
return new WithSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), commonTableExpressionSegments);
}
@Override
public ASTNode visitUpdate(final UpdateContext ctx) {
// UPDATE statement: optional WITH (CTE), target tables, SET assignments, optional WHERE.
SQLServerUpdateStatement result = new SQLServerUpdateStatement();
if (null != ctx.withClause()) {
result.setWithSegment((WithSegment) visit(ctx.withClause()));
}
result.setTable((TableSegment) visit(ctx.tableReferences()));
result.setSetAssignment((SetAssignmentSegment) visit(ctx.setAssignmentsClause()));
if (null != ctx.whereClause()) {
result.setWhere((WhereSegment) visit(ctx.whereClause()));
}
// Attach all parameter markers collected while visiting this statement's subtree.
result.addParameterMarkerSegments(getParameterMarkerSegments());
return result;
}
@Override
public ASTNode visitSetAssignmentsClause(final SetAssignmentsClauseContext ctx) {
// Collects every "column = value" assignment of the SET clause, in source order.
Collection<ColumnAssignmentSegment> assignments = new LinkedList<>();
for (AssignmentContext each : ctx.assignment()) {
assignments.add((ColumnAssignmentSegment) visit(each));
}
return new SetAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), assignments);
}
@Override
public ASTNode visitAssignmentValues(final AssignmentValuesContext ctx) {
// One VALUES row tuple: each element becomes an expression segment.
List<ExpressionSegment> segments = new LinkedList<>();
for (AssignmentValueContext each : ctx.assignmentValue()) {
segments.add((ExpressionSegment) visit(each));
}
return new InsertValuesSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), segments);
}
/**
 * Visits one "column = value" assignment of a SET clause.
 *
 * @param ctx assignment context
 * @return column assignment segment holding the single assigned column and its value expression
 */
@Override
public ASTNode visitAssignment(final AssignmentContext ctx) {
    ColumnSegment column = (ColumnSegment) visitColumnName(ctx.columnName());
    List<ColumnSegment> columnSegments = new LinkedList<>();
    columnSegments.add(column);
    ExpressionSegment value = (ExpressionSegment) visit(ctx.assignmentValue());
    // Fix: the column was passed in via the constructor list AND re-added through
    // result.getColumns().add(column), leaving the same column twice in the segment.
    return new ColumnAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnSegments, value);
}
/**
 * Visits one assignment value: a full expression when present, otherwise the raw text
 * (e.g. DEFAULT) wrapped as a common expression segment.
 *
 * @param ctx assignment value context
 * @return expression node for the value
 */
@Override
public ASTNode visitAssignmentValue(final AssignmentValueContext ctx) {
    if (null == ctx.expr()) {
        return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText());
    }
    return visit(ctx.expr());
}
@Override
public ASTNode visitDelete(final DeleteContext ctx) {
// DELETE statement: optional WITH (CTE), single- or multi-table target, optional OUTPUT and WHERE.
SQLServerDeleteStatement result = new SQLServerDeleteStatement();
if (null != ctx.withClause()) {
result.setWithSegment((WithSegment) visit(ctx.withClause()));
}
if (null != ctx.multipleTablesClause()) {
result.setTable((TableSegment) visit(ctx.multipleTablesClause()));
} else {
result.setTable((TableSegment) visit(ctx.singleTableClause()));
}
if (null != ctx.outputClause()) {
result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
}
if (null != ctx.whereClause()) {
result.setWhere((WhereSegment) visit(ctx.whereClause()));
}
// Attach all parameter markers collected while visiting this statement's subtree.
result.addParameterMarkerSegments(getParameterMarkerSegments());
return result;
}
/**
 * Visits a single-table DELETE target, attaching the alias when one is declared.
 *
 * @param ctx single table clause context
 * @return simple table segment for the target
 */
@Override
public ASTNode visitSingleTableClause(final SingleTableClauseContext ctx) {
    SimpleTableSegment table = (SimpleTableSegment) visit(ctx.tableName());
    AliasContext aliasContext = ctx.alias();
    if (null != aliasContext) {
        table.setAlias((AliasSegment) visit(aliasContext));
    }
    return table;
}
@Override
public ASTNode visitMultipleTablesClause(final MultipleTablesClauseContext ctx) {
// Multi-table DELETE: the relation source (FROM ... joins) plus the actual tables to delete from.
DeleteMultiTableSegment result = new DeleteMultiTableSegment();
TableSegment relateTableSource = (TableSegment) visit(ctx.tableReferences());
result.setRelationTable(relateTableSource);
result.setActualDeleteTables(generateTablesFromTableMultipleTableNames(ctx.multipleTableNames()));
return result;
}
/**
 * Visits every table name of a multi-table list into a simple table segment, preserving order.
 *
 * @param ctx multiple table names context
 * @return table segments in source order
 */
private List<SimpleTableSegment> generateTablesFromTableMultipleTableNames(final MultipleTableNamesContext ctx) {
    List<SimpleTableSegment> tables = new LinkedList<>();
    for (TableNameContext tableNameContext : ctx.tableName()) {
        SimpleTableSegment table = (SimpleTableSegment) visit(tableNameContext);
        tables.add(table);
    }
    return tables;
}
/**
 * Visits the duplicate specification (ALL / DISTINCT) of a select clause.
 *
 * @param ctx duplicate specification context
 * @return boolean literal value that is true when DISTINCT is present
 */
@Override
public ASTNode visitDuplicateSpecification(final DuplicateSpecificationContext ctx) {
    boolean distinct = null != ctx.DISTINCT();
    return new BooleanLiteralValue(distinct);
}
@Override
public ASTNode visitProjection(final ProjectionContext ctx) {
// Qualified shorthand (owner.*): shorthand projection with the owner attached.
if (null != ctx.qualifiedShorthand()) {
QualifiedShorthandContext shorthand = ctx.qualifiedShorthand();
ShorthandProjectionSegment result = new ShorthandProjectionSegment(shorthand.getStart().getStartIndex(), shorthand.getStop().getStopIndex());
IdentifierValue identifier = new IdentifierValue(shorthand.identifier().getText());
result.setOwner(new OwnerSegment(shorthand.identifier().getStart().getStartIndex(), shorthand.identifier().getStop().getStopIndex(), identifier));
return result;
}
// Unqualified shorthand (*): bare shorthand projection.
if (null != ctx.unqualifiedShorthand()) {
return new ShorthandProjectionSegment(ctx.unqualifiedShorthand().getStart().getStartIndex(), ctx.unqualifiedShorthand().getStop().getStopIndex());
}
AliasSegment alias = null == ctx.alias() ? null : (AliasSegment) visit(ctx.alias());
// Plain column reference: column projection with optional alias.
if (null != ctx.columnName()) {
ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
ColumnProjectionSegment result = new ColumnProjectionSegment(column);
result.setAlias(alias);
return result;
}
// Anything else is an expression; dispatch on its concrete node type.
return createProjection(ctx, alias);
}
@Override
public ASTNode visitTop(final TopContext ctx) {
int startIndex = ctx.topNum().getStart().getStartIndex();
int stopIndex = ctx.topNum().getStop().getStopIndex();
ASTNode topNum = visit(ctx.topNum());
// TOP n with a literal count: wrap the number directly.
if (topNum instanceof NumberLiteralValue) {
NumberLiteralRowNumberValueSegment rowNumberSegment = new NumberLiteralRowNumberValueSegment(startIndex, stopIndex, ((NumberLiteralValue) topNum).getValue().longValue(), false);
return new TopProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), rowNumberSegment, null != ctx.alias() ? ctx.alias().getText() : null);
}
// TOP ? with a parameter marker: register the marker so indexes line up with other markers.
ParameterMarkerSegment parameterSegment = new ParameterMarkerRowNumberValueSegment(startIndex, stopIndex, ((ParameterMarkerValue) topNum).getValue(), false);
parameterMarkerSegments.add(parameterSegment);
return new TopProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (RowNumberValueSegment) parameterSegment, null != ctx.alias() ? ctx.alias().getText() : null);
}
@Override
public ASTNode visitAlias(final AliasContext ctx) {
// An alias is either an identifier or a string literal; both become an identifier value.
if (null != ctx.identifier()) {
return new AliasSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
}
return new AliasSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.STRING_().getText()));
}
/**
 * Turns an arbitrary projection expression into the matching projection segment,
 * dispatching on the concrete node type produced by visiting the expression.
 * Branch order matters: more specific node types are checked before the final
 * literal-expression fallback.
 *
 * @param ctx projection context whose expr() is visited
 * @param alias alias to attach (maybe null)
 * @return projection segment for the expression
 */
private ASTNode createProjection(final ProjectionContext ctx, final AliasSegment alias) {
ASTNode projection = visit(ctx.expr());
if (projection instanceof AggregationProjectionSegment) {
((AggregationProjectionSegment) projection).setAlias(alias);
return projection;
}
if (projection instanceof ExpressionProjectionSegment) {
((ExpressionProjectionSegment) projection).setAlias(alias);
return projection;
}
if (projection instanceof FunctionSegment) {
FunctionSegment segment = (FunctionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getText(), segment);
result.setAlias(alias);
return result;
}
if (projection instanceof CommonExpressionSegment) {
CommonExpressionSegment segment = (CommonExpressionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getText(), segment);
result.setAlias(alias);
return result;
}
if (projection instanceof ColumnSegment) {
ColumnProjectionSegment result = new ColumnProjectionSegment((ColumnSegment) projection);
result.setAlias(alias);
return result;
}
if (projection instanceof SubqueryExpressionSegment) {
SubqueryExpressionSegment subqueryExpressionSegment = (SubqueryExpressionSegment) projection;
// Recover the raw subquery text from the character stream for the projection.
String text = ctx.start.getInputStream().getText(new Interval(subqueryExpressionSegment.getStartIndex(), subqueryExpressionSegment.getStopIndex()));
SubqueryProjectionSegment result = new SubqueryProjectionSegment(((SubqueryExpressionSegment) projection).getSubquery(), text);
result.setAlias(alias);
return result;
}
if (projection instanceof BinaryOperationExpression) {
BinaryOperationExpression binaryExpression = (BinaryOperationExpression) projection;
// The projection span must also cover the alias when it extends the expression span.
int startIndex = getStartIndexWithAlias(binaryExpression, alias);
int stopIndex = getStopIndexWithAlias(binaryExpression, alias);
ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, binaryExpression.getText(), binaryExpression);
result.setAlias(alias);
return result;
}
if (projection instanceof ParameterMarkerExpressionSegment) {
ParameterMarkerExpressionSegment result = (ParameterMarkerExpressionSegment) projection;
result.setAlias(alias);
return projection;
}
// Fallback: any remaining node is treated as a literal expression.
LiteralExpressionSegment column = (LiteralExpressionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(getStartIndexWithAlias(column, alias), getStopIndexWithAlias(column, alias), String.valueOf(column.getLiterals()), column);
result.setAlias(alias);
return result;
}
/**
 * Returns the earlier of the segment's and the alias's start index (alias may be null).
 *
 * @param sqlSegment segment being projected
 * @param alias optional alias segment
 * @return smallest start index covering both
 */
private int getStartIndexWithAlias(final SQLSegment sqlSegment, final AliasSegment alias) {
    if (null != alias && alias.getStartIndex() < sqlSegment.getStartIndex()) {
        return alias.getStartIndex();
    }
    return sqlSegment.getStartIndex();
}
/**
 * Returns the later of the segment's and the alias's stop index (alias may be null).
 *
 * @param sqlSegment segment being projected
 * @param alias optional alias segment
 * @return largest stop index covering both
 */
private int getStopIndexWithAlias(final SQLSegment sqlSegment, final AliasSegment alias) {
    if (null != alias && alias.getStopIndex() > sqlSegment.getStopIndex()) {
        return alias.getStopIndex();
    }
    return sqlSegment.getStopIndex();
}
/**
 * Visits a FROM clause by delegating straight to its table references.
 *
 * @param ctx from clause context
 * @return table segment tree for the clause
 */
@Override
public ASTNode visitFromClause(final FromClauseContext ctx) {
    TableReferencesContext tableReferences = ctx.tableReferences();
    return visit(tableReferences);
}
/**
 * Visits one table reference: a table factor optionally followed by joined tables,
 * folded left-to-right into a join segment chain.
 *
 * @param ctx table reference context
 * @return table segment (join segment when joins are present)
 */
@Override
public ASTNode visitTableReference(final TableReferenceContext ctx) {
    TableSegment accumulated = (TableSegment) visit(ctx.tableFactor());
    for (JoinedTableContext joined : ctx.joinedTable()) {
        accumulated = visitJoinedTable(joined, accumulated);
    }
    return accumulated;
}
@Override
public ASTNode visitTableFactor(final TableFactorContext ctx) {
// Subquery factor: (SELECT ...) [alias]
if (null != ctx.subquery()) {
SQLServerSelectStatement subquery = (SQLServerSelectStatement) visit(ctx.subquery());
SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), subquery, getOriginalText(ctx.subquery()));
SubqueryTableSegment result = new SubqueryTableSegment(subquerySegment);
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
// Named table factor: table [alias]
if (null != ctx.tableName()) {
SimpleTableSegment result = (SimpleTableSegment) visit(ctx.tableName());
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
// Table-valued expression factor: expr [alias]
if (null != ctx.expr()) {
ExpressionSegment exprSegment = (ExpressionSegment) visit(ctx.expr());
FunctionTableSegment result = new FunctionTableSegment(exprSegment.getStartIndex(), exprSegment.getStopIndex(), exprSegment);
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
// Parenthesized table references.
return visit(ctx.tableReferences());
}
/**
 * Builds a join segment from the accumulated left-hand table and a joined-table context,
 * filling in the join type and any ON/USING specification.
 *
 * @param ctx joined table context (right-hand side)
 * @param tableSegment accumulated left-hand table segment
 * @return join segment spanning both sides
 */
private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final TableSegment tableSegment) {
JoinTableSegment result = new JoinTableSegment();
result.setLeft(tableSegment);
result.setStartIndex(tableSegment.getStartIndex());
result.setStopIndex(ctx.stop.getStopIndex());
TableSegment right = (TableSegment) visit(ctx.tableFactor());
result.setRight(right);
result.setJoinType(getJoinType(ctx));
if (null != ctx.joinSpecification()) {
visitJoinSpecification(ctx.joinSpecification(), result);
}
return result;
}
/**
 * Maps the join keyword of a joined-table context to a join type name;
 * a bare JOIN with no keyword defaults to INNER.
 *
 * @param ctx joined table context
 * @return join type name
 */
private String getJoinType(final JoinedTableContext ctx) {
    if (null != ctx.LEFT()) {
        return JoinType.LEFT.name();
    }
    if (null != ctx.RIGHT()) {
        return JoinType.RIGHT.name();
    }
    if (null != ctx.FULL()) {
        return JoinType.FULL.name();
    }
    if (null != ctx.INNER()) {
        return JoinType.INNER.name();
    }
    if (null != ctx.CROSS()) {
        return JoinType.CROSS.name();
    }
    return JoinType.INNER.name();
}
/**
 * Fills a join segment with its ON condition and/or USING column list.
 *
 * @param ctx join specification context
 * @param joinTableSource join segment to populate (mutated in place)
 */
private void visitJoinSpecification(final JoinSpecificationContext ctx, final JoinTableSegment joinTableSource) {
if (null != ctx.expr()) {
ExpressionSegment condition = (ExpressionSegment) visit(ctx.expr());
joinTableSource.setCondition(condition);
}
if (null != ctx.USING()) {
joinTableSource.setUsing(ctx.columnNames().columnName().stream().map(each -> (ColumnSegment) visit(each)).collect(Collectors.toList()));
}
}
/**
 * Visits a subquery by delegating to its aggregation clause.
 *
 * @param ctx subquery context
 * @return select statement node produced by the inner clause
 */
@Override
public ASTNode visitSubquery(final SubqueryContext ctx) {
    AggregationClauseContext aggregationClause = ctx.aggregationClause();
    return visit(aggregationClause);
}
@Override
public ASTNode visitCreateTableAsSelectClause(final CreateTableAsSelectClauseContext ctx) {
// Handles both CREATE TABLE ... AS SELECT and CREATE REMOTE TABLE ... AS SELECT variants.
SQLServerCreateTableStatement result = new SQLServerCreateTableStatement();
if (null != ctx.createTableAsSelect()) {
result.setTable((SimpleTableSegment) visit(ctx.createTableAsSelect().tableName()));
result.setSelectStatement((SQLServerSelectStatement) visit(ctx.createTableAsSelect().select()));
if (null != ctx.createTableAsSelect().columnNames()) {
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.createTableAsSelect().columnNames());
for (ColumnSegment each : columnSegments.getValue()) {
result.getColumns().add(each);
}
}
} else {
// Remote variant has no explicit column list in this grammar.
result.setTable((SimpleTableSegment) visit(ctx.createRemoteTableAsSelect().tableName()));
result.setSelectStatement((SQLServerSelectStatement) visit(ctx.createRemoteTableAsSelect().select()));
}
return result;
}
@Override
public ASTNode visitUpdateStatistics(final UpdateStatisticsContext ctx) {
// UPDATE STATISTICS: optional table, optional index list, optional WITH strategy options.
SQLServerUpdateStatisticsStatement result = new SQLServerUpdateStatisticsStatement();
if (null != ctx.tableName()) {
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
}
if (null != ctx.indexName() && ctx.indexName().size() > 0) {
List<IndexSegment> indexSegments = new LinkedList<>();
for (IndexNameContext indexNameContext : ctx.indexName()) {
indexSegments.add((IndexSegment) visit(indexNameContext));
}
result.setIndexes(indexSegments);
}
if (null != ctx.statisticsWithClause()) {
result.setStrategy((StatisticsStrategySegment) visit(ctx.statisticsWithClause()));
}
return result;
}
@Override
public ASTNode visitStatisticsWithClause(final StatisticsWithClauseContext ctx) {
// WITH clause of UPDATE STATISTICS: optional sample option plus optional statistics options.
StatisticsStrategySegment result = new StatisticsStrategySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.sampleOption()) {
result.setSampleOption((SampleOptionSegment) visit(ctx.sampleOption()));
}
if (null != ctx.statisticsOptions()) {
result.setStatisticsOptions((StatisticsOptionSegment) visit(ctx.statisticsOptions()));
}
return result;
}
@Override
public ASTNode visitStatisticsOption(final StatisticsOptionContext ctx) {
// Pure pass-through to the generated base visitor; individual options are interpreted
// by visitStatisticsOptions, which inspects the option contexts directly.
return super.visitStatisticsOption(ctx);
}
@Override
public ASTNode visitSampleOption(final SampleOptionContext ctx) {
// Parses FULLSCAN / SAMPLE n [PERCENT|ROWS] / RESAMPLE [ON PARTITIONS (...)] options.
SampleOptionSegment result = new SampleOptionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.FULLSCAN()) {
result.setStrategy(SampleStrategy.FULLSCAN);
} else if (null != ctx.SAMPLE()) {
result.setStrategy(SampleStrategy.SAMPLE);
if (null != ctx.NUMBER_()) {
// SAMPLE takes a single number: the sample size.
List<TerminalNode> number = ctx.NUMBER_();
result.setSampleNumber(number.get(0).getText());
}
if (null != ctx.PERCENT()) {
result.setScanUnit(ScanUnit.PERCENT);
} else if (null != ctx.ROWS()) {
result.setScanUnit(ScanUnit.ROWS);
}
} else if (null != ctx.RESAMPLE()) {
result.setStrategy(SampleStrategy.RESAMPLE);
if (null != ctx.NUMBER_()) {
// RESAMPLE may list several partition numbers.
List<String> partitions = new LinkedList<>();
for (TerminalNode terminalNode : ctx.NUMBER_()) {
partitions.add(terminalNode.getText());
}
result.setPartitions(partitions);
}
}
if (null != ctx.PERSIST_SAMPLE_PERCENT()) {
// PERSIST_SAMPLE_PERCENT = ON | OFF.
result.setPersistSamplePercent(null != ctx.ON());
}
return result;
}
@Override
public ASTNode visitStatisticsOptions(final StatisticsOptionsContext ctx) {
// Folds every option in the list into one segment; later options overwrite earlier ones.
StatisticsOptionSegment result = new StatisticsOptionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
for (StatisticsOptionContext option : ctx.statisticsOption()) {
if (null != option.ALL()) {
result.setStatisticsDimension(StatisticsDimension.ALL);
} else if (null != option.COLUMNS()) {
result.setStatisticsDimension(StatisticsDimension.COLUMNS);
} else if (null != option.INDEX()) {
result.setStatisticsDimension(StatisticsDimension.INDEX);
}
if (null != option.NORECOMPUTE()) {
result.setNoRecompute(true);
}
if (null != option.INCREMENTAL()) {
// INCREMENTAL = ON | OFF.
result.setIncremental(null != option.ON());
}
if (null != option.MAXDOP()) {
result.setMaxDegreeOfParallelism(option.NUMBER_().getText());
}
if (null != option.AUTO_DROP()) {
// AUTO_DROP = ON | OFF.
result.setAutoDrop(null != option.ON());
}
}
return result;
}
}
// ---- generated/concatenation artifact removed; base visitor class follows ----
class SQLServerStatementVisitor extends SQLServerStatementBaseVisitor<ASTNode> {
private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>();
@Override
public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) {
// The marker's index is the count of markers seen so far; the caller is expected to
// register the resulting segment so subsequent markers get increasing indexes.
return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION);
}
@Override
public final ASTNode visitLiterals(final LiteralsContext ctx) {
// Dispatches to the single literal alternative that is present in the parse tree.
if (null != ctx.stringLiterals()) {
return visit(ctx.stringLiterals());
}
if (null != ctx.numberLiterals()) {
return visit(ctx.numberLiterals());
}
if (null != ctx.hexadecimalLiterals()) {
return visit(ctx.hexadecimalLiterals());
}
if (null != ctx.bitValueLiterals()) {
return visit(ctx.bitValueLiterals());
}
if (null != ctx.booleanLiterals()) {
return visit(ctx.booleanLiterals());
}
if (null != ctx.nullValueLiterals()) {
return visit(ctx.nullValueLiterals());
}
throw new IllegalStateException("Literals must have string, number, dateTime, hex, bit, boolean or null.");
}
/**
 * Visits a string literal; a prefixed national string (e.g. N'...') drops its leading prefix
 * character, a plain string keeps its full text.
 *
 * @param ctx string literals context
 * @return string literal value
 */
@Override
public final ASTNode visitStringLiterals(final StringLiteralsContext ctx) {
    String text = ctx.getText();
    return null != ctx.STRING_() ? new StringLiteralValue(text) : new StringLiteralValue(text.substring(1));
}
/**
 * Visits a number literal, wrapping its raw text.
 *
 * @param ctx number literals context
 * @return number literal value
 */
@Override
public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) {
    String literalText = ctx.getText();
    return new NumberLiteralValue(literalText);
}
/**
 * Visits a hexadecimal literal, keeping its raw text as an opaque literal value.
 *
 * @param ctx hexadecimal literals context
 * @return other literal value
 */
@Override
public final ASTNode visitHexadecimalLiterals(final HexadecimalLiteralsContext ctx) {
    String literalText = ctx.getText();
    return new OtherLiteralValue(literalText);
}
/**
 * Visits a bit-value literal, keeping its raw text as an opaque literal value.
 *
 * @param ctx bit value literals context
 * @return other literal value
 */
@Override
public final ASTNode visitBitValueLiterals(final BitValueLiteralsContext ctx) {
    String literalText = ctx.getText();
    return new OtherLiteralValue(literalText);
}
/**
 * Visits a boolean literal, parsing its raw text into a boolean literal value.
 *
 * @param ctx boolean literals context
 * @return boolean literal value
 */
@Override
public final ASTNode visitBooleanLiterals(final BooleanLiteralsContext ctx) {
    String literalText = ctx.getText();
    return new BooleanLiteralValue(literalText);
}
/**
 * Visits a NULL literal, wrapping its raw text.
 *
 * @param ctx null value literals context
 * @return null literal value
 */
@Override
public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) {
    String literalText = ctx.getText();
    return new NullLiteralValue(literalText);
}
/**
 * Visits an identifier, delegating to whichever alternative (regular or delimited) is present.
 *
 * @param ctx identifier context
 * @return identifier value
 */
@Override
public final ASTNode visitIdentifier(final IdentifierContext ctx) {
    if (null != ctx.regularIdentifier()) {
        return visit(ctx.regularIdentifier());
    }
    return visit(ctx.delimitedIdentifier());
}
/**
 * Visits a regular identifier; unreserved keywords are delegated so they keep
 * their dedicated identifier handling.
 *
 * @param ctx regular identifier context
 * @return identifier value
 */
@Override
public final ASTNode visitRegularIdentifier(final RegularIdentifierContext ctx) {
    UnreservedWordContext unreservedWord = ctx.unreservedWord();
    if (null != unreservedWord) {
        return (IdentifierValue) visit(unreservedWord);
    }
    return new IdentifierValue(ctx.getText());
}
/**
 * Visits a delimited identifier (e.g. [name] or "name"), wrapping its raw text.
 *
 * @param ctx delimited identifier context
 * @return identifier value
 */
@Override
public final ASTNode visitDelimitedIdentifier(final DelimitedIdentifierContext ctx) {
    String text = ctx.getText();
    return new IdentifierValue(text);
}
/**
 * Visits an unreserved keyword used as an identifier, wrapping its raw text.
 *
 * @param ctx unreserved word context
 * @return identifier value
 */
@Override
public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) {
    String text = ctx.getText();
    return new IdentifierValue(text);
}
/**
 * Visits a schema name by delegating to its identifier.
 *
 * @param ctx schema name context
 * @return identifier value
 */
@Override
public final ASTNode visitSchemaName(final SchemaNameContext ctx) {
    IdentifierContext identifier = ctx.identifier();
    return visit(identifier);
}
@Override
public final ASTNode visitTableName(final TableNameContext ctx) {
SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())));
OwnerContext owner = ctx.owner();
// Qualification chain: database.owner.table — the database becomes the owner's owner.
if (null != owner) {
OwnerSegment ownerSegment = new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()));
if (null != ctx.databaseName()) {
DatabaseNameContext databaseName = ctx.databaseName();
ownerSegment.setOwner(new OwnerSegment(databaseName.getStart().getStartIndex(), databaseName.getStop().getStopIndex(), (IdentifierValue) visit(databaseName.identifier())));
}
result.setOwner(ownerSegment);
} else if (null != ctx.databaseName()) {
// database..table form: the database is attached directly as the table's owner.
DatabaseNameContext databaseName = ctx.databaseName();
result.setOwner(new OwnerSegment(databaseName.getStart().getStartIndex(), databaseName.getStop().getStopIndex(), (IdentifierValue) visit(databaseName.identifier())));
}
return result;
}
@Override
public final ASTNode visitColumnName(final ColumnNameContext ctx) {
ColumnSegment result;
// A column is either a plain name or a script variable (e.g. @var).
if (null != ctx.name()) {
result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()));
} else {
result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.scriptVariableName()));
}
OwnerContext owner = ctx.owner();
if (null != owner) {
result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
}
return result;
}
/**
 * Visits a script variable name (e.g. @var), wrapping its raw text.
 *
 * @param ctx script variable name context
 * @return identifier value
 */
@Override
public ASTNode visitScriptVariableName(final ScriptVariableNameContext ctx) {
    String text = ctx.getText();
    return new IdentifierValue(text);
}
@Override
public final ASTNode visitIndexName(final IndexNameContext ctx) {
// The index name and the wrapping index segment share the same source span.
IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName);
}
/**
 * Visits a constraint name, wrapping its identifier in a constraint segment.
 *
 * @param ctx constraint name context
 * @return constraint segment
 */
@Override
public final ASTNode visitConstraintName(final ConstraintNameContext ctx) {
    IdentifierValue identifier = (IdentifierValue) visit(ctx.identifier());
    return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), identifier);
}
/**
 * Visits a table name list into a collection of simple table segments, preserving order.
 *
 * @param ctx table names context
 * @return collection value of simple table segments
 */
@Override
public final ASTNode visitTableNames(final TableNamesContext ctx) {
    CollectionValue<SimpleTableSegment> tables = new CollectionValue<>();
    for (TableNameContext tableNameContext : ctx.tableName()) {
        SimpleTableSegment table = (SimpleTableSegment) visit(tableNameContext);
        tables.getValue().add(table);
    }
    return tables;
}
/**
 * Visits a column name list into a collection of column segments, preserving order.
 *
 * @param ctx column names context
 * @return collection value of column segments
 */
@Override
public final ASTNode visitColumnNames(final ColumnNamesContext ctx) {
    CollectionValue<ColumnSegment> columns = new CollectionValue<>();
    for (ColumnNameContext columnNameContext : ctx.columnName()) {
        ColumnSegment column = (ColumnSegment) visit(columnNameContext);
        columns.getValue().add(column);
    }
    return columns;
}
/**
 * Visits a sorted column name list (column [ASC|DESC]) into a collection of column segments.
 *
 * @param ctx column names with sort context
 * @return collection value of column segments
 */
@Override
public ASTNode visitColumnNamesWithSort(final ColumnNamesWithSortContext ctx) {
    CollectionValue<ColumnSegment> columns = new CollectionValue<>();
    for (ColumnNameWithSortContext columnWithSort : ctx.columnNameWithSort()) {
        ColumnSegment column = (ColumnSegment) visit(columnWithSort);
        columns.getValue().add(column);
    }
    return columns;
}
@Override
public final ASTNode visitExpr(final ExprContext ctx) {
// Dispatch in grammar-alternative order; the final fallback is a NOT expression.
if (null != ctx.booleanPrimary()) {
return visit(ctx.booleanPrimary());
}
// Parenthesized expression: unwrap to the inner expression.
if (null != ctx.LP_()) {
return visit(ctx.expr(0));
}
if (null != ctx.andOperator()) {
return createBinaryOperationExpression(ctx, ctx.andOperator().getText());
}
if (null != ctx.orOperator()) {
return createBinaryOperationExpression(ctx, ctx.orOperator().getText());
}
if (null != ctx.distinctFrom()) {
return createBinaryOperationExpression(ctx, ctx.distinctFrom().getText());
}
return new NotExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (ExpressionSegment) visit(ctx.expr(0)), false);
}
// Builds a BinaryOperationExpression from the first two sub-expressions of ctx,
// keeping the exact original SQL text (taken from the token input stream).
private ASTNode createBinaryOperationExpression(final ExprContext ctx, final String operator) {
    ExpressionSegment left = (ExpressionSegment) visit(ctx.expr(0));
    ExpressionSegment right = (ExpressionSegment) visit(ctx.expr(1));
    String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
@Override
public final ASTNode visitBooleanPrimary(final BooleanPrimaryContext ctx) {
    if (null != ctx.IS()) {
        // IS [NOT] NULL/TRUE/FALSE is modeled as a binary expression whose right side
        // is a literal built from the raw text following the IS keyword.
        String rightText = "";
        if (null != ctx.NOT()) {
            rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(ctx.NOT().getSymbol().getStartIndex(),
                    ctx.NOT().getSymbol().getStopIndex()))).concat(" ");
        }
        Token operatorToken = null;
        if (null != ctx.NULL()) {
            operatorToken = ctx.NULL().getSymbol();
        }
        if (null != ctx.TRUE()) {
            operatorToken = ctx.TRUE().getSymbol();
        }
        if (null != ctx.FALSE()) {
            operatorToken = ctx.FALSE().getSymbol();
        }
        // "+ 2" skips the space after the IS keyword when no NULL/TRUE/FALSE token was matched.
        int startIndex = null == operatorToken ? ctx.IS().getSymbol().getStopIndex() + 2 : operatorToken.getStartIndex();
        rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(startIndex, ctx.stop.getStopIndex())));
        ExpressionSegment right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText);
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
        String operator = "IS";
        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
    }
    if (null != ctx.comparisonOperator() || null != ctx.SAFE_EQ_()) {
        return createCompareSegment(ctx);
    }
    return visit(ctx.predicate());
}
// Turns a comparison (including the null-safe <=> operator) into a
// BinaryOperationExpression; the right side is either a predicate or a subquery.
private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) {
    ExpressionSegment leftOperand = (ExpressionSegment) visit(ctx.booleanPrimary());
    ExpressionSegment rightOperand = (ExpressionSegment) (null == ctx.predicate() ? visit(ctx.subquery()) : visit(ctx.predicate()));
    String operatorText = null == ctx.SAFE_EQ_() ? ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText();
    String originalText = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), leftOperand, rightOperand, operatorText, originalText);
}
@Override
public final ASTNode visitPredicate(final PredicateContext ctx) {
    // IN / BETWEEN / LIKE each build a dedicated segment; otherwise delegate to bitExpr.
    if (null != ctx.IN()) {
        return createInSegment(ctx);
    }
    if (null != ctx.BETWEEN()) {
        return createBetweenSegment(ctx);
    }
    if (null != ctx.LIKE()) {
        return createBinaryOperationExpressionFromLike(ctx);
    }
    return visit(ctx.bitExpr(0));
}
// LIKE / NOT LIKE: the right operand is a ListExpression spanning every pattern
// expression (supports multi-pattern grammar forms).
private BinaryOperationExpression createBinaryOperationExpressionFromLike(final PredicateContext ctx) {
    ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
    ListExpression right = new ListExpression(ctx.simpleExpr(0).start.getStartIndex(), ctx.simpleExpr().get(ctx.simpleExpr().size() - 1).stop.getStopIndex());
    for (SimpleExprContext each : ctx.simpleExpr()) {
        right.getItems().add((ExpressionSegment) visit(each));
    }
    String operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE";
    String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
// IN (...) with either a literal/expression list or a subquery on the right side.
private InExpression createInSegment(final PredicateContext ctx) {
    ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
    ExpressionSegment right;
    if (null == ctx.subquery()) {
        // List form: the ListExpression spans from "(" to ")".
        ListExpression listExpression = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex());
        for (ExprContext each : ctx.expr()) {
            listExpression.getItems().add((ExpressionSegment) visit(each));
        }
        right = listExpression;
    } else {
        right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (SQLServerSelectStatement) visit(ctx.subquery()),
                getOriginalText(ctx.subquery())));
    }
    boolean not = null != ctx.NOT();
    return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not);
}
// [NOT] BETWEEN low AND high: low comes from bitExpr(1), high from the trailing predicate.
private BetweenExpression createBetweenSegment(final PredicateContext ctx) {
    ExpressionSegment target = (ExpressionSegment) visit(ctx.bitExpr(0));
    ExpressionSegment lowerBound = (ExpressionSegment) visit(ctx.bitExpr(1));
    ExpressionSegment upperBound = (ExpressionSegment) visit(ctx.predicate());
    return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), target, lowerBound, upperBound, null != ctx.NOT());
}
@Override
public final ASTNode visitBitExpr(final BitExprContext ctx) {
    // Leaf case: a simpleExpr is converted into a concrete expression segment.
    if (null != ctx.simpleExpr()) {
        return createExpressionSegment(visit(ctx.simpleExpr()), ctx);
    }
    // Binary case: children are [left, operator, right] by grammar shape.
    ExpressionSegment left = (ExpressionSegment) visit(ctx.getChild(0));
    ExpressionSegment right = (ExpressionSegment) visit(ctx.getChild(2));
    String operator = ctx.getChild(1).getText();
    String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
}
// Converts a visited literal/marker/subquery value object into the matching
// ExpressionSegment type; anything already a segment is passed through unchanged.
private ASTNode createExpressionSegment(final ASTNode astNode, final ParserRuleContext context) {
    if (astNode instanceof StringLiteralValue) {
        return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((StringLiteralValue) astNode).getValue());
    }
    if (astNode instanceof NumberLiteralValue) {
        return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((NumberLiteralValue) astNode).getValue());
    }
    if (astNode instanceof BooleanLiteralValue) {
        return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((BooleanLiteralValue) astNode).getValue());
    }
    if (astNode instanceof ParameterMarkerValue) {
        ParameterMarkerValue parameterMarker = (ParameterMarkerValue) astNode;
        ParameterMarkerExpressionSegment segment = new ParameterMarkerExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(),
                parameterMarker.getValue(), parameterMarker.getType());
        // Track every parameter marker so statements can expose them in order.
        parameterMarkerSegments.add(segment);
        return segment;
    }
    if (astNode instanceof SubquerySegment) {
        return new SubqueryExpressionSegment((SubquerySegment) astNode);
    }
    if (astNode instanceof OtherLiteralValue) {
        return new CommonExpressionSegment(context.getStart().getStartIndex(), context.getStop().getStopIndex(), context.getText());
    }
    return astNode;
}
@Override
public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) {
    int startIndex = ctx.getStart().getStartIndex();
    int stopIndex = ctx.getStop().getStopIndex();
    if (null != ctx.subquery()) {
        return new SubquerySegment(startIndex, stopIndex, (SQLServerSelectStatement) visit(ctx.subquery()), getOriginalText(ctx.subquery()));
    }
    if (null != ctx.parameterMarker()) {
        ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker());
        ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(startIndex, stopIndex, parameterMarker.getValue(), parameterMarker.getType());
        // Register the marker for statement-level parameter bookkeeping.
        parameterMarkerSegments.add(result);
        return result;
    }
    if (null != ctx.literals()) {
        return SQLUtils.createLiteralExpression(visit(ctx.literals()), startIndex, stopIndex, ctx.literals().start.getInputStream().getText(new Interval(startIndex, stopIndex)));
    }
    if (null != ctx.functionCall()) {
        return visit(ctx.functionCall());
    }
    if (null != ctx.columnName()) {
        return visit(ctx.columnName());
    }
    // Remaining alternatives (CASE, nested exprs, operators) handled separately.
    return visitRemainSimpleExpr(ctx);
}
// Fallback for simpleExpr alternatives without dedicated segments: children are
// visited for their side effects (e.g. parameter-marker registration), and the
// raw SQL text is preserved in a generic segment.
private ASTNode visitRemainSimpleExpr(final SimpleExprContext ctx) {
    if (null != ctx.caseExpression()) {
        visit(ctx.caseExpression());
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new OtherLiteralValue(text);
    }
    for (ExprContext each : ctx.expr()) {
        visit(each);
    }
    for (SimpleExprContext each : ctx.simpleExpr()) {
        visit(each);
    }
    String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
}
@Override
public final ASTNode visitFunctionCall(final FunctionCallContext ctx) {
    // Exactly one alternative must be present by grammar; the throw guards against grammar drift.
    if (null != ctx.aggregationFunction()) {
        return visit(ctx.aggregationFunction());
    }
    if (null != ctx.specialFunction()) {
        return visit(ctx.specialFunction());
    }
    if (null != ctx.regularFunction()) {
        return visit(ctx.regularFunction());
    }
    throw new IllegalStateException("FunctionCallContext must have aggregationFunction, regularFunction or specialFunction.");
}
@Override
public final ASTNode visitAggregationFunction(final AggregationFunctionContext ctx) {
    String aggregationType = ctx.aggregationFunctionName().getText();
    // Names not recognized as aggregation types fall back to a plain expression projection.
    return AggregationType.isAggregationType(aggregationType)
            ? createAggregationSegment(ctx, aggregationType)
            : new ExpressionProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), getOriginalText(ctx));
}
// Builds either a distinct or a plain aggregation projection, attaching all
// argument expressions as parameters in both cases.
private ASTNode createAggregationSegment(final AggregationFunctionContext ctx, final String aggregationType) {
    AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase());
    if (null != ctx.distinct()) {
        AggregationDistinctProjectionSegment result =
                new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, getOriginalText(ctx), getDistinctExpression(ctx));
        result.getParameters().addAll(getExpressions(ctx));
        return result;
    }
    AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, getOriginalText(ctx));
    result.getParameters().addAll(getExpressions(ctx));
    return result;
}
// Visits every argument expression of the aggregation call; empty when the call has no args.
private Collection<ExpressionSegment> getExpressions(final AggregationFunctionContext ctx) {
    if (null == ctx.expr()) {
        return Collections.emptyList();
    }
    Collection<ExpressionSegment> expressions = new LinkedList<>();
    ctx.expr().forEach(exprContext -> expressions.add((ExpressionSegment) visit(exprContext)));
    return expressions;
}
// Rebuilds the expression text inside COUNT(DISTINCT ...)-style calls by joining
// all children between the leading tokens and the closing parenthesis.
// NOTE(review): the start index 3 assumes children [name, "(", DISTINCT, ...] — confirm against the grammar.
private String getDistinctExpression(final AggregationFunctionContext ctx) {
    StringBuilder result = new StringBuilder();
    for (int i = 3; i < ctx.getChildCount() - 1; i++) {
        result.append(ctx.getChild(i).getText());
    }
    return result.toString();
}
@Override
public final ASTNode visitSpecialFunction(final SpecialFunctionContext ctx) {
    // CAST/CONVERT/CHAR get dedicated handling; any other special function is kept
    // generically with its leading keyword as the function name.
    if (null != ctx.castFunction()) {
        return visit(ctx.castFunction());
    }
    if (null != ctx.convertFunction()) {
        return visit(ctx.convertFunction());
    }
    if (null != ctx.charFunction()) {
        return visit(ctx.charFunction());
    }
    return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getChild(0).getText(), getOriginalText(ctx));
}
@Override
public final ASTNode visitCastFunction(final CastFunctionContext ctx) {
    // Visit the operand first so parameter markers inside it are registered.
    calculateParameterCount(Collections.singleton(ctx.expr()));
    FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CAST().getText(), getOriginalText(ctx));
    ASTNode exprSegment = visit(ctx.expr());
    // Only column and literal operands are attached as parameters; other forms are dropped here.
    if (exprSegment instanceof ColumnSegment) {
        result.getParameters().add((ColumnSegment) exprSegment);
    } else if (exprSegment instanceof LiteralExpressionSegment) {
        result.getParameters().add((LiteralExpressionSegment) exprSegment);
    }
    result.getParameters().add((DataTypeSegment) visit(ctx.dataType()));
    return result;
}
@Override
public ASTNode visitConvertFunction(final ConvertFunctionContext ctx) {
    // CONVERT(dataType, expr [, style]): parameters are attached in declaration order.
    FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CONVERT().getText(), getOriginalText(ctx));
    result.getParameters().add((DataTypeSegment) visit(ctx.dataType()));
    result.getParameters().add((ExpressionSegment) visit(ctx.expr()));
    if (null != ctx.NUMBER_()) {
        // Optional style argument is kept as a literal of its raw text.
        result.getParameters().add(new LiteralExpressionSegment(ctx.NUMBER_().getSymbol().getStartIndex(), ctx.NUMBER_().getSymbol().getStopIndex(), ctx.NUMBER_().getText()));
    }
    return result;
}
@Override
public final ASTNode visitCharFunction(final CharFunctionContext ctx) {
    // Arguments are visited only for parameter-marker bookkeeping; none are attached as parameters.
    calculateParameterCount(ctx.expr());
    return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CHAR().getText(), getOriginalText(ctx));
}
@Override
public final ASTNode visitRegularFunction(final RegularFunctionContext ctx) {
    FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.regularFunctionName().getText(), getOriginalText(ctx));
    // Every argument expression becomes a parameter of the function segment.
    Collection<ExpressionSegment> expressionSegments = ctx.expr().stream().map(each -> (ExpressionSegment) visit(each)).collect(Collectors.toList());
    result.getParameters().addAll(expressionSegments);
    return result;
}
@Override
public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) {
    // The raw type-name text is wrapped as a keyword value for visitDataType to consume.
    return new KeywordValue(ctx.getText());
}
// Visits each expression purely for its side effects (parameter-marker registration);
// results are intentionally discarded.
private void calculateParameterCount(final Collection<ExprContext> exprContexts) {
    exprContexts.forEach(this::visit);
}
@Override
public final ASTNode visitOrderByItem(final OrderByItemContext ctx) {
    // Absent DESC means ascending; null segment order-direction is the dialect default.
    OrderDirection orderDirection = null == ctx.DESC() ? OrderDirection.ASC : OrderDirection.DESC;
    if (null != ctx.columnName()) {
        ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
        return new ColumnOrderByItemSegment(column, orderDirection, null);
    }
    if (null != ctx.numberLiterals()) {
        // Ordinal form: ORDER BY 1, 2, ...
        return new IndexOrderByItemSegment(ctx.numberLiterals().getStart().getStartIndex(), ctx.numberLiterals().getStop().getStopIndex(),
                SQLUtils.getExactlyNumber(ctx.numberLiterals().getText(), 10).intValue(), orderDirection, null);
    }
    return new ExpressionOrderByItemSegment(ctx.expr().getStart().getStartIndex(), ctx.expr().getStop().getStopIndex(), getOriginalText(ctx.expr()), orderDirection, null,
            (ExpressionSegment) visit(ctx.expr()));
}
@Override
public final ASTNode visitDataType(final DataTypeContext ctx) {
    DataTypeSegment result = new DataTypeSegment();
    result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue());
    result.setStartIndex(ctx.start.getStartIndex());
    result.setStopIndex(ctx.stop.getStopIndex());
    // Optional length/precision clause, e.g. VARCHAR(20) or DECIMAL(10, 2).
    if (null != ctx.dataTypeLength()) {
        DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength());
        result.setDataLength(dataTypeLengthSegment);
    }
    return result;
}
@Override
public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) {
    DataTypeLengthSegment result = new DataTypeLengthSegment();
    result.setStartIndex(ctx.start.getStartIndex());
    // Fix: the stop index must come from the stop token's stop index; the previous
    // code used ctx.stop.getStartIndex(), which is wrong for multi-character stop tokens.
    result.setStopIndex(ctx.stop.getStopIndex());
    List<TerminalNode> numbers = ctx.NUMBER_();
    // One number is precision only, e.g. VARCHAR(20); two numbers are precision
    // and scale, e.g. DECIMAL(10, 2).
    if (numbers.size() == 1) {
        result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
    }
    if (numbers.size() == 2) {
        result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
        result.setScale(Integer.parseInt(numbers.get(1).getText()));
    }
    return result;
}
@Override
public final ASTNode visitViewName(final ViewNameContext ctx) {
    SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(),
            ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())));
    // Optional schema/owner qualifier, e.g. dbo.myView.
    OwnerContext owner = ctx.owner();
    if (null != owner) {
        result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
    }
    return result;
}
@Override
public ASTNode visitSelect(final SelectContext ctx) {
    SQLServerSelectStatement result = (SQLServerSelectStatement) visit(ctx.aggregationClause());
    // Attach all parameter markers collected while visiting sub-expressions.
    result.addParameterMarkerSegments(getParameterMarkerSegments());
    return result;
}
@Override
public ASTNode visitAggregationClause(final AggregationClauseContext ctx) {
    // NOTE(review): only the first select clause is visited — additional clauses
    // (e.g. UNION branches) appear to be ignored here; confirm intended coverage.
    return visit(ctx.selectClause(0));
}
@Override
public ASTNode visitSelectClause(final SelectClauseContext ctx) {
    // Assembles the select statement piecewise; each clause is optional except projections.
    SQLServerSelectStatement result = new SQLServerSelectStatement();
    result.setProjections((ProjectionsSegment) visit(ctx.projections()));
    if (null != ctx.selectWithClause() && null != ctx.selectWithClause().cteClauseSet()) {
        Collection<CommonTableExpressionSegment> commonTableExpressionSegments = getCommonTableExpressionSegmentsUsingCteClauseSet(ctx.selectWithClause().cteClauseSet());
        WithSegment withSegment = new WithSegment(ctx.selectWithClause().start.getStartIndex(), ctx.selectWithClause().stop.getStopIndex(), commonTableExpressionSegments);
        result.setWithSegment(withSegment);
    }
    if (null != ctx.duplicateSpecification()) {
        result.getProjections().setDistinctRow(isDistinct(ctx));
    }
    if (null != ctx.fromClause()) {
        TableSegment tableSource = (TableSegment) visit(ctx.fromClause().tableReferences());
        result.setFrom(tableSource);
    }
    if (null != ctx.whereClause()) {
        result.setWhere((WhereSegment) visit(ctx.whereClause()));
    }
    if (null != ctx.groupByClause()) {
        result.setGroupBy((GroupBySegment) visit(ctx.groupByClause()));
    }
    if (null != ctx.havingClause()) {
        result.setHaving((HavingSegment) visit(ctx.havingClause()));
    }
    if (null != ctx.orderByClause()) {
        // Also extracts OFFSET/FETCH pagination riding on the ORDER BY clause.
        visitOrderBy(result, ctx.orderByClause());
    }
    return result;
}
// Builds one CommonTableExpressionSegment per CTE clause (WITH name [(cols)] AS (subquery)).
private Collection<CommonTableExpressionSegment> getCommonTableExpressionSegmentsUsingCteClauseSet(final CteClauseSetContext ctx) {
    Collection<CommonTableExpressionSegment> result = new LinkedList<>();
    for (CteClauseContext each : ctx.cteClause()) {
        SubquerySegment subquery = new SubquerySegment(each.subquery().aggregationClause().start.getStartIndex(),
                each.subquery().aggregationClause().stop.getStopIndex(), (SQLServerSelectStatement) visit(each.subquery()), getOriginalText(each.subquery()));
        IdentifierValue identifier = (IdentifierValue) visit(each.identifier());
        CommonTableExpressionSegment commonTableExpression = new CommonTableExpressionSegment(each.start.getStartIndex(), each.stop.getStopIndex(), identifier, subquery);
        // Optional explicit column list after the CTE name.
        if (null != each.columnNames()) {
            ColumnNamesContext columnNames = each.columnNames();
            CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(columnNames);
            commonTableExpression.getColumns().addAll(columns.getValue());
        }
        result.add(commonTableExpression);
    }
    return result;
}
@Override
public ASTNode visitHavingClause(final HavingClauseContext ctx) {
    // HAVING wraps a single boolean expression spanning the whole clause.
    return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ExpressionSegment) visit(ctx.expr()));
}
// Applies ORDER BY items to the statement and extracts OFFSET/FETCH pagination.
// Only numeric literals and parameter markers are recognized as pagination values;
// other expression forms leave offset/rowcount null.
private SQLServerSelectStatement visitOrderBy(final SQLServerSelectStatement selectStatement, final OrderByClauseContext ctx) {
    Collection<OrderByItemSegment> items = new LinkedList<>();
    int orderByStartIndex = ctx.start.getStartIndex();
    int orderByStopIndex = ctx.start.getStartIndex();
    for (OrderByItemContext each : ctx.orderByItem()) {
        items.add((OrderByItemSegment) visit(each));
        orderByStopIndex = each.stop.getStopIndex();
    }
    OrderBySegment orderBySegment = new OrderBySegment(orderByStartIndex, orderByStopIndex, items);
    selectStatement.setOrderBy(orderBySegment);
    PaginationValueSegment offset = null;
    PaginationValueSegment rowcount = null;
    LimitSegment limitSegment = null;
    if (null != ctx.OFFSET()) {
        ASTNode astNode = visit(ctx.expr(0));
        if (astNode instanceof LiteralExpressionSegment && ((LiteralExpressionSegment) astNode).getLiterals() instanceof Number) {
            offset = new NumberLiteralLimitValueSegment(ctx.expr(0).start.getStartIndex(), ctx.expr(0).stop.getStopIndex(),
                    ((Number) ((LiteralExpressionSegment) astNode).getLiterals()).longValue());
        } else if (astNode instanceof ParameterMarkerExpressionSegment) {
            // Marker index is its position in the collected marker list at this point.
            offset = new ParameterMarkerLimitValueSegment(ctx.expr(0).start.getStartIndex(), ctx.expr(0).stop.getStopIndex(), parameterMarkerSegments.size());
        }
    }
    if (null != ctx.FETCH()) {
        ASTNode astNode = visit(ctx.expr(1));
        if (astNode instanceof LiteralExpressionSegment && ((LiteralExpressionSegment) astNode).getLiterals() instanceof Number) {
            rowcount = new NumberLiteralLimitValueSegment(ctx.expr(1).start.getStartIndex(), ctx.expr(1).stop.getStopIndex(),
                    ((Number) ((LiteralExpressionSegment) astNode).getLiterals()).longValue());
        } else if (astNode instanceof ParameterMarkerExpressionSegment) {
            rowcount = new ParameterMarkerLimitValueSegment(ctx.expr(1).start.getStartIndex(), ctx.expr(1).stop.getStopIndex(), parameterMarkerSegments.size());
        }
    }
    // A limit segment is only produced when OFFSET yielded a recognized value.
    if (null != offset) {
        limitSegment = new LimitSegment(ctx.OFFSET().getSymbol().getStartIndex(), ctx.stop.getStopIndex(), offset, rowcount);
    }
    selectStatement.setLimit(limitSegment);
    return selectStatement;
}
// True when the select clause's duplicate specification resolved to DISTINCT.
private boolean isDistinct(final SelectClauseContext ctx) {
    return ((BooleanLiteralValue) visit(ctx.duplicateSpecification())).getValue();
}
@Override
public ASTNode visitProjections(final ProjectionsContext ctx) {
    List<ProjectionSegment> projections = new LinkedList<>();
    // SQLServer TOP (n) is modeled as the first projection when present.
    if (null != ctx.top()) {
        projections.add((ProjectionSegment) visit(ctx.top()));
    }
    for (ProjectionContext each : ctx.projection()) {
        projections.add((ProjectionSegment) visit(each));
    }
    ProjectionsSegment result = new ProjectionsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
    result.getProjections().addAll(projections);
    return result;
}
@Override
public ASTNode visitTableReferences(final TableReferencesContext ctx) {
    // Fold comma-separated table references into left-deep join segments.
    TableSegment result = (TableSegment) visit(ctx.tableReference(0));
    for (int index = 1; index < ctx.tableReference().size(); index++) {
        result = generateJoinTableSourceFromTableReference(ctx.tableReference(index), result);
    }
    return result;
}
// Wraps an existing table source and the next table reference into a COMMA join
// (SQLServer "FROM a, b" syntax); left side keeps its original start index.
private JoinTableSegment generateJoinTableSourceFromTableReference(final TableReferenceContext ctx, final TableSegment tableSegment) {
    JoinTableSegment result = new JoinTableSegment();
    result.setStartIndex(tableSegment.getStartIndex());
    result.setStopIndex(ctx.stop.getStopIndex());
    result.setLeft(tableSegment);
    result.setRight((TableSegment) visit(ctx));
    result.setJoinType(JoinType.COMMA.name());
    return result;
}
@Override
public ASTNode visitWhereClause(final WhereClauseContext ctx) {
    // WHERE wraps a single boolean expression spanning the whole clause.
    ExpressionSegment predicate = (ExpressionSegment) visit(ctx.expr());
    return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), predicate);
}
@Override
public ASTNode visitGroupByClause(final GroupByClauseContext ctx) {
    // GROUP BY reuses order-by item parsing for its grouping expressions.
    Collection<OrderByItemSegment> groupingItems = new LinkedList<>();
    ctx.orderByItem().forEach(itemContext -> groupingItems.add((OrderByItemSegment) visit(itemContext)));
    return new GroupBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), groupingItems);
}
/**
 * Get original SQL text covered by a parse-rule context, taken verbatim from the
 * underlying token input stream (preserves whitespace and case).
 *
 * @param ctx context
 * @return original text
 */
protected String getOriginalText(final ParserRuleContext ctx) {
    return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
}
@Override
public ASTNode visitInsert(final InsertContext ctx) {
    // Exactly one insert body form applies: DEFAULT VALUES, VALUES, EXEC, or SELECT.
    SQLServerInsertStatement result;
    if (null != ctx.insertDefaultValue()) {
        result = (SQLServerInsertStatement) visit(ctx.insertDefaultValue());
    } else if (null != ctx.insertValuesClause()) {
        result = (SQLServerInsertStatement) visit(ctx.insertValuesClause());
    } else if (null != ctx.insertExecClause()) {
        result = (SQLServerInsertStatement) visit(ctx.insertExecClause());
    } else {
        result = (SQLServerInsertStatement) visit(ctx.insertSelectClause());
    }
    if (null != ctx.withClause()) {
        result.setWithSegment((WithSegment) visit(ctx.withClause()));
    }
    if (null != ctx.withTableHint()) {
        result.setWithTableHintSegment((WithTableHintSegment) visit(ctx.withTableHint()));
    }
    result.setTable((SimpleTableSegment) visit(ctx.tableName()));
    // Attach all parameter markers collected while visiting sub-expressions.
    result.addParameterMarkerSegments(getParameterMarkerSegments());
    return result;
}
// Fix: the original carried a duplicated @Override annotation, which is a compile
// error (@Override is not a repeatable annotation); exactly one is kept.
@Override
public ASTNode visitTableHintLimited(final TableHintLimitedContext ctx) {
    // The hint's raw text (e.g. NOLOCK, UPDLOCK) is stored verbatim.
    TableHintLimitedSegment result = new TableHintLimitedSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    result.setValue(ctx.getText());
    return result;
}
@Override
public ASTNode visitInsertDefaultValue(final InsertDefaultValueContext ctx) {
    // INSERT ... DEFAULT VALUES: only the (possibly empty) column list and an
    // optional OUTPUT clause are captured.
    SQLServerInsertStatement result = new SQLServerInsertStatement();
    result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
    if (null != ctx.outputClause()) {
        result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
    }
    return result;
}
@Override
public ASTNode visitInsertExecClause(final InsertExecClauseContext ctx) {
    // INSERT ... EXEC procedure: the exec invocation supplies the inserted rows.
    SQLServerInsertStatement result = new SQLServerInsertStatement();
    result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
    result.setExecSegment((ExecSegment) visit(ctx.exec()));
    return result;
}
@Override
public ASTNode visitExec(final ExecContext ctx) {
    ExecSegment result = new ExecSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
    if (null != ctx.procedureName()) {
        result.setProcedureName((FunctionNameSegment) visitProcedureName(ctx.procedureName()));
    }
    // Optional argument expressions passed to the procedure.
    if (null != ctx.expr()) {
        Collection<ExpressionSegment> items = new LinkedList<>();
        for (ExprContext each : ctx.expr()) {
            items.add((ExpressionSegment) visit(each));
        }
        result.getExpressionSegments().addAll(items);
    }
    return result;
}
@Override
public ASTNode visitProcedureName(final ProcedureNameContext ctx) {
    FunctionNameSegment result = new FunctionNameSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), (IdentifierValue) visit(ctx.name()));
    // Optional schema/owner qualifier, e.g. dbo.myProc.
    if (null != ctx.owner()) {
        result.setOwner(new OwnerSegment(ctx.owner().start.getStartIndex(), ctx.owner().stop.getStopIndex(), (IdentifierValue) visit(ctx.owner())));
    }
    return result;
}
@Override
public ASTNode visitOutputClause(final OutputClauseContext ctx) {
    OutputSegment result = new OutputSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    if (null != ctx.outputWithColumns()) {
        OutputWithColumnsContext outputWithColumnsContext = ctx.outputWithColumns();
        List<OutputWithColumnContext> outputWithColumnContexts = outputWithColumnsContext.outputWithColumn();
        Collection<ColumnProjectionSegment> outputColumns = new LinkedList<>();
        for (OutputWithColumnContext each : outputWithColumnContexts) {
            ColumnSegment column = new ColumnSegment(each.start.getStartIndex(), each.stop.getStopIndex(), new IdentifierValue(each.name().getText()));
            ColumnProjectionSegment outputColumn = new ColumnProjectionSegment(column);
            if (null != each.alias()) {
                // NOTE(review): the alias identifier is built from each.name(), not each.alias() —
                // looks like alias text should come from the alias context; confirm against the grammar.
                outputColumn.setAlias(new AliasSegment(each.alias().start.getStartIndex(), each.alias().stop.getStopIndex(), new IdentifierValue(each.name().getText())));
            }
            outputColumns.add(outputColumn);
        }
        result.getOutputColumns().addAll(outputColumns);
    }
    // Optional OUTPUT ... INTO target table with its own column list.
    if (null != ctx.outputTableName()) {
        OutputTableNameContext outputTableNameContext = ctx.outputTableName();
        TableNameSegment tableName = new TableNameSegment(outputTableNameContext.start.getStartIndex(),
                outputTableNameContext.stop.getStopIndex(), new IdentifierValue(outputTableNameContext.getText()));
        result.setTableName(tableName);
        if (null != ctx.columnNames()) {
            ColumnNamesContext columnNames = ctx.columnNames();
            CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(columnNames);
            result.getTableColumns().addAll(columns.getValue());
        }
    }
    return result;
}
@Override
public ASTNode visitInsertValuesClause(final InsertValuesClauseContext ctx) {
    // INSERT ... VALUES (...), (...): one InsertValuesSegment per value tuple.
    SQLServerInsertStatement result = new SQLServerInsertStatement();
    result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
    result.getValues().addAll(createInsertValuesSegments(ctx.assignmentValues()));
    if (null != ctx.outputClause()) {
        result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
    }
    return result;
}
// Visits each value-tuple context into an InsertValuesSegment, keeping input order.
private Collection<InsertValuesSegment> createInsertValuesSegments(final Collection<AssignmentValuesContext> assignmentValuesContexts) {
    Collection<InsertValuesSegment> valuesSegments = new LinkedList<>();
    assignmentValuesContexts.forEach(valuesContext -> valuesSegments.add((InsertValuesSegment) visit(valuesContext)));
    return valuesSegments;
}
@Override
public ASTNode visitInsertSelectClause(final InsertSelectClauseContext ctx) {
    // INSERT ... SELECT: the select is wrapped as a subquery segment.
    SQLServerInsertStatement result = new SQLServerInsertStatement();
    result.setInsertColumns(createInsertColumns(ctx.columnNames(), ctx.start.getStartIndex()));
    result.setInsertSelect(createInsertSelectSegment(ctx));
    if (null != ctx.outputClause()) {
        result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
    }
    return result;
}
// Builds the insert-column segment; a missing column list yields an empty,
// zero-width segment anchored just before the statement's start index.
@SuppressWarnings("unchecked")
private InsertColumnsSegment createInsertColumns(final ColumnNamesContext columnNames, final int startIndex) {
    if (null != columnNames) {
        CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(columnNames);
        return new InsertColumnsSegment(columnNames.start.getStartIndex(), columnNames.stop.getStopIndex(), columnSegments.getValue());
    }
    return new InsertColumnsSegment(startIndex - 1, startIndex - 1, Collections.emptyList());
}
// Wraps the INSERT's source SELECT as a subquery segment with its original text.
private SubquerySegment createInsertSelectSegment(final InsertSelectClauseContext ctx) {
    SQLServerSelectStatement selectStatement = (SQLServerSelectStatement) visit(ctx.select());
    return new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement, getOriginalText(ctx.select()));
}
@Override
public ASTNode visitWithClause(final WithClauseContext ctx) {
    // WITH clause: collects all CTE definitions into a single WithSegment.
    Collection<CommonTableExpressionSegment> commonTableExpressionSegments = getCommonTableExpressionSegmentsUsingCteClauseSet(ctx.cteClauseSet());
    return new WithSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), commonTableExpressionSegments);
}
@Override
public ASTNode visitUpdate(final UpdateContext ctx) {
    SQLServerUpdateStatement result = new SQLServerUpdateStatement();
    if (null != ctx.withClause()) {
        result.setWithSegment((WithSegment) visit(ctx.withClause()));
    }
    result.setTable((TableSegment) visit(ctx.tableReferences()));
    result.setSetAssignment((SetAssignmentSegment) visit(ctx.setAssignmentsClause()));
    if (null != ctx.whereClause()) {
        result.setWhere((WhereSegment) visit(ctx.whereClause()));
    }
    // Attach all parameter markers collected while visiting sub-expressions.
    result.addParameterMarkerSegments(getParameterMarkerSegments());
    return result;
}
@Override
public ASTNode visitSetAssignmentsClause(final SetAssignmentsClauseContext ctx) {
    // SET a = 1, b = 2, ...: each assignment becomes a ColumnAssignmentSegment.
    Collection<ColumnAssignmentSegment> assignmentSegments = new LinkedList<>();
    ctx.assignment().forEach(assignmentContext -> assignmentSegments.add((ColumnAssignmentSegment) visit(assignmentContext)));
    return new SetAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), assignmentSegments);
}
@Override
public ASTNode visitAssignmentValues(final AssignmentValuesContext ctx) {
    // One value tuple: (v1, v2, ...) — each element visited into an expression segment.
    List<ExpressionSegment> valueSegments = new LinkedList<>();
    ctx.assignmentValue().forEach(valueContext -> valueSegments.add((ExpressionSegment) visit(valueContext)));
    return new InsertValuesSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), valueSegments);
}
@Override
public ASTNode visitAssignment(final AssignmentContext ctx) {
    ColumnSegment column = (ColumnSegment) visitColumnName(ctx.columnName());
    List<ColumnSegment> columnSegments = new LinkedList<>();
    columnSegments.add(column);
    ExpressionSegment value = (ExpressionSegment) visit(ctx.assignmentValue());
    ColumnAssignmentSegment result = new ColumnAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnSegments, value);
    // NOTE(review): the column is both passed via the constructor list and added here —
    // if the constructor stores columnSegments as the columns, this adds a duplicate; verify.
    result.getColumns().add(column);
    return result;
}
@Override
public ASTNode visitAssignmentValue(final AssignmentValueContext ctx) {
    // Expression values are visited normally; keyword values (e.g. DEFAULT) are
    // preserved as a generic expression segment with their raw text.
    if (null != ctx.expr()) {
        return visit(ctx.expr());
    }
    return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText());
}
@Override
public ASTNode visitDelete(final DeleteContext ctx) {
    SQLServerDeleteStatement result = new SQLServerDeleteStatement();
    if (null != ctx.withClause()) {
        result.setWithSegment((WithSegment) visit(ctx.withClause()));
    }
    // Either a multi-table delete (DELETE t1, t2 FROM ...) or a single-table form.
    if (null != ctx.multipleTablesClause()) {
        result.setTable((TableSegment) visit(ctx.multipleTablesClause()));
    } else {
        result.setTable((TableSegment) visit(ctx.singleTableClause()));
    }
    if (null != ctx.outputClause()) {
        result.setOutputSegment((OutputSegment) visit(ctx.outputClause()));
    }
    if (null != ctx.whereClause()) {
        result.setWhere((WhereSegment) visit(ctx.whereClause()));
    }
    // Attach all parameter markers collected while visiting sub-expressions.
    result.addParameterMarkerSegments(getParameterMarkerSegments());
    return result;
}
@Override
public ASTNode visitSingleTableClause(final SingleTableClauseContext ctx) {
    // A single target table with an optional alias.
    SimpleTableSegment table = (SimpleTableSegment) visit(ctx.tableName());
    AliasContext aliasContext = ctx.alias();
    if (null != aliasContext) {
        table.setAlias((AliasSegment) visit(aliasContext));
    }
    return table;
}
@Override
public ASTNode visitMultipleTablesClause(final MultipleTablesClauseContext ctx) {
    // The relation table source describes the join; the listed names are the actual delete targets.
    DeleteMultiTableSegment multiTableSegment = new DeleteMultiTableSegment();
    multiTableSegment.setRelationTable((TableSegment) visit(ctx.tableReferences()));
    multiTableSegment.setActualDeleteTables(generateTablesFromTableMultipleTableNames(ctx.multipleTableNames()));
    return multiTableSegment;
}
// Resolves each table name under the multiple-table-names node into a SimpleTableSegment.
private List<SimpleTableSegment> generateTablesFromTableMultipleTableNames(final MultipleTableNamesContext ctx) {
    List<SimpleTableSegment> tables = new LinkedList<>();
    ctx.tableName().forEach(tableNameContext -> tables.add((SimpleTableSegment) visit(tableNameContext)));
    return tables;
}
@Override
public ASTNode visitDuplicateSpecification(final DuplicateSpecificationContext ctx) {
    // DISTINCT present -> true; otherwise (e.g. ALL or absent) -> false.
    boolean distinct = null != ctx.DISTINCT();
    return new BooleanLiteralValue(distinct);
}
// Builds a projection segment for one SELECT-list item: owner.* shorthand, bare *,
// a plain column (with optional alias), or a general expression (delegated to createProjection).
@Override
public ASTNode visitProjection(final ProjectionContext ctx) {
if (null != ctx.qualifiedShorthand()) {
// "owner.*": record the owner so the table can be resolved later.
QualifiedShorthandContext shorthand = ctx.qualifiedShorthand();
ShorthandProjectionSegment result = new ShorthandProjectionSegment(shorthand.getStart().getStartIndex(), shorthand.getStop().getStopIndex());
IdentifierValue identifier = new IdentifierValue(shorthand.identifier().getText());
result.setOwner(new OwnerSegment(shorthand.identifier().getStart().getStartIndex(), shorthand.identifier().getStop().getStopIndex(), identifier));
return result;
}
if (null != ctx.unqualifiedShorthand()) {
// Bare "*" projection.
return new ShorthandProjectionSegment(ctx.unqualifiedShorthand().getStart().getStartIndex(), ctx.unqualifiedShorthand().getStop().getStopIndex());
}
AliasSegment alias = null == ctx.alias() ? null : (AliasSegment) visit(ctx.alias());
if (null != ctx.columnName()) {
ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
ColumnProjectionSegment result = new ColumnProjectionSegment(column);
result.setAlias(alias);
return result;
}
// Anything else (functions, expressions, subqueries, literals) is handled uniformly.
return createProjection(ctx, alias);
}
// Parses "TOP n [alias]" into a TopProjectionSegment; n is either a number literal
// or a parameter marker ("?"), which is additionally registered for later binding.
@Override
public ASTNode visitTop(final TopContext ctx) {
int startIndex = ctx.topNum().getStart().getStartIndex();
int stopIndex = ctx.topNum().getStop().getStopIndex();
ASTNode topNum = visit(ctx.topNum());
if (topNum instanceof NumberLiteralValue) {
NumberLiteralRowNumberValueSegment rowNumberSegment = new NumberLiteralRowNumberValueSegment(startIndex, stopIndex, ((NumberLiteralValue) topNum).getValue().longValue(), false);
return new TopProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), rowNumberSegment, null != ctx.alias() ? ctx.alias().getText() : null);
}
// Parameter-marker form: remember the marker so the statement can bind it later.
ParameterMarkerSegment parameterSegment = new ParameterMarkerRowNumberValueSegment(startIndex, stopIndex, ((ParameterMarkerValue) topNum).getValue(), false);
parameterMarkerSegments.add(parameterSegment);
return new TopProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (RowNumberValueSegment) parameterSegment, null != ctx.alias() ? ctx.alias().getText() : null);
}
@Override
public ASTNode visitAlias(final AliasContext ctx) {
    // An alias is either a regular identifier or a quoted string literal.
    IdentifierValue aliasValue = null != ctx.identifier()
            ? (IdentifierValue) visit(ctx.identifier())
            : new IdentifierValue(ctx.STRING_().getText());
    return new AliasSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), aliasValue);
}
// Converts a general expression projection (anything that is not * or a plain column)
// into the matching projection segment, attaching the alias when present.
private ASTNode createProjection(final ProjectionContext ctx, final AliasSegment alias) {
ASTNode projection = visit(ctx.expr());
// Aggregation and expression projections carry their own alias slot.
if (projection instanceof AggregationProjectionSegment) {
((AggregationProjectionSegment) projection).setAlias(alias);
return projection;
}
if (projection instanceof ExpressionProjectionSegment) {
((ExpressionProjectionSegment) projection).setAlias(alias);
return projection;
}
// Function calls and common expressions are wrapped as expression projections.
if (projection instanceof FunctionSegment) {
FunctionSegment segment = (FunctionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getText(), segment);
result.setAlias(alias);
return result;
}
if (projection instanceof CommonExpressionSegment) {
CommonExpressionSegment segment = (CommonExpressionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getText(), segment);
result.setAlias(alias);
return result;
}
if (projection instanceof ColumnSegment) {
ColumnProjectionSegment result = new ColumnProjectionSegment((ColumnSegment) projection);
result.setAlias(alias);
return result;
}
if (projection instanceof SubqueryExpressionSegment) {
SubqueryExpressionSegment subqueryExpressionSegment = (SubqueryExpressionSegment) projection;
// Recover the exact original subquery text from the underlying token stream.
String text = ctx.start.getInputStream().getText(new Interval(subqueryExpressionSegment.getStartIndex(), subqueryExpressionSegment.getStopIndex()));
SubqueryProjectionSegment result = new SubqueryProjectionSegment(((SubqueryExpressionSegment) projection).getSubquery(), text);
result.setAlias(alias);
return result;
}
if (projection instanceof BinaryOperationExpression) {
BinaryOperationExpression binaryExpression = (BinaryOperationExpression) projection;
// The projection span must also cover the alias when the alias extends beyond the expression.
int startIndex = getStartIndexWithAlias(binaryExpression, alias);
int stopIndex = getStopIndexWithAlias(binaryExpression, alias);
ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, binaryExpression.getText(), binaryExpression);
result.setAlias(alias);
return result;
}
if (projection instanceof ParameterMarkerExpressionSegment) {
ParameterMarkerExpressionSegment result = (ParameterMarkerExpressionSegment) projection;
result.setAlias(alias);
return projection;
}
// Fallback: a literal value projection.
LiteralExpressionSegment column = (LiteralExpressionSegment) projection;
ExpressionProjectionSegment result = new ExpressionProjectionSegment(getStartIndexWithAlias(column, alias), getStopIndexWithAlias(column, alias), String.valueOf(column.getLiterals()), column);
result.setAlias(alias);
return result;
}
// Smallest start index of the segment and its alias (the alias may precede the segment).
private int getStartIndexWithAlias(final SQLSegment sqlSegment, final AliasSegment alias) {
    if (null == alias) {
        return sqlSegment.getStartIndex();
    }
    return Math.min(alias.getStartIndex(), sqlSegment.getStartIndex());
}
// Largest stop index of the segment and its alias (the alias may extend past the segment).
private int getStopIndexWithAlias(final SQLSegment sqlSegment, final AliasSegment alias) {
    if (null == alias) {
        return sqlSegment.getStopIndex();
    }
    return Math.max(alias.getStopIndex(), sqlSegment.getStopIndex());
}
// A FROM clause resolves to the table-references tree it wraps.
@Override
public ASTNode visitFromClause(final FromClauseContext ctx) {
return visit(ctx.tableReferences());
}
// Folds the chain of joined tables onto the initial table factor, left to right.
@Override
public ASTNode visitTableReference(final TableReferenceContext ctx) {
    // Iterating an empty list is a no-op, so the former isEmpty() guard and the
    // intermediate result/left variable shuffling were redundant.
    TableSegment result = (TableSegment) visit(ctx.tableFactor());
    for (JoinedTableContext each : ctx.joinedTable()) {
        result = visitJoinedTable(each, result);
    }
    return result;
}
// A table factor is one of: a derived table (subquery), a named table, a table-valued
// expression, or a parenthesized table-references list; each may carry an alias.
@Override
public ASTNode visitTableFactor(final TableFactorContext ctx) {
if (null != ctx.subquery()) {
SQLServerSelectStatement subquery = (SQLServerSelectStatement) visit(ctx.subquery());
SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), subquery, getOriginalText(ctx.subquery()));
SubqueryTableSegment result = new SubqueryTableSegment(subquerySegment);
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
if (null != ctx.tableName()) {
SimpleTableSegment result = (SimpleTableSegment) visit(ctx.tableName());
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
if (null != ctx.expr()) {
// Expression used as a table source (e.g. a table function call).
ExpressionSegment exprSegment = (ExpressionSegment) visit(ctx.expr());
FunctionTableSegment result = new FunctionTableSegment(exprSegment.getStartIndex(), exprSegment.getStopIndex(), exprSegment);
if (null != ctx.alias()) {
result.setAlias((AliasSegment) visit(ctx.alias()));
}
return result;
}
return visit(ctx.tableReferences());
}
// Wraps the accumulated left-hand source and the next table factor into a join segment,
// attaching the join type and any ON/USING specification.
private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final TableSegment tableSegment) {
JoinTableSegment result = new JoinTableSegment();
result.setLeft(tableSegment);
// Span covers the whole accumulated join: left source start through this join's stop.
result.setStartIndex(tableSegment.getStartIndex());
result.setStopIndex(ctx.stop.getStopIndex());
TableSegment right = (TableSegment) visit(ctx.tableFactor());
result.setRight(right);
result.setJoinType(getJoinType(ctx));
if (null != ctx.joinSpecification()) {
visitJoinSpecification(ctx.joinSpecification(), result);
}
return result;
}
// Maps the join keyword of the parse node to a JoinType name; INNER is the default,
// so the former explicit INNER branch duplicated the fall-through return and the
// else-after-return chain added noise.
private String getJoinType(final JoinedTableContext ctx) {
    if (null != ctx.LEFT()) {
        return JoinType.LEFT.name();
    }
    if (null != ctx.RIGHT()) {
        return JoinType.RIGHT.name();
    }
    if (null != ctx.FULL()) {
        return JoinType.FULL.name();
    }
    if (null != ctx.CROSS()) {
        return JoinType.CROSS.name();
    }
    return JoinType.INNER.name();
}
// Attaches the ON condition or the USING column list to the join segment.
private void visitJoinSpecification(final JoinSpecificationContext ctx, final JoinTableSegment joinTableSource) {
    // ON <expr> form: a single boolean condition.
    ExprContext onExpr = ctx.expr();
    if (null != onExpr) {
        joinTableSource.setCondition((ExpressionSegment) visit(onExpr));
    }
    // USING (col, ...) form: shared column names between both sides.
    if (null != ctx.USING()) {
        List<ColumnSegment> usingColumns = ctx.columnNames().columnName().stream()
                .map(each -> (ColumnSegment) visit(each))
                .collect(Collectors.toList());
        joinTableSource.setUsing(usingColumns);
    }
}
// A subquery resolves to its aggregation-clause sub-tree.
@Override
public ASTNode visitSubquery(final SubqueryContext ctx) {
return visit(ctx.aggregationClause());
}
// CREATE TABLE ... AS SELECT has two grammar variants: a local table or a remote table.
@Override
public ASTNode visitCreateTableAsSelectClause(final CreateTableAsSelectClauseContext ctx) {
    SQLServerCreateTableStatement result = new SQLServerCreateTableStatement();
    if (null != ctx.createTableAsSelect()) {
        result.setTable((SimpleTableSegment) visit(ctx.createTableAsSelect().tableName()));
        result.setSelectStatement((SQLServerSelectStatement) visit(ctx.createTableAsSelect().select()));
        if (null != ctx.createTableAsSelect().columnNames()) {
            // Optional explicit column list for the created table; bulk-add instead of
            // the former element-by-element copy loop.
            CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.createTableAsSelect().columnNames());
            result.getColumns().addAll(columnSegments.getValue());
        }
    } else {
        result.setTable((SimpleTableSegment) visit(ctx.createRemoteTableAsSelect().tableName()));
        result.setSelectStatement((SQLServerSelectStatement) visit(ctx.createRemoteTableAsSelect().select()));
    }
    return result;
}
// UPDATE STATISTICS <table> [(index, ...)] [WITH <options>]
@Override
public ASTNode visitUpdateStatistics(final UpdateStatisticsContext ctx) {
    SQLServerUpdateStatisticsStatement result = new SQLServerUpdateStatisticsStatement();
    if (null != ctx.tableName()) {
        result.setTable((SimpleTableSegment) visit(ctx.tableName()));
    }
    // isEmpty() is the idiomatic emptiness check (was "size() > 0").
    if (null != ctx.indexName() && !ctx.indexName().isEmpty()) {
        List<IndexSegment> indexSegments = new LinkedList<>();
        for (IndexNameContext indexNameContext : ctx.indexName()) {
            indexSegments.add((IndexSegment) visit(indexNameContext));
        }
        result.setIndexes(indexSegments);
    }
    if (null != ctx.statisticsWithClause()) {
        result.setStrategy((StatisticsStrategySegment) visit(ctx.statisticsWithClause()));
    }
    return result;
}
@Override
public ASTNode visitStatisticsWithClause(final StatisticsWithClauseContext ctx) {
    // WITH clause of UPDATE STATISTICS: optional sample option plus optional statistics options.
    StatisticsStrategySegment strategy = new StatisticsStrategySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
    if (null != ctx.sampleOption()) {
        strategy.setSampleOption((SampleOptionSegment) visit(ctx.sampleOption()));
    }
    if (null != ctx.statisticsOptions()) {
        strategy.setStatisticsOptions((StatisticsOptionSegment) visit(ctx.statisticsOptions()));
    }
    return strategy;
}
// Delegates to the generated base visitor; individual options are interpreted
// in visitStatisticsOptions where the aggregate segment is assembled.
@Override
public ASTNode visitStatisticsOption(final StatisticsOptionContext ctx) {
return super.visitStatisticsOption(ctx);
}
// Parses the sample option of UPDATE STATISTICS WITH: FULLSCAN, SAMPLE <n> PERCENT|ROWS,
// or RESAMPLE [with partition numbers], plus the optional PERSIST_SAMPLE_PERCENT flag.
@Override
public ASTNode visitSampleOption(final SampleOptionContext ctx) {
SampleOptionSegment result = new SampleOptionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.FULLSCAN()) {
result.setStrategy(SampleStrategy.FULLSCAN);
} else if (null != ctx.SAMPLE()) {
result.setStrategy(SampleStrategy.SAMPLE);
if (null != ctx.NUMBER_()) {
// Only the first number is the sample size.
List<TerminalNode> number = ctx.NUMBER_();
result.setSampleNumber(number.get(0).getText());
}
if (null != ctx.PERCENT()) {
result.setScanUnit(ScanUnit.PERCENT);
} else if (null != ctx.ROWS()) {
result.setScanUnit(ScanUnit.ROWS);
}
} else if (null != ctx.RESAMPLE()) {
result.setStrategy(SampleStrategy.RESAMPLE);
if (null != ctx.NUMBER_()) {
// RESAMPLE may list partition numbers; keep them all, in order.
List<String> partitions = new LinkedList<>();
for (TerminalNode terminalNode : ctx.NUMBER_()) {
partitions.add(terminalNode.getText());
}
result.setPartitions(partitions);
}
}
if (null != ctx.PERSIST_SAMPLE_PERCENT()) {
// PERSIST_SAMPLE_PERCENT = ON | OFF; presence of ON decides the flag.
result.setPersistSamplePercent(null != ctx.ON());
}
return result;
}
// Collects the WITH-clause statistics options into a single segment: dimension
// (ALL/COLUMNS/INDEX), NORECOMPUTE, INCREMENTAL = ON|OFF, MAXDOP = <n>, AUTO_DROP = ON|OFF.
// Later options overwrite earlier ones for the same setting.
@Override
public ASTNode visitStatisticsOptions(final StatisticsOptionsContext ctx) {
StatisticsOptionSegment result = new StatisticsOptionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
for (StatisticsOptionContext option : ctx.statisticsOption()) {
if (null != option.ALL()) {
result.setStatisticsDimension(StatisticsDimension.ALL);
} else if (null != option.COLUMNS()) {
result.setStatisticsDimension(StatisticsDimension.COLUMNS);
} else if (null != option.INDEX()) {
result.setStatisticsDimension(StatisticsDimension.INDEX);
}
if (null != option.NORECOMPUTE()) {
result.setNoRecompute(true);
}
if (null != option.INCREMENTAL()) {
result.setIncremental(null != option.ON());
}
if (null != option.MAXDOP()) {
result.setMaxDegreeOfParallelism(option.NUMBER_().getText());
}
if (null != option.AUTO_DROP()) {
result.setAutoDrop(null != option.ON());
}
}
return result;
}
}
|
The keySet method of org.springframework.messaging.MessageHeaders returns an unmodifiable Set. I have updated copyHeaders to be a map and used the putAll API.
|
// Maps Spring message headers onto the ServiceBusMessage: well-known headers are consumed
// (their keys removed from the copied key set) and applied via dedicated setters; every
// remaining key is copied into the message's application properties as a string.
protected void setCustomHeaders(MessageHeaders headers, ServiceBusMessage message) {
// Working copy of the header keys, because MessageHeaders itself cannot be mutated.
Set<String> copyHeaders = new HashSet<String>();
headers.forEach((key, value) -> {
copyHeaders.add(key);
});
// Later setters overwrite earlier ones for the same target (e.g. RAW_ID and MESSAGE_ID
// both end up in setMessageId, so the last matching header wins).
getStringHeader(headers, copyHeaders, MessageHeaders.ID).ifPresent(message::setMessageId);
getStringHeader(headers, copyHeaders, MessageHeaders.CONTENT_TYPE).ifPresent(message::setContentType);
getStringHeader(headers, copyHeaders, MessageHeaders.REPLY_CHANNEL).ifPresent(message::setReplyTo);
getStringHeader(headers, copyHeaders, AzureHeaders.RAW_ID).ifPresent(message::setMessageId);
// SCHEDULED_ENQUEUE_MESSAGE is a delay in milliseconds relative to now.
Optional.of(AzureHeaders.SCHEDULED_ENQUEUE_MESSAGE)
.filter(copyHeaders::remove)
.map(key -> headers.get(key, Integer.class))
.map(Duration::ofMillis)
.map(Instant.now()::plus)
.map((ins) -> OffsetDateTime.ofInstant(ins, ZoneId.systemDefault()))
.ifPresent(message::setScheduledEnqueueTime);
getStringHeader(headers, copyHeaders, MESSAGE_ID).ifPresent(message::setMessageId);
Optional.of(TIME_TO_LIVE)
.filter(copyHeaders::remove)
.map(key -> headers.get(key, Duration.class))
.ifPresent(message::setTimeToLive);
// SCHEDULED_ENQUEUE_TIME is an absolute Instant and overrides the relative delay above.
Optional.of(SCHEDULED_ENQUEUE_TIME)
.filter(copyHeaders::remove)
.map(key -> headers.get(key, Instant.class))
.map((ins) -> OffsetDateTime.ofInstant(ins, ZoneId.systemDefault()))
.ifPresent(message::setScheduledEnqueueTime);
getStringHeader(headers, copyHeaders, SESSION_ID).ifPresent(message::setSessionId);
getStringHeader(headers, copyHeaders, CORRELATION_ID).ifPresent(message::setCorrelationId);
getStringHeader(headers, copyHeaders, TO).ifPresent(message::setTo);
getStringHeader(headers, copyHeaders, REPLY_TO_SESSION_ID).ifPresent(message::setReplyToSessionId);
getStringHeader(headers, copyHeaders, PARTITION_KEY).ifPresent(message::setPartitionKey);
// Everything not consumed above becomes a stringified application property.
copyHeaders.forEach(key -> {
message.getApplicationProperties().put(key, headers.get(key).toString());
});
}
|
});
|
// Maps Spring message headers onto the ServiceBusMessage. Well-known headers are removed
// from a mutable copy of the header map as they are applied; later mappings deliberately
// override earlier ones (MESSAGE_ID over RAW_ID over MessageHeaders.ID; absolute
// SCHEDULED_ENQUEUE_TIME over the relative SCHEDULED_ENQUEUE_MESSAGE delay), and each
// override is logged. Remaining entries become stringified application properties.
protected void setCustomHeaders(MessageHeaders headers, ServiceBusMessage message) {
// MessageHeaders is unmodifiable, so consume keys from a mutable copy instead.
Map<String, Object> copySpringMessageHeaders = new HashMap<String, Object>();
copySpringMessageHeaders.putAll(headers);
getAndRemove(copySpringMessageHeaders, MessageHeaders.ID, UUID.class)
.ifPresent(val -> message.setMessageId(val.toString()));
getAndRemove(copySpringMessageHeaders, MessageHeaders.CONTENT_TYPE).ifPresent(message::setContentType);
getAndRemove(copySpringMessageHeaders, MessageHeaders.REPLY_CHANNEL).ifPresent(message::setReplyTo);
getAndRemove(copySpringMessageHeaders, AzureHeaders.RAW_ID).ifPresent(val -> {
message.setMessageId(val);
logOverriddenHeaders(AzureHeaders.RAW_ID, MessageHeaders.ID, headers);
});
// Relative delay in milliseconds from now.
getAndRemove(copySpringMessageHeaders, AzureHeaders.SCHEDULED_ENQUEUE_MESSAGE, Integer.class)
.map(Duration::ofMillis)
.map(Instant.now()::plus)
.map((ins) -> OffsetDateTime.ofInstant(ins, ZoneId.systemDefault()))
.ifPresent(message::setScheduledEnqueueTime);
getAndRemove(copySpringMessageHeaders, MESSAGE_ID).ifPresent(val -> {
message.setMessageId(val);
// Only log the highest-priority header that is actually being overridden.
if (!logOverriddenHeaders(MESSAGE_ID, AzureHeaders.RAW_ID, headers)) {
logOverriddenHeaders(MESSAGE_ID, MessageHeaders.ID, headers);
}
});
getAndRemove(copySpringMessageHeaders, TIME_TO_LIVE, Duration.class).ifPresent(message::setTimeToLive);
// Absolute Instant; wins over the relative delay set above.
getAndRemove(copySpringMessageHeaders, SCHEDULED_ENQUEUE_TIME, Instant.class)
.map((ins) -> OffsetDateTime.ofInstant(ins, ZoneId.systemDefault()))
.ifPresent(val -> {
message.setScheduledEnqueueTime(val);
logOverriddenHeaders(SCHEDULED_ENQUEUE_TIME, AzureHeaders.SCHEDULED_ENQUEUE_MESSAGE, headers);
});
getAndRemove(copySpringMessageHeaders, SESSION_ID).ifPresent(message::setSessionId);
getAndRemove(copySpringMessageHeaders, CORRELATION_ID).ifPresent(message::setCorrelationId);
getAndRemove(copySpringMessageHeaders, TO).ifPresent(message::setTo);
getAndRemove(copySpringMessageHeaders, REPLY_TO_SESSION_ID).ifPresent(message::setReplyToSessionId);
getAndRemove(copySpringMessageHeaders, PARTITION_KEY).ifPresent(message::setPartitionKey);
// Everything not consumed above becomes a stringified application property.
copySpringMessageHeaders.forEach((key, value) -> {
message.getApplicationProperties().put(key, value.toString());
});
}
|
/**
 * Converts between {@link ServiceBusReceivedMessage} payloads/headers and Spring messages.
 */
class ServiceBusMessageConverter
        extends AbstractAzureMessageConverter<ServiceBusReceivedMessage, ServiceBusMessage> {

    private final ObjectMapper objectMapper;

    public ServiceBusMessageConverter() {
        objectMapper = OBJECT_MAPPER;
    }

    public ServiceBusMessageConverter(ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
    }

    @Override
    protected ObjectMapper getObjectMapper() {
        return objectMapper;
    }

    @Override
    protected byte[] getPayload(ServiceBusReceivedMessage azureMessage) {
        final BinaryData body = azureMessage.getBody();
        return body == null ? null : body.toBytes();
    }

    @Override
    protected ServiceBusMessage fromString(String payload) {
        return new ServiceBusMessage(payload);
    }

    @Override
    protected ServiceBusMessage fromByte(byte[] payload) {
        return new ServiceBusMessage(payload);
    }

    // Fixed: "@Override" was applied twice to this method, which does not compile
    // (java.lang.Override is not @Repeatable).
    @Override
    protected Map<String, Object> buildCustomHeaders(ServiceBusReceivedMessage message) {
        Map<String, Object> headers = new HashMap<>();
        setValueIfHasText(headers, MessageHeaders.ID, message.getMessageId());
        setValueIfHasText(headers, MessageHeaders.CONTENT_TYPE, message.getContentType());
        setValueIfHasText(headers, MessageHeaders.REPLY_CHANNEL, message.getReplyTo());
        setValueIfHasText(headers, AzureHeaders.RAW_ID, message.getMessageId());
        setValueIfHasText(headers, CORRELATION_ID, message.getCorrelationId());
        setValueIfHasText(headers, MESSAGE_ID, message.getMessageId());
        setValueIfHasText(headers, PARTITION_KEY, message.getPartitionKey());
        setValueIfHasText(headers, TO, message.getTo());
        setValueIfPresent(headers, TIME_TO_LIVE, message.getTimeToLive());
        setValueIfPresent(headers, SCHEDULED_ENQUEUE_TIME, message.getScheduledEnqueueTime());
        setValueIfHasText(headers, REPLY_TO_SESSION_ID, message.getReplyToSessionId());
        setValueIfHasText(headers, SESSION_ID, message.getSessionId());
        // Application properties must not shadow the well-known headers set above.
        message.getApplicationProperties().forEach(headers::putIfAbsent);
        return Collections.unmodifiableMap(headers);
    }

    /**
     * Looks up a header value as text, consuming its key from the working key set.
     * Empty when the key was not present or the value has no text.
     */
    private Optional<String> getStringHeader(MessageHeaders springMessageHeaders, Set<String> copyHeaders, String key) {
        return Optional.of(key)
            .filter(copyHeaders::remove)
            .map(springMessageHeaders::get)
            .map(Object::toString)
            .filter(StringUtils::hasText);
    }

    private void setValueIfHasText(Map<String, Object> map, String key, String value) {
        Optional.ofNullable(value).filter(StringUtils::hasText).ifPresent(s -> map.put(key, s));
    }

    private void setValueIfPresent(Map<String, Object> map, String key, Object value) {
        Optional.ofNullable(value).ifPresent(s -> map.put(key, s));
    }
}
|
class ServiceBusMessageConverter
extends AbstractAzureMessageConverter<ServiceBusReceivedMessage, ServiceBusMessage> {
private static final Logger LOGGER = LoggerFactory.getLogger(ServiceBusMessageConverter.class);
private final ObjectMapper objectMapper;
public ServiceBusMessageConverter() {
objectMapper = OBJECT_MAPPER;
}
public ServiceBusMessageConverter(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}
@Override
protected ObjectMapper getObjectMapper() {
return objectMapper;
}
@Override
protected byte[] getPayload(ServiceBusReceivedMessage azureMessage) {
final BinaryData body = azureMessage.getBody();
return body == null ? null : body.toBytes();
}
@Override
protected ServiceBusMessage fromString(String payload) {
return new ServiceBusMessage(payload);
}
@Override
protected ServiceBusMessage fromByte(byte[] payload) {
return new ServiceBusMessage(payload);
}
@Override
@Override
protected Map<String, Object> buildCustomHeaders(ServiceBusReceivedMessage message) {
Map<String, Object> headers = new HashMap<>();
setValueIfHasText(headers, MessageHeaders.ID, message.getMessageId());
setValueIfHasText(headers, MessageHeaders.CONTENT_TYPE, message.getContentType());
setValueIfHasText(headers, MessageHeaders.REPLY_CHANNEL, message.getReplyTo());
setValueIfHasText(headers, AzureHeaders.RAW_ID, message.getMessageId());
setValueIfHasText(headers, CORRELATION_ID, message.getCorrelationId());
setValueIfHasText(headers, MESSAGE_ID, message.getMessageId());
setValueIfHasText(headers, PARTITION_KEY, message.getPartitionKey());
setValueIfHasText(headers, TO, message.getTo());
setValueIfPresent(headers, TIME_TO_LIVE, message.getTimeToLive());
setValueIfPresent(headers, SCHEDULED_ENQUEUE_TIME, message.getScheduledEnqueueTime());
setValueIfHasText(headers, REPLY_TO_SESSION_ID, message.getReplyToSessionId());
setValueIfHasText(headers, SESSION_ID, message.getSessionId());
message.getApplicationProperties().forEach((key, value) -> {
headers.putIfAbsent(key, value);
});
return Collections.unmodifiableMap(headers);
}
/**
* Get and remove the header value as {@link String} from a copy of {@link MessageHeaders} .
*
* @param copySpringMessageHeaders A copy of the original {@link MessageHeaders}.
* @param key The header key to get value.
* @return {@link Optional} of the header value.
*/
private Optional<String> getAndRemove(Map<String, Object> copySpringMessageHeaders, String key) {
return getAndRemove(copySpringMessageHeaders, key, String.class).filter(StringUtils::hasText);
}
/**
* Get and remove the header value from a copy of {@link MessageHeaders} and convert to the target type.
*
* @param copySpringMessageHeaders A copy of the original {@link MessageHeaders}.
* @param key The header key to get value.
* @param clazz The class that the header value converts to.
* @param <T> The generic type of the class.
* @return {@link Optional} of the header value.
*/
private <T> Optional<T> getAndRemove(Map<String, Object> copySpringMessageHeaders, String key, Class<T> clazz) {
return Optional.ofNullable(clazz.cast(copySpringMessageHeaders.remove(key)));
}
private Boolean logOverriddenHeaders(String currentHeader, String overriddenHeader,
MessageHeaders springMessageHeaders) {
Boolean isExisted = false;
if (springMessageHeaders.containsKey(overriddenHeader)) {
isExisted = true;
LOGGER.warn("{} header detected, usage of {} header will be overridden", currentHeader,
overriddenHeader);
}
return isExisted;
}
private void setValueIfHasText(Map<String, Object> map, String key, String value) {
Optional.ofNullable(value).filter(StringUtils::hasText).ifPresent(s -> map.put(key, s));
}
private void setValueIfPresent(Map<String, Object> map, String key, Object value) {
Optional.ofNullable(value).ifPresent(s -> map.put(key, s));
}
}
|
```suggestion Objects.equals(checkpointType, that.checkpointType) && ```
|
// Value equality over all statistics fields; must stay consistent with hashCode().
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CheckpointStatistics that = (CheckpointStatistics) o;
return id == that.id &&
savepoint == that.savepoint &&
triggerTimestamp == that.triggerTimestamp &&
latestAckTimestamp == that.latestAckTimestamp &&
stateSize == that.stateSize &&
duration == that.duration &&
alignmentBuffered == that.alignmentBuffered &&
numSubtasks == that.numSubtasks &&
numAckSubtasks == that.numAckSubtasks &&
status == that.status &&
Objects.equals(checkPointType, that.checkPointType) &&
Objects.equals(checkpointStatisticsPerTask, that.checkpointStatisticsPerTask);
}
|
Objects.equals(checkPointType, that.checkPointType) &&
|
// Value equality over all statistics fields; keep in sync with hashCode().
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    CheckpointStatistics that = (CheckpointStatistics) o;
    // Fixed: "processedData == processedData" compared the field with itself and
    // was therefore always true, silently ignoring that field in equality.
    return id == that.id &&
        savepoint == that.savepoint &&
        triggerTimestamp == that.triggerTimestamp &&
        latestAckTimestamp == that.latestAckTimestamp &&
        stateSize == that.stateSize &&
        duration == that.duration &&
        alignmentBuffered == that.alignmentBuffered &&
        processedData == that.processedData &&
        persistedData == that.persistedData &&
        numSubtasks == that.numSubtasks &&
        numAckSubtasks == that.numAckSubtasks &&
        status == that.status &&
        Objects.equals(checkpointType, that.checkpointType) &&
        Objects.equals(checkpointStatisticsPerTask, that.checkpointStatisticsPerTask);
}
|
class CheckpointStatistics implements ResponseBody {
public static final String FIELD_NAME_ID = "id";
public static final String FIELD_NAME_STATUS = "status";
public static final String FIELD_NAME_IS_SAVEPOINT = "is_savepoint";
public static final String FIELD_NAME_TRIGGER_TIMESTAMP = "trigger_timestamp";
public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp";
/**
* The accurate name of this field should be 'checkpointed_data_size',
* keep it as before to not break backwards compatibility for old web UI.
*
* @see <a href="https:
*/
public static final String FIELD_NAME_STATE_SIZE = "state_size";
public static final String FIELD_NAME_DURATION = "end_to_end_duration";
public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered";
public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks";
public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks";
public static final String FIELD_NAME_TASKS = "tasks";
public static final String FIELD_NAME_CHECKPOINT_TYPE = "checkpoint_type";
@JsonProperty(FIELD_NAME_ID)
private final long id;
@JsonProperty(FIELD_NAME_STATUS)
private final CheckpointStatsStatus status;
@JsonProperty(FIELD_NAME_IS_SAVEPOINT)
private final boolean savepoint;
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP)
private final long triggerTimestamp;
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP)
private final long latestAckTimestamp;
@JsonProperty(FIELD_NAME_STATE_SIZE)
private final long stateSize;
@JsonProperty(FIELD_NAME_DURATION)
private final long duration;
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED)
private final long alignmentBuffered;
@JsonProperty(FIELD_NAME_NUM_SUBTASKS)
private final int numSubtasks;
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS)
private final int numAckSubtasks;
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE)
private final CheckpointType checkPointType;
@JsonProperty(FIELD_NAME_TASKS)
@JsonSerialize(keyUsing = JobVertexIDKeySerializer.class)
private final Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;
// JSON-creator constructor: every field is mapped by name so REST responses round-trip.
// status and checkpointStatisticsPerTask are mandatory (null-checked); the rest are
// stored as given.
@JsonCreator
private CheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long id,
@JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
@JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkPointType,
@JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask) {
this.id = id;
this.status = Preconditions.checkNotNull(status);
this.savepoint = savepoint;
this.triggerTimestamp = triggerTimestamp;
this.latestAckTimestamp = latestAckTimestamp;
this.stateSize = stateSize;
this.duration = duration;
this.alignmentBuffered = alignmentBuffered;
this.numSubtasks = numSubtasks;
this.numAckSubtasks = numAckSubtasks;
this.checkPointType = checkPointType;
this.checkpointStatisticsPerTask = Preconditions.checkNotNull(checkpointStatisticsPerTask);
}
/** Returns the checkpoint id. */
public long getId() {
return id;
}
/** Returns the checkpoint status. */
public CheckpointStatsStatus getStatus() {
return status;
}
/** Returns whether this checkpoint is a savepoint. */
public boolean isSavepoint() {
return savepoint;
}
/** Returns the trigger timestamp. */
public long getTriggerTimestamp() {
return triggerTimestamp;
}
/** Returns the latest acknowledgement timestamp. */
public long getLatestAckTimestamp() {
return latestAckTimestamp;
}
/** Returns the state size (serialized as "state_size" for web-UI compatibility). */
public long getStateSize() {
return stateSize;
}
/** Returns the end-to-end duration. */
public long getDuration() {
return duration;
}
/** Returns the number of subtasks. */
public int getNumSubtasks() {
return numSubtasks;
}
/** Returns the number of acknowledged subtasks. */
public int getNumAckSubtasks() {
return numAckSubtasks;
}
/** Returns the checkpoint type. */
public CheckpointType getCheckPointType() {
return checkPointType;
}
/** Returns per-task checkpoint statistics, possibly {@code null}. */
@Nullable
public Map<JobVertexID, TaskCheckpointStatistics> getCheckpointStatisticsPerTask() {
return checkpointStatisticsPerTask;
}
// Fixed: "@Override" appeared twice here, which does not compile
// (java.lang.Override is not @Repeatable). Must stay consistent with equals().
@Override
public int hashCode() {
    return Objects.hash(id, status, savepoint, triggerTimestamp, latestAckTimestamp, stateSize, duration, alignmentBuffered, numSubtasks, numAckSubtasks, checkPointType, checkpointStatisticsPerTask);
}
// Converts internal AbstractCheckpointStats into the matching REST representation
// (completed / failed / pending subtype). When includeTaskCheckpointStatistics is set,
// per-task statistics are materialized as well; otherwise an empty map is attached.
public static CheckpointStatistics generateCheckpointStatistics(AbstractCheckpointStats checkpointStats, boolean includeTaskCheckpointStatistics) {
Preconditions.checkNotNull(checkpointStats);
Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;
if (includeTaskCheckpointStatistics) {
Collection<TaskStateStats> taskStateStats = checkpointStats.getAllTaskStateStats();
checkpointStatisticsPerTask = new HashMap<>(taskStateStats.size());
for (TaskStateStats taskStateStat : taskStateStats) {
checkpointStatisticsPerTask.put(
taskStateStat.getJobVertexId(),
new TaskCheckpointStatistics(
checkpointStats.getCheckpointId(),
checkpointStats.getStatus(),
taskStateStat.getLatestAckTimestamp(),
taskStateStat.getStateSize(),
taskStateStat.getEndToEndDuration(checkpointStats.getTriggerTimestamp()),
0,
taskStateStat.getNumberOfSubtasks(),
taskStateStat.getNumberOfAcknowledgedSubtasks()));
}
} else {
checkpointStatisticsPerTask = Collections.emptyMap();
}
// Dispatch on the concrete stats type; each branch maps into its REST subtype.
if (checkpointStats instanceof CompletedCheckpointStats) {
final CompletedCheckpointStats completedCheckpointStats = ((CompletedCheckpointStats) checkpointStats);
return new CheckpointStatistics.CompletedCheckpointStatistics(
completedCheckpointStats.getCheckpointId(),
completedCheckpointStats.getStatus(),
completedCheckpointStats.getProperties().isSavepoint(),
completedCheckpointStats.getTriggerTimestamp(),
completedCheckpointStats.getLatestAckTimestamp(),
completedCheckpointStats.getStateSize(),
completedCheckpointStats.getEndToEndDuration(),
0,
completedCheckpointStats.getNumberOfSubtasks(),
completedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
completedCheckpointStats.getProperties().getCheckpointType(),
checkpointStatisticsPerTask,
completedCheckpointStats.getExternalPath(),
completedCheckpointStats.isDiscarded());
} else if (checkpointStats instanceof FailedCheckpointStats) {
final FailedCheckpointStats failedCheckpointStats = ((FailedCheckpointStats) checkpointStats);
return new CheckpointStatistics.FailedCheckpointStatistics(
failedCheckpointStats.getCheckpointId(),
failedCheckpointStats.getStatus(),
failedCheckpointStats.getProperties().isSavepoint(),
failedCheckpointStats.getTriggerTimestamp(),
failedCheckpointStats.getLatestAckTimestamp(),
failedCheckpointStats.getStateSize(),
failedCheckpointStats.getEndToEndDuration(),
0,
failedCheckpointStats.getNumberOfSubtasks(),
failedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
failedCheckpointStats.getProperties().getCheckpointType(),
checkpointStatisticsPerTask,
failedCheckpointStats.getFailureTimestamp(),
failedCheckpointStats.getFailureMessage());
} else if (checkpointStats instanceof PendingCheckpointStats) {
final PendingCheckpointStats pendingCheckpointStats = ((PendingCheckpointStats) checkpointStats);
return new CheckpointStatistics.PendingCheckpointStatistics(
pendingCheckpointStats.getCheckpointId(),
pendingCheckpointStats.getStatus(),
pendingCheckpointStats.getProperties().isSavepoint(),
pendingCheckpointStats.getTriggerTimestamp(),
pendingCheckpointStats.getLatestAckTimestamp(),
pendingCheckpointStats.getStateSize(),
pendingCheckpointStats.getEndToEndDuration(),
0,
pendingCheckpointStats.getNumberOfSubtasks(),
pendingCheckpointStats.getNumberOfAcknowledgedSubtasks(),
pendingCheckpointStats.getProperties().getCheckpointType(),
checkpointStatisticsPerTask
);
} else {
throw new IllegalArgumentException("Given checkpoint stats object of type "
+ checkpointStats.getClass().getName() + " cannot be converted.");
}
}
/**
 * Statistics for a completed checkpoint.
 *
 * <p>Extends the base checkpoint statistics with the externalized checkpoint path and the
 * discarded flag. JSON field names are declared as constants so serialization and the
 * {@code @JsonCreator} constructor stay in sync.
 */
public static final class CompletedCheckpointStatistics extends CheckpointStatistics {
public static final String FIELD_NAME_EXTERNAL_PATH = "external_path";
public static final String FIELD_NAME_DISCARDED = "discarded";
// Path under which the checkpoint was externalized; null if it was not externalized.
@JsonProperty(FIELD_NAME_EXTERNAL_PATH)
@Nullable
private final String externalPath;
// Whether the checkpoint has been discarded in the meantime.
@JsonProperty(FIELD_NAME_DISCARDED)
private final boolean discarded;
// NOTE(review): the super(...) call below forwards its arguments positionally; the order
// must match the base-class constructor (not visible in this excerpt) exactly — verify
// against the enclosing class before reordering anything here.
@JsonCreator
public CompletedCheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long id,
@JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
@JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkPointType,
@JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask,
@JsonProperty(FIELD_NAME_EXTERNAL_PATH) @Nullable String externalPath,
@JsonProperty(FIELD_NAME_DISCARDED) boolean discarded) {
super(
id,
status,
savepoint,
triggerTimestamp,
latestAckTimestamp,
stateSize,
duration,
alignmentBuffered,
numSubtasks,
numAckSubtasks,
checkPointType,
checkpointingStatisticsPerTask);
this.externalPath = externalPath;
this.discarded = discarded;
}
/** @return the externalized checkpoint path, or {@code null} if none was recorded. */
@Nullable
public String getExternalPath() {
return externalPath;
}
/** @return whether the checkpoint has been discarded. */
public boolean isDiscarded() {
return discarded;
}
// Equality considers the superclass state plus the two fields added by this subclass;
// kept consistent with hashCode() below.
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
CompletedCheckpointStatistics that = (CompletedCheckpointStatistics) o;
return discarded == that.discarded &&
Objects.equals(externalPath, that.externalPath);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), externalPath, discarded);
}
}
/**
 * Statistics for a failed checkpoint.
 *
 * <p>Extends the base checkpoint statistics with the failure timestamp and an optional
 * human-readable failure message.
 */
public static final class FailedCheckpointStatistics extends CheckpointStatistics {
public static final String FIELD_NAME_FAILURE_TIMESTAMP = "failure_timestamp";
public static final String FIELD_NAME_FAILURE_MESSAGE = "failure_message";
// Timestamp at which the checkpoint failed.
@JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP)
private final long failureTimestamp;
// Failure cause message; may be null if no message was recorded.
@JsonProperty(FIELD_NAME_FAILURE_MESSAGE)
@Nullable
private final String failureMessage;
// NOTE(review): the super(...) call below forwards its arguments positionally; the order
// must match the base-class constructor (not visible in this excerpt) exactly.
@JsonCreator
public FailedCheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long id,
@JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
@JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkPointType,
@JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask,
@JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP) long failureTimestamp,
@JsonProperty(FIELD_NAME_FAILURE_MESSAGE) @Nullable String failureMessage) {
super(
id,
status,
savepoint,
triggerTimestamp,
latestAckTimestamp,
stateSize,
duration,
alignmentBuffered,
numSubtasks,
numAckSubtasks,
checkPointType,
checkpointingStatisticsPerTask);
this.failureTimestamp = failureTimestamp;
this.failureMessage = failureMessage;
}
/** @return timestamp at which the checkpoint failed. */
public long getFailureTimestamp() {
return failureTimestamp;
}
/** @return failure message, or {@code null} if none was recorded. */
@Nullable
public String getFailureMessage() {
return failureMessage;
}
// Equality considers the superclass state plus the two fields added by this subclass;
// kept consistent with hashCode() below.
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
FailedCheckpointStatistics that = (FailedCheckpointStatistics) o;
return failureTimestamp == that.failureTimestamp &&
Objects.equals(failureMessage, that.failureMessage);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), failureTimestamp, failureMessage);
}
}
/**
 * Statistics for a pending (in-progress) checkpoint.
 *
 * <p>Adds no fields of its own; it only tags the base statistics with the pending state
 * for JSON (de)serialization.
 */
public static final class PendingCheckpointStatistics extends CheckpointStatistics {
// NOTE(review): the super(...) call below forwards its arguments positionally; the order
// must match the base-class constructor (not visible in this excerpt) exactly.
@JsonCreator
public PendingCheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long id,
@JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
@JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkPointType,
@JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask) {
super(
id,
status,
savepoint,
triggerTimestamp,
latestAckTimestamp,
stateSize,
duration,
alignmentBuffered,
numSubtasks,
numAckSubtasks,
checkPointType,
checkpointingStatisticsPerTask);
}
// No additional fields: equality is fully decided by the superclass comparison.
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
return true;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode());
}
}
}
|
/**
 * Statistics for a checkpoint as exposed via the REST API.
 *
 * <p>Concrete subclasses exist for the three checkpoint states:
 * {@link CompletedCheckpointStatistics}, {@link FailedCheckpointStatistics} and
 * {@link PendingCheckpointStatistics}. Instances are derived from runtime stats via
 * {@link #generateCheckpointStatistics(AbstractCheckpointStats, boolean)}.
 */
class CheckpointStatistics implements ResponseBody {
    public static final String FIELD_NAME_ID = "id";
    public static final String FIELD_NAME_STATUS = "status";
    public static final String FIELD_NAME_IS_SAVEPOINT = "is_savepoint";
    public static final String FIELD_NAME_TRIGGER_TIMESTAMP = "trigger_timestamp";
    public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp";
    /**
     * The accurate name of this field should be 'checkpointed_data_size',
     * keep it as before to not break backwards compatibility for old web UI.
     *
     * <p>(The original {@code @see} link to the issue tracker was truncated during
     * extraction and has been removed; consult the project's issue tracker for details.)
     */
    public static final String FIELD_NAME_STATE_SIZE = "state_size";
    public static final String FIELD_NAME_DURATION = "end_to_end_duration";
    public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered";
    public static final String FIELD_NAME_PROCESSED_DATA = "processed_data";
    public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data";
    public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks";
    public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks";
    public static final String FIELD_NAME_TASKS = "tasks";
    public static final String FIELD_NAME_CHECKPOINT_TYPE = "checkpoint_type";

    @JsonProperty(FIELD_NAME_ID)
    private final long id;

    @JsonProperty(FIELD_NAME_STATUS)
    private final CheckpointStatsStatus status;

    @JsonProperty(FIELD_NAME_IS_SAVEPOINT)
    private final boolean savepoint;

    @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP)
    private final long triggerTimestamp;

    @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP)
    private final long latestAckTimestamp;

    @JsonProperty(FIELD_NAME_STATE_SIZE)
    private final long stateSize;

    @JsonProperty(FIELD_NAME_DURATION)
    private final long duration;

    // The factory below always passes 0 for this field; it is kept for REST compatibility.
    @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED)
    private final long alignmentBuffered;

    @JsonProperty(FIELD_NAME_PROCESSED_DATA)
    private final long processedData;

    @JsonProperty(FIELD_NAME_PERSISTED_DATA)
    private final long persistedData;

    @JsonProperty(FIELD_NAME_NUM_SUBTASKS)
    private final int numSubtasks;

    @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS)
    private final int numAckSubtasks;

    @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE)
    private final CheckpointType checkpointType;

    @JsonProperty(FIELD_NAME_TASKS)
    @JsonSerialize(keyUsing = JobVertexIDKeySerializer.class)
    private final Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;

    /**
     * Private: instances are created through the subclasses or Jackson deserialization.
     *
     * @throws NullPointerException if {@code status}, {@code checkpointType} or
     *     {@code checkpointStatisticsPerTask} is {@code null}
     */
    @JsonCreator
    private CheckpointStatistics(
            @JsonProperty(FIELD_NAME_ID) long id,
            @JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
            @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
            @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
            @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
            @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
            @JsonProperty(FIELD_NAME_DURATION) long duration,
            @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
            @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
            @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
            @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
            @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
            @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkpointType,
            @JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask) {
        this.id = id;
        this.status = Preconditions.checkNotNull(status);
        this.savepoint = savepoint;
        this.triggerTimestamp = triggerTimestamp;
        this.latestAckTimestamp = latestAckTimestamp;
        this.stateSize = stateSize;
        this.duration = duration;
        this.alignmentBuffered = alignmentBuffered;
        this.processedData = processedData;
        this.persistedData = persistedData;
        this.numSubtasks = numSubtasks;
        this.numAckSubtasks = numAckSubtasks;
        this.checkpointType = Preconditions.checkNotNull(checkpointType);
        this.checkpointStatisticsPerTask = Preconditions.checkNotNull(checkpointStatisticsPerTask);
    }

    public long getId() {
        return id;
    }

    public CheckpointStatsStatus getStatus() {
        return status;
    }

    public boolean isSavepoint() {
        return savepoint;
    }

    public long getTriggerTimestamp() {
        return triggerTimestamp;
    }

    public long getLatestAckTimestamp() {
        return latestAckTimestamp;
    }

    public long getStateSize() {
        return stateSize;
    }

    public long getDuration() {
        return duration;
    }

    public int getNumSubtasks() {
        return numSubtasks;
    }

    public int getNumAckSubtasks() {
        return numAckSubtasks;
    }

    public CheckpointType getCheckpointType() {
        return checkpointType;
    }

    // FIX(review): dropped the misleading @Nullable — the backing field is guarded by
    // Preconditions.checkNotNull in the constructor and can never be null.
    public Map<JobVertexID, TaskCheckpointStatistics> getCheckpointStatisticsPerTask() {
        return checkpointStatisticsPerTask;
    }

    /**
     * FIX(review): the original had two consecutive {@code @Override} annotations with no
     * {@code equals} body between them (a compile error — {@code @Override} is not
     * repeatable). Restored {@code equals} comparing exactly the fields folded into
     * {@link #hashCode()}, keeping the two methods consistent.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CheckpointStatistics that = (CheckpointStatistics) o;
        return id == that.id &&
            savepoint == that.savepoint &&
            triggerTimestamp == that.triggerTimestamp &&
            latestAckTimestamp == that.latestAckTimestamp &&
            stateSize == that.stateSize &&
            duration == that.duration &&
            alignmentBuffered == that.alignmentBuffered &&
            processedData == that.processedData &&
            persistedData == that.persistedData &&
            numSubtasks == that.numSubtasks &&
            numAckSubtasks == that.numAckSubtasks &&
            status == that.status &&
            checkpointType == that.checkpointType &&
            Objects.equals(checkpointStatisticsPerTask, that.checkpointStatisticsPerTask);
    }

    @Override
    public int hashCode() {
        return Objects.hash(
            id,
            status,
            savepoint,
            triggerTimestamp,
            latestAckTimestamp,
            stateSize,
            duration,
            alignmentBuffered,
            processedData,
            persistedData,
            numSubtasks,
            numAckSubtasks,
            checkpointType,
            checkpointStatisticsPerTask);
    }

    /**
     * Converts the given runtime {@link AbstractCheckpointStats} into the matching
     * {@link CheckpointStatistics} subclass for the REST API.
     *
     * @param checkpointStats runtime stats to convert; must not be {@code null}
     * @param includeTaskCheckpointStatistics whether to populate the per-task map
     *     (otherwise it is left empty)
     * @return REST representation of the given stats
     * @throws IllegalArgumentException if the concrete stats type is not one of
     *     completed / failed / pending
     */
    public static CheckpointStatistics generateCheckpointStatistics(AbstractCheckpointStats checkpointStats, boolean includeTaskCheckpointStatistics) {
        Preconditions.checkNotNull(checkpointStats);
        Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;
        if (includeTaskCheckpointStatistics) {
            Collection<TaskStateStats> taskStateStats = checkpointStats.getAllTaskStateStats();
            checkpointStatisticsPerTask = new HashMap<>(taskStateStats.size());
            for (TaskStateStats taskStateStat : taskStateStats) {
                checkpointStatisticsPerTask.put(
                    taskStateStat.getJobVertexId(),
                    new TaskCheckpointStatistics(
                        checkpointStats.getCheckpointId(),
                        checkpointStats.getStatus(),
                        taskStateStat.getLatestAckTimestamp(),
                        taskStateStat.getStateSize(),
                        taskStateStat.getEndToEndDuration(checkpointStats.getTriggerTimestamp()),
                        0,
                        taskStateStat.getProcessedDataStats(),
                        taskStateStat.getPersistedDataStats(),
                        taskStateStat.getNumberOfSubtasks(),
                        taskStateStat.getNumberOfAcknowledgedSubtasks()));
            }
        } else {
            checkpointStatisticsPerTask = Collections.emptyMap();
        }
        // Dispatch on the concrete runtime stats type; alignmentBuffered is always 0 here.
        if (checkpointStats instanceof CompletedCheckpointStats) {
            final CompletedCheckpointStats completedCheckpointStats = ((CompletedCheckpointStats) checkpointStats);
            return new CheckpointStatistics.CompletedCheckpointStatistics(
                completedCheckpointStats.getCheckpointId(),
                completedCheckpointStats.getStatus(),
                completedCheckpointStats.getProperties().isSavepoint(),
                completedCheckpointStats.getTriggerTimestamp(),
                completedCheckpointStats.getLatestAckTimestamp(),
                completedCheckpointStats.getStateSize(),
                completedCheckpointStats.getEndToEndDuration(),
                0,
                completedCheckpointStats.getProcessedData(),
                completedCheckpointStats.getPersistedData(),
                completedCheckpointStats.getNumberOfSubtasks(),
                completedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
                completedCheckpointStats.getProperties().getCheckpointType(),
                checkpointStatisticsPerTask,
                completedCheckpointStats.getExternalPath(),
                completedCheckpointStats.isDiscarded());
        } else if (checkpointStats instanceof FailedCheckpointStats) {
            final FailedCheckpointStats failedCheckpointStats = ((FailedCheckpointStats) checkpointStats);
            return new CheckpointStatistics.FailedCheckpointStatistics(
                failedCheckpointStats.getCheckpointId(),
                failedCheckpointStats.getStatus(),
                failedCheckpointStats.getProperties().isSavepoint(),
                failedCheckpointStats.getTriggerTimestamp(),
                failedCheckpointStats.getLatestAckTimestamp(),
                failedCheckpointStats.getStateSize(),
                failedCheckpointStats.getEndToEndDuration(),
                0,
                failedCheckpointStats.getProcessedData(),
                failedCheckpointStats.getPersistedData(),
                failedCheckpointStats.getNumberOfSubtasks(),
                failedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
                failedCheckpointStats.getProperties().getCheckpointType(),
                checkpointStatisticsPerTask,
                failedCheckpointStats.getFailureTimestamp(),
                failedCheckpointStats.getFailureMessage());
        } else if (checkpointStats instanceof PendingCheckpointStats) {
            final PendingCheckpointStats pendingCheckpointStats = ((PendingCheckpointStats) checkpointStats);
            return new CheckpointStatistics.PendingCheckpointStatistics(
                pendingCheckpointStats.getCheckpointId(),
                pendingCheckpointStats.getStatus(),
                pendingCheckpointStats.getProperties().isSavepoint(),
                pendingCheckpointStats.getTriggerTimestamp(),
                pendingCheckpointStats.getLatestAckTimestamp(),
                pendingCheckpointStats.getStateSize(),
                pendingCheckpointStats.getEndToEndDuration(),
                0,
                pendingCheckpointStats.getProcessedData(),
                pendingCheckpointStats.getPersistedData(),
                pendingCheckpointStats.getNumberOfSubtasks(),
                pendingCheckpointStats.getNumberOfAcknowledgedSubtasks(),
                pendingCheckpointStats.getProperties().getCheckpointType(),
                checkpointStatisticsPerTask
            );
        } else {
            throw new IllegalArgumentException("Given checkpoint stats object of type "
                + checkpointStats.getClass().getName() + " cannot be converted.");
        }
    }

    /**
     * Statistics for a completed checkpoint. Adds the externalized path and discarded flag.
     */
    public static final class CompletedCheckpointStatistics extends CheckpointStatistics {
        public static final String FIELD_NAME_EXTERNAL_PATH = "external_path";
        public static final String FIELD_NAME_DISCARDED = "discarded";

        // Path under which the checkpoint was externalized; null if not externalized.
        @JsonProperty(FIELD_NAME_EXTERNAL_PATH)
        @Nullable
        private final String externalPath;

        // Whether the checkpoint has been discarded in the meantime.
        @JsonProperty(FIELD_NAME_DISCARDED)
        private final boolean discarded;

        @JsonCreator
        public CompletedCheckpointStatistics(
                @JsonProperty(FIELD_NAME_ID) long id,
                @JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
                @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
                @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
                @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
                @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
                @JsonProperty(FIELD_NAME_DURATION) long duration,
                @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
                @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
                @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
                @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
                @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
                @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkpointType,
                @JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask,
                @JsonProperty(FIELD_NAME_EXTERNAL_PATH) @Nullable String externalPath,
                @JsonProperty(FIELD_NAME_DISCARDED) boolean discarded) {
            super(
                id,
                status,
                savepoint,
                triggerTimestamp,
                latestAckTimestamp,
                stateSize,
                duration,
                alignmentBuffered,
                processedData,
                persistedData,
                numSubtasks,
                numAckSubtasks,
                checkpointType,
                checkpointingStatisticsPerTask);
            this.externalPath = externalPath;
            this.discarded = discarded;
        }

        /** @return the externalized checkpoint path, or {@code null} if none. */
        @Nullable
        public String getExternalPath() {
            return externalPath;
        }

        /** @return whether the checkpoint has been discarded. */
        public boolean isDiscarded() {
            return discarded;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            if (!super.equals(o)) {
                return false;
            }
            CompletedCheckpointStatistics that = (CompletedCheckpointStatistics) o;
            return discarded == that.discarded &&
                Objects.equals(externalPath, that.externalPath);
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(), externalPath, discarded);
        }
    }

    /**
     * Statistics for a failed checkpoint. Adds the failure timestamp and optional message.
     */
    public static final class FailedCheckpointStatistics extends CheckpointStatistics {
        public static final String FIELD_NAME_FAILURE_TIMESTAMP = "failure_timestamp";
        public static final String FIELD_NAME_FAILURE_MESSAGE = "failure_message";

        // Timestamp at which the checkpoint failed.
        @JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP)
        private final long failureTimestamp;

        // Failure cause message; may be null if none was recorded.
        @JsonProperty(FIELD_NAME_FAILURE_MESSAGE)
        @Nullable
        private final String failureMessage;

        @JsonCreator
        public FailedCheckpointStatistics(
                @JsonProperty(FIELD_NAME_ID) long id,
                @JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
                @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
                @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
                @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
                @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
                @JsonProperty(FIELD_NAME_DURATION) long duration,
                @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
                @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
                @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
                @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
                @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
                @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkpointType,
                @JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask,
                @JsonProperty(FIELD_NAME_FAILURE_TIMESTAMP) long failureTimestamp,
                @JsonProperty(FIELD_NAME_FAILURE_MESSAGE) @Nullable String failureMessage) {
            super(
                id,
                status,
                savepoint,
                triggerTimestamp,
                latestAckTimestamp,
                stateSize,
                duration,
                alignmentBuffered,
                processedData,
                persistedData,
                numSubtasks,
                numAckSubtasks,
                checkpointType,
                checkpointingStatisticsPerTask);
            this.failureTimestamp = failureTimestamp;
            this.failureMessage = failureMessage;
        }

        /** @return timestamp at which the checkpoint failed. */
        public long getFailureTimestamp() {
            return failureTimestamp;
        }

        /** @return failure message, or {@code null} if none was recorded. */
        @Nullable
        public String getFailureMessage() {
            return failureMessage;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            if (!super.equals(o)) {
                return false;
            }
            FailedCheckpointStatistics that = (FailedCheckpointStatistics) o;
            return failureTimestamp == that.failureTimestamp &&
                Objects.equals(failureMessage, that.failureMessage);
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(), failureTimestamp, failureMessage);
        }
    }

    /**
     * Statistics for a pending checkpoint. Adds no fields of its own.
     */
    public static final class PendingCheckpointStatistics extends CheckpointStatistics {
        @JsonCreator
        public PendingCheckpointStatistics(
                @JsonProperty(FIELD_NAME_ID) long id,
                @JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
                @JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
                @JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
                @JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
                @JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
                @JsonProperty(FIELD_NAME_DURATION) long duration,
                @JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
                @JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
                @JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
                @JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
                @JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
                @JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) CheckpointType checkpointType,
                @JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class) @JsonProperty(FIELD_NAME_TASKS) Map<JobVertexID, TaskCheckpointStatistics> checkpointingStatisticsPerTask) {
            super(
                id,
                status,
                savepoint,
                triggerTimestamp,
                latestAckTimestamp,
                stateSize,
                duration,
                alignmentBuffered,
                processedData,
                persistedData,
                numSubtasks,
                numAckSubtasks,
                checkpointType,
                checkpointingStatisticsPerTask);
        }

        // No additional fields: equality is fully decided by the superclass comparison.
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            if (!super.equals(o)) {
                return false;
            }
            return true;
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode());
        }
    }
}
|
Hi @silver — these two cases are meant to explain the critical scenario.
|
/**
 * Exercises constant folding of date_format(): supported MySQL-style '%' specifiers,
 * pass-through of plain text and unknown escapes, Java-style patterns, ISO-week
 * behavior around a year boundary, unsupported specifiers (must throw), and
 * empty/blank format strings.
 */
public void dateFormat() {
// Pin the locale so locale-sensitive formatting is deterministic in CI.
Locale.setDefault(Locale.ENGLISH);
ConstantOperator testDate = ConstantOperator.createDatetime(LocalDateTime.of(2001, 1, 9, 13, 4, 5));
// --- supported '%' specifiers against 2001-01-09 13:04:05 ---
assertEquals("1",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%c")).getVarchar());
assertEquals("09",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%d")).getVarchar());
assertEquals("9",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%e")).getVarchar());
assertEquals("13",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%H")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%h")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%I")).getVarchar());
assertEquals("04",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%i")).getVarchar());
assertEquals("009",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%j")).getVarchar());
assertEquals("13",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%k")).getVarchar());
assertEquals("1",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%l")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%m")).getVarchar());
assertEquals("05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%S")).getVarchar());
assertEquals("05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%s")).getVarchar());
assertEquals("13:04:05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%T")).getVarchar());
assertEquals("02",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar());
assertEquals("2001",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%Y")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%y")).getVarchar());
assertEquals("%",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%%")).getVarchar());
// --- non-specifier text and unrecognized escapes are passed through verbatim ---
assertEquals("foo",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("foo")).getVarchar());
assertEquals("g",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%g")).getVarchar());
assertEquals("4",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%4")).getVarchar());
assertEquals("02",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar());
// --- Java-style patterns (note: a bare "yyyy" is returned as-is, while "yyyyMMdd"
// and the date/time patterns below are actually formatted) ---
assertEquals("yyyy",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy")).getVarchar());
assertEquals("20010109",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd")).getVarchar());
assertEquals("yyyyMMdd HH:mm:ss",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd HH:mm:ss"))
.getVarchar());
assertEquals("HH:mm:ss",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("HH:mm:ss")).getVarchar());
assertEquals("2001-01-09",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd"))
.getVarchar());
assertEquals("2001-01-09 13:04:05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd HH:mm:ss"))
.getVarchar());
// --- DATE (not DATETIME) input with a '%' pattern, plus fractional seconds (%f) ---
assertEquals("2001-01-09",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2001, 1, 9, 13, 4, 5)),
ConstantOperator.createVarchar("%Y-%m-%d"))
.getVarchar());
assertEquals("123000", ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2022, 3, 13, 0, 0, 0, 123000000)),
ConstantOperator.createVarchar("%f")).getVarchar());
assertEquals("asdfafdfsçv",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("asdfafdfsçv")).getVarchar());
// --- ISO week (%v) year-boundary: 2024-12-31 belongs to week 01 of the next
// ISO week-year, not week 53 of 2024 — the critical scenario under review ---
Assert.assertNotEquals("53",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)),
ConstantOperator.createVarchar("%v")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)),
ConstantOperator.createVarchar("%v")).getVarchar());
// --- specifiers that are not supported for constant folding must throw ---
Assert.assertThrows("%a not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%a")).getVarchar());
Assert.assertThrows("%b not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%b")).getVarchar());
Assert.assertThrows("%M not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%M")).getVarchar());
Assert.assertThrows("%W not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%W")).getVarchar());
Assert.assertThrows("%x not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%x")).getVarchar());
Assert.assertThrows("%w not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%w")).getVarchar());
Assert.assertThrows("%p not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%p")).getVarchar());
Assert.assertThrows("%r not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%r")).getVarchar());
Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("%U")).getVarchar());
Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("%X")).getVarchar());
// --- empty format yields NULL; a blank format is returned as-is ---
assertTrue(ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar(""))
.isNull());
assertEquals(" ",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar(" "))
.getVarchar());
}
|
ConstantOperator.createVarchar("%v")).getVarchar());
|
/**
 * Exercises constant folding of date_format(): supported MySQL-style '%' specifiers,
 * pass-through of plain text and unknown escapes, Java-style patterns, ISO-week
 * behavior around a year boundary, unsupported specifiers (must throw), and
 * empty/blank format strings.
 */
public void dateFormat() {
// Pin the locale so locale-sensitive formatting is deterministic in CI.
Locale.setDefault(Locale.ENGLISH);
ConstantOperator testDate = ConstantOperator.createDatetime(LocalDateTime.of(2001, 1, 9, 13, 4, 5));
// --- supported '%' specifiers against 2001-01-09 13:04:05 ---
assertEquals("1",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%c")).getVarchar());
assertEquals("09",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%d")).getVarchar());
assertEquals("9",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%e")).getVarchar());
assertEquals("13",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%H")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%h")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%I")).getVarchar());
assertEquals("04",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%i")).getVarchar());
assertEquals("009",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%j")).getVarchar());
assertEquals("13",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%k")).getVarchar());
assertEquals("1",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%l")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%m")).getVarchar());
assertEquals("05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%S")).getVarchar());
assertEquals("05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%s")).getVarchar());
assertEquals("13:04:05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%T")).getVarchar());
assertEquals("02",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar());
assertEquals("2001",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%Y")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%y")).getVarchar());
assertEquals("%",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%%")).getVarchar());
// --- non-specifier text and unrecognized escapes are passed through verbatim ---
assertEquals("foo",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("foo")).getVarchar());
assertEquals("g",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%g")).getVarchar());
assertEquals("4",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%4")).getVarchar());
assertEquals("02",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar());
// --- Java-style patterns (note: a bare "yyyy" is returned as-is, while "yyyyMMdd"
// and the date/time patterns below are actually formatted) ---
assertEquals("yyyy",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy")).getVarchar());
assertEquals("20010109",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd")).getVarchar());
assertEquals("yyyyMMdd HH:mm:ss",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd HH:mm:ss"))
.getVarchar());
assertEquals("HH:mm:ss",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("HH:mm:ss")).getVarchar());
assertEquals("2001-01-09",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd"))
.getVarchar());
assertEquals("2001-01-09 13:04:05",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd HH:mm:ss"))
.getVarchar());
// --- DATE (not DATETIME) input with a '%' pattern, plus fractional seconds (%f) ---
assertEquals("2001-01-09",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2001, 1, 9, 13, 4, 5)),
ConstantOperator.createVarchar("%Y-%m-%d"))
.getVarchar());
assertEquals("123000", ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2022, 3, 13, 0, 0, 0, 123000000)),
ConstantOperator.createVarchar("%f")).getVarchar());
assertEquals("asdfafdfsçv",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("asdfafdfsçv")).getVarchar());
// --- ISO week (%v) year-boundary: 2024-12-31 belongs to week 01 of the next
// ISO week-year, not week 53 of 2024 — the critical scenario under review ---
Assert.assertNotEquals("53",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)),
ConstantOperator.createVarchar("%v")).getVarchar());
assertEquals("01",
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)),
ConstantOperator.createVarchar("%v")).getVarchar());
// --- specifiers that are not supported for constant folding must throw ---
Assert.assertThrows("%a not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%a")).getVarchar());
Assert.assertThrows("%b not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%b")).getVarchar());
Assert.assertThrows("%M not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%M")).getVarchar());
Assert.assertThrows("%W not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%W")).getVarchar());
Assert.assertThrows("%x not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%x")).getVarchar());
Assert.assertThrows("%w not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%w")).getVarchar());
Assert.assertThrows("%p not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%p")).getVarchar());
Assert.assertThrows("%r not supported in date format string", IllegalArgumentException.class, () ->
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%r")).getVarchar());
Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("%U")).getVarchar());
Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions
.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)),
ConstantOperator.createVarchar("%X")).getVarchar());
// --- empty format yields NULL; a blank format is returned as-is ---
assertTrue(ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar(""))
.isNull());
assertEquals(" ",
ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar(" "))
.getVarchar());
}
|
class ScalarOperatorFunctionsTest {
private static ConstantOperator O_DT_20101102_183010;
private static ConstantOperator O_DT_20101202_023010;
private static ConstantOperator O_DT_20150323_092355;
private static ConstantOperator O_TI_10;
private static ConstantOperator O_SI_10;
private static ConstantOperator O_INT_10;
private static ConstantOperator O_FLOAT_100;
private static ConstantOperator O_DOUBLE_100;
private static ConstantOperator O_BI_100;
private static ConstantOperator O_BI_3;
private static ConstantOperator O_BI_10;
private static ConstantOperator O_BI_131;
private static ConstantOperator O_BI_NEG_3;
private static ConstantOperator O_LI_100;
private static ConstantOperator O_LI_NEG_100;
private static ConstantOperator O_DECIMAL_100;
private static ConstantOperator O_DECIMAL32P7S2_100;
private static ConstantOperator O_DECIMAL32P9S0_100;
private static ConstantOperator O_DECIMAL64P18S15_100;
private static ConstantOperator O_DECIMAL64P15S10_100;
private static ConstantOperator O_DECIMAL128P38S20_100;
private static ConstantOperator O_DECIMAL128P30S2_100;
@Before
public void setUp() throws AnalysisException {
    // Rebuild the shared constant-operator fixtures before every test. Names encode
    // type and value: O_DT_* = DATETIME, O_TI/SI/INT/BI/LI = tiny/small/int/bigint/largeint,
    // O_DECIMAL<width>P<precision>S<scale>_100 = decimal v3 fixture holding 100.
    O_DT_20101102_183010 = ConstantOperator.createDatetime(LocalDateTime.of(2010, 11, 2, 18, 30, 10));
    O_DT_20101202_023010 = ConstantOperator.createDatetime(LocalDateTime.of(2010, 12, 2, 2, 30, 10));
    O_DT_20150323_092355 = ConstantOperator.createDatetime(LocalDateTime.of(2015, 3, 23, 9, 23, 55));
    O_TI_10 = ConstantOperator.createTinyInt((byte) 10);
    O_SI_10 = ConstantOperator.createSmallInt((short) 10);
    O_INT_10 = ConstantOperator.createInt(10);
    O_FLOAT_100 = ConstantOperator.createFloat(100);
    // NOTE(review): O_DOUBLE_100 is also built with createFloat — presumably the float
    // and double test paths share a representation; confirm createDouble is not intended.
    O_DOUBLE_100 = ConstantOperator.createFloat(100);
    O_BI_100 = ConstantOperator.createBigint(100);
    O_BI_3 = ConstantOperator.createBigint(3);
    O_BI_10 = ConstantOperator.createBigint(10);
    O_BI_131 = ConstantOperator.createBigint(131);
    O_BI_NEG_3 = ConstantOperator.createBigint(-3);
    O_LI_100 = ConstantOperator.createLargeInt(new BigInteger("100"));
    O_LI_NEG_100 = ConstantOperator.createLargeInt(new BigInteger("-100"));
    // Decimal v2 fixture, then v3 fixtures covering DECIMAL32/64/128 widths
    // with assorted precision/scale combinations.
    O_DECIMAL_100 = ConstantOperator.createDecimal(new BigDecimal(100), Type.DECIMALV2);
    O_DECIMAL32P7S2_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL32, 7, 2));
    O_DECIMAL32P9S0_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL32, 9, 0));
    O_DECIMAL64P15S10_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 15, 10));
    O_DECIMAL64P18S15_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 18, 15));
    O_DECIMAL128P38S20_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 20));
    O_DECIMAL128P30S2_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 30, 2));
}
@Test
public void timeDiff() {
    // time_diff returns the signed gap in seconds (first minus second operand).
    ConstantOperator diff = ScalarOperatorFunctions.timeDiff(O_DT_20101102_183010, O_DT_20101202_023010);
    assertEquals(-2534400.0, diff.getTime(), 1);
}
@Test
public void dateDiff() {
    // date_diff in days; negative because the first operand is earlier.
    ConstantOperator fromNov = ScalarOperatorFunctions.dateDiff(O_DT_20101102_183010, O_DT_20150323_092355);
    assertEquals(-1602, fromNov.getInt());
    ConstantOperator fromDec = ScalarOperatorFunctions.dateDiff(O_DT_20101202_023010, O_DT_20150323_092355);
    assertEquals(-1572, fromDec.getInt());
}
@Test
public void yearsAdd() {
    // 2015-03-23 09:23:55 plus 10 years.
    ConstantOperator result = ScalarOperatorFunctions.yearsAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2025-03-23T09:23:55", result.getDatetime().toString());
}
@Test
public void monthsAdd() {
    // 2015-03-23 09:23:55 plus 10 months.
    ConstantOperator result = ScalarOperatorFunctions.monthsAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2016-01-23T09:23:55", result.getDatetime().toString());
}
@Test
public void daysAdd() {
    // 2015-03-23 09:23:55 plus 10 days.
    ConstantOperator result = ScalarOperatorFunctions.daysAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-04-02T09:23:55", result.getDatetime().toString());
}
@Test
public void hoursAdd() {
    // 2015-03-23 09:23:55 plus 10 hours.
    ConstantOperator result = ScalarOperatorFunctions.hoursAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T19:23:55", result.getDatetime().toString());
}
@Test
public void minutesAdd() {
    // 2015-03-23 09:23:55 plus 10 minutes.
    ConstantOperator result = ScalarOperatorFunctions.minutesAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T09:33:55", result.getDatetime().toString());
}
@Test
public void secondsAdd() {
    // 2015-03-23 09:23:55 plus 10 seconds.
    ConstantOperator result = ScalarOperatorFunctions.secondsAdd(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T09:24:05", result.getDatetime().toString());
}
@Test
public void dateTrunc() {
    // Table-driven check of date_trunc: each row is
    // {unit, input "yyyy-MM-dd HH:mm:ss", expected LocalDateTime.toString()}.
    // Covers second/minute/hour/day/month/year plus week (truncates to Monday)
    // and quarter (truncates to Jan/Apr/Jul/Oct 1st).
    String[][] testCases = {
            {"second", "2015-03-23 09:23:55", "2015-03-23T09:23:55"},
            {"minute", "2015-03-23 09:23:55", "2015-03-23T09:23"},
            {"hour", "2015-03-23 09:23:55", "2015-03-23T09:00"},
            {"day", "2015-03-23 09:23:55", "2015-03-23T00:00"},
            {"month", "2015-03-23 09:23:55", "2015-03-01T00:00"},
            {"year", "2015-03-23 09:23:55", "2015-01-01T00:00"},
            {"week", "2015-01-01 09:23:55", "2014-12-29T00:00"},
            {"week", "2015-03-22 09:23:55", "2015-03-16T00:00"},
            {"week", "2015-03-23 09:23:55", "2015-03-23T00:00"},
            {"week", "2015-03-24 09:23:55", "2015-03-23T00:00"},
            {"week", "2020-02-29 09:23:55", "2020-02-24T00:00"},
            {"quarter", "2015-01-01 09:23:55", "2015-01-01T00:00"},
            {"quarter", "2015-03-23 09:23:55", "2015-01-01T00:00"},
            {"quarter", "2015-04-01 09:23:55", "2015-04-01T00:00"},
            {"quarter", "2015-05-23 09:23:55", "2015-04-01T00:00"},
            {"quarter", "2015-07-01 09:23:55", "2015-07-01T00:00"},
            {"quarter", "2015-07-23 09:23:55", "2015-07-01T00:00"},
            {"quarter", "2015-10-01 09:23:55", "2015-10-01T00:00"},
            {"quarter", "2015-11-23 09:23:55", "2015-10-01T00:00"},
            {"day", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"day", "2020-02-02 09:23:55", "2020-02-02T00:00"},
            {"day", "2020-03-06 09:23:55", "2020-03-06T00:00"},
            {"day", "2020-04-08 09:23:55", "2020-04-08T00:00"},
            {"day", "2020-05-09 09:23:55", "2020-05-09T00:00"},
            {"day", "2020-11-03 09:23:55", "2020-11-03T00:00"},
            {"month", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"month", "2020-02-02 09:23:55", "2020-02-01T00:00"},
            {"month", "2020-03-06 09:23:55", "2020-03-01T00:00"},
            {"month", "2020-04-08 09:23:55", "2020-04-01T00:00"},
            {"month", "2020-05-09 09:23:55", "2020-05-01T00:00"},
            {"month", "2020-11-03 09:23:55", "2020-11-01T00:00"},
            {"year", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-02-02 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-03-06 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-04-08 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-05-09 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-11-03 09:23:55", "2020-01-01T00:00"},
            {"week", "2020-01-01 09:23:55", "2019-12-30T00:00"},
            {"week", "2020-02-02 09:23:55", "2020-01-27T00:00"},
            {"week", "2020-03-06 09:23:55", "2020-03-02T00:00"},
            {"week", "2020-04-08 09:23:55", "2020-04-06T00:00"},
            {"week", "2020-05-09 09:23:55", "2020-05-04T00:00"},
            {"week", "2020-11-03 09:23:55", "2020-11-02T00:00"},
            {"quarter", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-02-02 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-03-06 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-04-08 09:23:55", "2020-04-01T00:00"},
            {"quarter", "2020-05-09 09:23:55", "2020-04-01T00:00"},
            {"quarter", "2020-11-03 09:23:55", "2020-10-01T00:00"},
    };
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    for (String[] tc : testCases) {
        ConstantOperator fmt = ConstantOperator.createVarchar(tc[0]);
        ConstantOperator date = ConstantOperator.createDatetime(LocalDateTime.parse(tc[1], formatter));
        assertEquals(tc[2],
                ScalarOperatorFunctions.dateTrunc(fmt, date).getDatetime().toString());
    }
    // An unrecognized unit string must be rejected.
    Assert.assertThrows("<ERROR> not supported in date_trunc format string", IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateTrunc(ConstantOperator.createVarchar("<ERROR>"), O_DT_20150323_092355)
                    .getVarchar());
}
// Fix: the method carried two @Test annotations; @Test is not @Repeatable, so a
// duplicate annotation is a compile error (JLS 9.7.4). Exactly one is kept.
@Test
public void dateParse() {
    // Exercises date_parse/str_to_date-style parsing against MySQL format
    // specifiers (%Y, %m, %d, %H/%k, %i, %s/%S, %f, %j, %T, %y, ...), including
    // leading-whitespace tolerance, literal text in the pattern, and error cases.
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013,05,10"), ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDatetime().toString());
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar(" 2013,05,10 "),
                    ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDatetime().toString());
    assertEquals("2013-05-17T12:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime().toString());
    assertEquals("2013-12-01T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013121"),
                    ConstantOperator.createVarchar("%Y%m%d")).getDatetime().toString());
    // %f microsecond handling, including trailing zeros.
    assertEquals("2013-05-17T12:35:10.000123", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.000123"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T12:35:10.000001", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.000001"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T12:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.00000"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T00:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 00:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime().toString());
    // Literal text mixed into the pattern must match the input verbatim.
    assertEquals("2013-05-17T23:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("abc 2013-05-17 fff 23:35:10 xyz"),
                    ConstantOperator.createVarchar("abc %Y-%m-%d fff %H:%i:%s xyz")).getDatetime().toString());
    // %j = day of year.
    assertEquals("2019-05-09T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019,129"), ConstantOperator.createVarchar("%Y,%j"))
            .getDatetime().toString());
    assertEquals("2019-05-09T12:10:45", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("12:10:45-20190509"),
                    ConstantOperator.createVarchar("%T-%Y%m%d")).getDatetime().toString());
    assertEquals("2019-05-09T09:10:45", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("20190509-9:10:45"),
                    ConstantOperator.createVarchar("%Y%m%d-%k:%i:%S")).getDatetime().toString());
    assertEquals("2020-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-02-21"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).toString());
    // %y two-digit years: 00-69 -> 20xx, 70-99 -> 19xx.
    assertEquals("2020-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("20-02-21"),
                    ConstantOperator.createVarchar("%y-%m-%d")).toString());
    assertEquals("1998-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("98-02-21"),
                    ConstantOperator.createVarchar("%y-%m-%d")).toString());
    // Incomplete or invalid dates must throw.
    Assert.assertThrows(DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("201905"),
                    ConstantOperator.createVarchar("%Y%m")).getDatetime());
    Assert.assertThrows(DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("20190507"),
                    ConstantOperator.createVarchar("%Y%m")).getDatetime());
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019-02-29"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime());
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019-02-29 11:12:13"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime());
    // Unsupported specifiers are rejected.
    Assert.assertThrows(IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-2-21"),
                    ConstantOperator.createVarchar("%w")).getVarchar());
    Assert.assertThrows(IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-02-21"),
                    ConstantOperator.createVarchar("%w")).getVarchar());
    // Leading tab/newline/carriage-return are tolerated; form feed is not.
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\t 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\n 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\r 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    Assert.assertThrows(DateTimeParseException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("\f 2020-02-21"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getVarchar());
    Assert.assertThrows("Unable to obtain LocalDateTime", DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %h:%i:%s")).getDatetime());
    assertEquals("2022-10-18T01:02:03", ScalarOperatorFunctions.dateParse(
            ConstantOperator.createVarchar("2022-10-18 01:02:03"),
            ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).
            getDatetime().toString());
    assertEquals("2022-10-18T01:02", ScalarOperatorFunctions.dateParse(
            ConstantOperator.createVarchar("2022-10-18 01:02:03"),
            ConstantOperator.createVarchar("%Y-%m-%d %H:%i")).
            getDatetime().toString());
    Assert.assertThrows("Unable to obtain LocalDateTime", DateTimeException.class,
            () -> ScalarOperatorFunctions.dateParse(
                    ConstantOperator.createVarchar("2022-10-18 01:02:03"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%s")).getDatetime());
}
@Test
public void str2Date() {
    // str2date keeps only the date part; rows are {input, format, expected date}.
    // Two-digit %y years map 00-69 -> 20xx and 70-99 -> 19xx.
    String[][] okCases = {
            {"2013,05,10", "%Y,%m,%d", "2013-05-10T00:00"},
            {" 2013,05,10 ", "%Y,%m,%d", "2013-05-10T00:00"},
            {"2013-05-17 12:35:10", "%Y-%m-%d %H:%i:%s", "2013-05-17T00:00"},
            {"13-05-17 12:35:10", "%y-%m-%d %H:%i:%s", "2013-05-17T00:00"},
            {"98-05-17 12:35:10", "%y-%m-%d %H:%i:%s", "1998-05-17T00:00"},
    };
    for (String[] tc : okCases) {
        ConstantOperator parsed = ScalarOperatorFunctions.str2Date(
                ConstantOperator.createVarchar(tc[0]), ConstantOperator.createVarchar(tc[1]));
        assertEquals(tc[2], parsed.getDate().toString());
    }
    // 2019 is not a leap year, so Feb 29 must fail to parse.
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("2019-02-29"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime());
}
@Test
public void yearsSub() {
    // 2015-03-23 09:23:55 minus 10 years.
    ConstantOperator result = ScalarOperatorFunctions.yearsSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2005-03-23T09:23:55", result.getDatetime().toString());
}
@Test
public void monthsSub() {
    // 2015-03-23 09:23:55 minus 10 months.
    ConstantOperator result = ScalarOperatorFunctions.monthsSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2014-05-23T09:23:55", result.getDatetime().toString());
}
@Test
public void daysSub() {
    // 2015-03-23 09:23:55 minus 10 days.
    ConstantOperator result = ScalarOperatorFunctions.daysSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-13T09:23:55", result.getDatetime().toString());
}
@Test
public void hoursSub() {
    // 2015-03-23 09:23:55 minus 10 hours crosses the previous midnight.
    ConstantOperator result = ScalarOperatorFunctions.hoursSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-22T23:23:55", result.getDatetime().toString());
}
@Test
public void minutesSub() {
    // 2015-03-23 09:23:55 minus 10 minutes.
    ConstantOperator result = ScalarOperatorFunctions.minutesSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T09:13:55", result.getDatetime().toString());
}
@Test
public void secondsSub() {
    // 2015-03-23 09:23:55 minus 10 seconds.
    ConstantOperator result = ScalarOperatorFunctions.secondsSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T09:23:45", result.getDatetime().toString());
}
@Test
public void year() {
    // year() extracts the year component and types the result as SMALLINT.
    ConstantOperator input = ConstantOperator.createDatetime(LocalDateTime.of(2000, 10, 21, 12, 0));
    ConstantOperator result = ScalarOperatorFunctions.year(input);
    assertEquals(Type.SMALLINT, result.getType());
    assertEquals(2000, result.getSmallint());
}
@Test
public void month() {
    // month() of 2015-03-23 is 3, carried as TINYINT.
    ConstantOperator result = ScalarOperatorFunctions.month(O_DT_20150323_092355);
    assertEquals(3, result.getTinyInt());
}
@Test
public void day() {
    // day() of 2015-03-23 is 23, carried as TINYINT.
    ConstantOperator result = ScalarOperatorFunctions.day(O_DT_20150323_092355);
    assertEquals(23, result.getTinyInt());
}
@Test
public void unixTimestamp() {
    // unix_timestamp for a post-2038 datetime (exceeds 32-bit seconds) and for
    // the shared 2015 fixture.
    ConstantOperator farFuture = ConstantOperator.createDatetime(LocalDateTime.of(2050, 3, 23, 9, 23, 55));
    assertEquals(2531611435L, ScalarOperatorFunctions.unixTimestamp(farFuture).getBigint());
    assertEquals(1427073835L, ScalarOperatorFunctions.unixTimestamp(O_DT_20150323_092355).getBigint());
}
@Test
public void convert_tz() {
    // Asia/Shanghai (UTC+8) to America/Los_Angeles (UTC-7 under August DST):
    // the wall-clock moves back 15 hours, for both DATETIME and DATE inputs.
    ConstantOperator fromZone = ConstantOperator.createVarchar("Asia/Shanghai");
    ConstantOperator toZone = ConstantOperator.createVarchar("America/Los_Angeles");
    ConstantOperator asDatetime = ConstantOperator.createDatetime(LocalDateTime.of(2019, 8, 1, 13, 21, 3));
    assertEquals("2019-07-31T22:21:03",
            ScalarOperatorFunctions.convert_tz(asDatetime, fromZone, toZone).getDatetime().toString());
    ConstantOperator asDate = ConstantOperator.createDate(LocalDateTime.of(2019, 8, 1, 0, 0, 0));
    assertEquals("2019-07-31T09:00",
            ScalarOperatorFunctions.convert_tz(asDate, fromZone, toZone).getDatetime().toString());
}
@Test
public void fromUnixTime() throws AnalysisException {
    // Epoch + 10 seconds rendered in the default UTC+8 session zone.
    ConstantOperator result = ScalarOperatorFunctions.fromUnixTime(O_BI_10);
    assertEquals("1970-01-01 08:00:10", result.getVarchar());
}
@Test
public void curDate() {
    // curdate() returns the session start date truncated to midnight.
    ConnectContext ctx = new ConnectContext(null);
    ctx.setThreadLocalInfo();
    ctx.setStartTime();
    LocalDateTime midnightToday = LocalDate.now().atStartOfDay();
    assertEquals(midnightToday, ScalarOperatorFunctions.curDate().getDate());
}
@Test
public void nextDay() {
    // next_day: the first Sunday strictly after 2015-03-23 (a Monday) is 2015-03-29.
    ConstantOperator sunday = ConstantOperator.createVarchar("Sunday");
    ConstantOperator result = ScalarOperatorFunctions.nextDay(O_DT_20150323_092355, sunday);
    assertEquals("2015-03-29T09:23:55", result.getDate().toString());
    // Unrecognized day-of-week names are rejected.
    Assert.assertThrows("undefine_dow not supported in next_day dow_string", IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.nextDay(O_DT_20150323_092355,
                    ConstantOperator.createVarchar("undefine_dow")).getVarchar());
}
@Test
public void previousDay() {
    // previous_day: the last Sunday strictly before 2015-03-23 (a Monday) is 2015-03-22.
    ConstantOperator sunday = ConstantOperator.createVarchar("Sunday");
    ConstantOperator result = ScalarOperatorFunctions.previousDay(O_DT_20150323_092355, sunday);
    assertEquals("2015-03-22T09:23:55", result.getDate().toString());
    // Unrecognized day-of-week names are rejected.
    Assert.assertThrows("undefine_dow not supported in previous_day dow_string", IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.previousDay(O_DT_20150323_092355,
                    ConstantOperator.createVarchar("undefine_dow")).getVarchar());
}
@Test
public void makeDate() {
    // make_date(year, day_of_year): NULL for NULL inputs or out-of-range
    // arguments; otherwise Jan 1 of `year` advanced by day_of_year - 1.
    ConnectContext ctx = new ConnectContext(null);
    ctx.setThreadLocalInfo();
    ctx.setStartTime();
    ConstantOperator nullDate = ConstantOperator.createNull(Type.DATE);
    // Any NULL operand yields NULL.
    assertEquals(nullDate, ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT),
            ConstantOperator.createNull(Type.INT)));
    assertEquals(nullDate, ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT),
            ConstantOperator.createInt(1)));
    assertEquals(nullDate, ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(1),
            ConstantOperator.createNull(Type.INT)));
    // Out-of-range day (< 1 or > 366) or year (< 0 or > 9999) yields NULL.
    int[][] outOfRange = {{2000, 0}, {2000, 367}, {-1, 1}, {10000, 1}};
    for (int[] pair : outOfRange) {
        assertEquals(nullDate, ScalarOperatorFunctions.makeDate(
                ConstantOperator.createInt(pair[0]), ConstantOperator.createInt(pair[1])));
    }
    // In-range values, including leap-year day 366 and year 0.
    assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 1, 1, 0, 0, 0)),
            ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(1)));
    assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 12, 31, 0, 0, 0)),
            ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(366)));
    assertEquals(ConstantOperator.createDate(LocalDateTime.of(0, 1, 1, 0, 0, 0)),
            ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(0), ConstantOperator.createInt(1)));
}
@Test
public void timeSlice() throws AnalysisException {
    // Table-driven check of time_slice(datetime, interval, unit, boundary):
    // aligns a datetime down ("floor") or up ("ceil") to an interval grid
    // anchored at 0001-01-01 00:00:00. A Param row either carries an expected
    // result or an expected error-message fragment (`e`).
    class Param {
        final LocalDateTime dateTime;
        final int interval;
        final String unit;
        final String boundary;
        LocalDateTime expect;   // expected aligned value (null when an error is expected)
        String e;               // expected error-message fragment (null when success is expected)
        public Param(LocalDateTime dateTime, int interval, String unit, LocalDateTime expect) {
            this(dateTime, interval, unit, "floor", expect);
        }
        private Param(LocalDateTime dateTime, int interval, String unit, String boundary, LocalDateTime expect) {
            this.dateTime = dateTime;
            this.interval = interval;
            this.unit = unit;
            this.boundary = boundary;
            this.expect = expect;
        }
        private Param(LocalDateTime dateTime, int interval, String unit, String boundary, String e) {
            this.dateTime = dateTime;
            this.interval = interval;
            this.unit = unit;
            this.boundary = boundary;
            this.e = e;
        }
    }
    List<Param> cases = Arrays.asList(
            // 5-second floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "second", LocalDateTime.of(0001, 1, 1, 21, 22, 50)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "second", LocalDateTime.of(0001, 3, 2, 14, 17, 25)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "second", LocalDateTime.of(0001, 5, 6, 11, 54, 20)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "second", LocalDateTime.of(2022, 7, 8, 9, 13, 15)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "second", LocalDateTime.of(2022, 9, 9, 8, 8, 15)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "second", LocalDateTime.of(2022, 11, 3, 23, 41, 35)),
            // 5-minute floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "minute", LocalDateTime.of(0001, 1, 1, 21, 20, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "minute", LocalDateTime.of(0001, 3, 2, 14, 15, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "minute", LocalDateTime.of(0001, 5, 6, 11, 50, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "minute", LocalDateTime.of(2022, 7, 8, 9, 10, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "minute", LocalDateTime.of(2022, 9, 9, 8, 5, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "minute", LocalDateTime.of(2022, 11, 3, 23, 40, 0)),
            // 5-hour floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "hour", LocalDateTime.of(0001, 1, 1, 20, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "hour", LocalDateTime.of(0001, 3, 2, 10, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "hour", LocalDateTime.of(0001, 5, 6, 10, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "hour", LocalDateTime.of(2022, 7, 8, 8, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "hour", LocalDateTime.of(2022, 9, 9, 6, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "hour", LocalDateTime.of(2022, 11, 3, 21, 0, 0)),
            // 5-day floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "day", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "day", LocalDateTime.of(0001, 3, 2, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "day", LocalDateTime.of(0001, 5, 6, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "day", LocalDateTime.of(2022, 7, 5, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "day", LocalDateTime.of(2022, 9, 8, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "day", LocalDateTime.of(2022, 11, 2, 0, 0, 0)),
            // 5-month floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "month", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "month", LocalDateTime.of(2022, 9, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "month", LocalDateTime.of(2022, 9, 1, 0, 0, 0)),
            // 5-year floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
            // 5-week floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "week", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "week", LocalDateTime.of(0001, 2, 5, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "week", LocalDateTime.of(0001, 4, 16, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "week", LocalDateTime.of(2022, 6, 20, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "week", LocalDateTime.of(2022, 8, 29, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "week", LocalDateTime.of(2022, 10, 3, 0, 0, 0)),
            // 5-quarter floor grid.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
            // "ceil" boundary rounds up to the next grid point.
            new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "second", "ceil", LocalDateTime.of(0001, 1, 1, 21, 22,
                    55)),
            new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "second", "ceil", LocalDateTime.of(0001, 3, 2, 14, 17,
                    30)),
            new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "second", "ceil", LocalDateTime.of(0001, 5, 6, 11, 54,
                    25)),
            new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "second", "ceil", LocalDateTime.of(2022, 7, 8, 9, 13,
                    20)),
            new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "second", "ceil", LocalDateTime.of(2022, 9, 9, 8, 8,
                    20)),
            new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "second", "ceil", LocalDateTime.of(2022, 11, 3, 23, 41,
                    40)),
            // Inputs before the 0001-01-01 anchor are rejected.
            new Param(LocalDateTime.of(0000, 01, 01, 00, 00, 00), 5, "hour", "floor",
                    "time used with time_slice can't before 0001-01-01 00:00:00"),
            // A huge interval floors everything onto the anchor itself.
            new Param(LocalDateTime.of(2023, 12, 31, 03, 12, 00), 2147483647, "minute", "floor",
                    LocalDateTime.of(0001, 01, 01, 00, 00, 00))
    );
    for (Param testCase : cases) {
        try {
            ConstantOperator result = ScalarOperatorFunctions.timeSlice(
                    ConstantOperator.createDatetime(testCase.dateTime),
                    ConstantOperator.createInt(testCase.interval),
                    ConstantOperator.createVarchar(testCase.unit),
                    ConstantOperator.createVarchar(testCase.boundary)
            );
            if (testCase.expect != null) {
                assertEquals(testCase.expect, result.getDatetime());
            } else {
                // A success result where an error was expected is a failure.
                Assert.fail();
            }
        } catch (AnalysisException e) {
            assertTrue(e.getMessage().contains(testCase.e));
        }
    }
}
@Test
public void floor() {
    // floor(100.0f) yields 100 as a BIGINT.
    ConstantOperator result = ScalarOperatorFunctions.floor(O_FLOAT_100);
    assertEquals(100, result.getBigint());
}
@Test
public void addSmallInt() {
    // 10 + 10 in SMALLINT arithmetic.
    ConstantOperator sum = ScalarOperatorFunctions.addSmallInt(O_SI_10, O_SI_10);
    assertEquals(20, sum.getSmallint());
}
@Test
public void addInt() {
    // 10 + 10 in INT arithmetic.
    ConstantOperator sum = ScalarOperatorFunctions.addInt(O_INT_10, O_INT_10);
    assertEquals(20, sum.getInt());
}
@Test
public void addBigInt() {
    // 100 + 100 in BIGINT arithmetic.
    ConstantOperator sum = ScalarOperatorFunctions.addBigInt(O_BI_100, O_BI_100);
    assertEquals(200, sum.getBigint());
}
@Test
public void addLargeInt() {
    // 100 + 100 in LARGEINT (128-bit) arithmetic.
    ConstantOperator sum = ScalarOperatorFunctions.addLargeInt(O_LI_100, O_LI_100);
    assertEquals("200", sum.getLargeInt().toString());
}
@Test
public void addDouble() {
    // 100.0 + 100.0 in DOUBLE arithmetic, with a tolerance of 1.
    ConstantOperator sum = ScalarOperatorFunctions.addDouble(O_DOUBLE_100, O_DOUBLE_100);
    assertEquals(200.0, sum.getDouble(), 1);
}
@Test
public void addDecimal() {
    // 100 + 100 == 200 for decimal v2 and for every decimal v3 precision/scale
    // fixture; v3 inputs must also keep a decimal-v3 result type.
    assertEquals("200",
            ScalarOperatorFunctions.addDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toPlainString());
    ConstantOperator[] v3Operands = {O_DECIMAL32P7S2_100, O_DECIMAL32P9S0_100,
            O_DECIMAL64P15S10_100, O_DECIMAL64P18S15_100, O_DECIMAL128P30S2_100, O_DECIMAL128P38S20_100};
    for (ConstantOperator op : v3Operands) {
        ConstantOperator sum = ScalarOperatorFunctions.addDecimal(op, op);
        assertEquals("200", sum.getDecimal().toPlainString());
        assertTrue(sum.getType().isDecimalV3());
    }
}
@Test
public void subtractSmallInt() {
    // 10 - 10 in SMALLINT arithmetic.
    ConstantOperator diff = ScalarOperatorFunctions.subtractSmallInt(O_SI_10, O_SI_10);
    assertEquals(0, diff.getSmallint());
}
@Test
public void subtractInt() {
    // 10 - 10 in INT arithmetic.
    ConstantOperator diff = ScalarOperatorFunctions.subtractInt(O_INT_10, O_INT_10);
    assertEquals(0, diff.getInt());
}
@Test
public void subtractBigInt() {
    // 100 - 100 in BIGINT arithmetic.
    ConstantOperator diff = ScalarOperatorFunctions.subtractBigInt(O_BI_100, O_BI_100);
    assertEquals(0, diff.getBigint());
}
@Test
public void subtractDouble() {
    // 100.0 - 100.0 in DOUBLE arithmetic, with a tolerance of 1.
    ConstantOperator diff = ScalarOperatorFunctions.subtractDouble(O_DOUBLE_100, O_DOUBLE_100);
    assertEquals(0.0, diff.getDouble(), 1);
}
@Test
public void subtractDecimal() {
    // x - x == 0 for decimal v2 and every decimal v3 precision/scale fixture.
    ConstantOperator[] operands = {O_DECIMAL_100, O_DECIMAL32P7S2_100, O_DECIMAL32P9S0_100,
            O_DECIMAL64P15S10_100, O_DECIMAL64P18S15_100, O_DECIMAL128P30S2_100, O_DECIMAL128P38S20_100};
    for (ConstantOperator op : operands) {
        assertEquals("0", ScalarOperatorFunctions.subtractDecimal(op, op).getDecimal().toString());
    }
    // The result type stays decimal v3 for v3 inputs.
    assertTrue(ScalarOperatorFunctions.subtractDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
            .isDecimalV3());
}
@Test
public void subtractLargeInt() {
    // 100 - 100 in LARGEINT (128-bit) arithmetic.
    ConstantOperator diff = ScalarOperatorFunctions.subtractLargeInt(O_LI_100, O_LI_100);
    assertEquals("0", diff.getLargeInt().toString());
}
@Test
public void multiplySmallInt() {
    // 10 * 10 in SMALLINT arithmetic.
    ConstantOperator product = ScalarOperatorFunctions.multiplySmallInt(O_SI_10, O_SI_10);
    assertEquals(100, product.getSmallint());
}
@Test
public void multiplyInt() {
    // 10 * 10 in INT arithmetic.
    ConstantOperator product = ScalarOperatorFunctions.multiplyInt(O_INT_10, O_INT_10);
    assertEquals(100, product.getInt());
}
@Test
public void multiplyBigInt() {
    // 100 * 100 in BIGINT arithmetic.
    ConstantOperator product = ScalarOperatorFunctions.multiplyBigInt(O_BI_100, O_BI_100);
    assertEquals(10000, product.getBigint());
}
@Test
public void multiplyDouble() {
assertEquals(10000.0,
ScalarOperatorFunctions.multiplyDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}
@Test
public void multiplyDecimal() {
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void multiplyLargeInt() {
assertEquals("10000",
ScalarOperatorFunctions.multiplyLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
// --- Constant-folding tests for division (only double and decimal variants exist) ---

@Test
public void divideDouble() {
    assertEquals(1.0,
            ScalarOperatorFunctions.divideDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}

@Test
public void divideDecimal() {
    // x / x == 1 for every decimal fixture
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
                    .toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
                    .toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
                    .toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
                    .toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
                    .toString());
    assertEquals("1",
            ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
                    .toString());
    // the folded result must keep a decimal v3 type
    assertTrue(ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
            .isDecimalV3());
}
// --- Constant-folding tests for integer division (DIV) and modulo on integer types ---

@Test
public void intDivideTinyInt() {
    assertEquals(1, ScalarOperatorFunctions.intDivideTinyInt(O_TI_10, O_TI_10).getTinyInt());
}

@Test
public void intDivideSmallInt() {
    assertEquals(1, ScalarOperatorFunctions.intDivideSmallInt(O_SI_10, O_SI_10).getSmallint());
}

@Test
public void intDivideInt() {
    assertEquals(1, ScalarOperatorFunctions.intDivideInt(O_INT_10, O_INT_10).getInt());
}

@Test
public void intDivide() {
    // 100 DIV 3 truncates the fractional part: 33
    assertEquals(33, ScalarOperatorFunctions.intDivideBigint(O_BI_100, O_BI_3).getBigint());
}

@Test
public void intDivideLargeInt() {
    assertEquals("1", ScalarOperatorFunctions.intDivideLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}

@Test
public void modTinyInt() {
    assertEquals(0, ScalarOperatorFunctions.modTinyInt(O_TI_10, O_TI_10).getTinyInt());
}

@Test
public void modSMALLINT() {
    assertEquals(0, ScalarOperatorFunctions.modSMALLINT(O_SI_10, O_SI_10).getSmallint());
}

@Test
public void modInt() {
    assertEquals(0, ScalarOperatorFunctions.modInt(O_INT_10, O_INT_10).getInt());
}

@Test
public void modBigInt() {
    assertEquals(0, ScalarOperatorFunctions.modBigInt(O_BI_100, O_BI_100).getBigint());
}

@Test
public void modLargeInt() {
    assertEquals("0", ScalarOperatorFunctions.modLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void modDecimal() {
    // x % x == 0 for every decimal fixture (v2 and all v3 widths)
    assertEquals("0", ScalarOperatorFunctions.modDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal().toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal().toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
                    .toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
                    .toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
                    .toString());
    assertEquals("0",
            ScalarOperatorFunctions.modDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
                    .toString());
    // the folded result must keep a decimal v3 type
    assertTrue(ScalarOperatorFunctions.modDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
            .isDecimalV3());
}
// --- Constant-folding tests for bitwise AND / OR / XOR ---
// x & x == x, x | x == x, x ^ x == 0 for every integral width.

@Test
public void bitandTinyInt() {
    assertEquals(10, ScalarOperatorFunctions.bitandTinyInt(O_TI_10, O_TI_10).getTinyInt());
}

@Test
public void bitandSmallInt() {
    assertEquals(10, ScalarOperatorFunctions.bitandSmallInt(O_SI_10, O_SI_10).getSmallint());
}

@Test
public void bitandInt() {
    assertEquals(10, ScalarOperatorFunctions.bitandInt(O_INT_10, O_INT_10).getInt());
}

@Test
public void bitandBigint() {
    assertEquals(100, ScalarOperatorFunctions.bitandBigint(O_BI_100, O_BI_100).getBigint());
}

@Test
public void bitandLargeInt() {
    assertEquals("100", ScalarOperatorFunctions.bitandLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}

@Test
public void bitorTinyInt() {
    assertEquals(10, ScalarOperatorFunctions.bitorTinyInt(O_TI_10, O_TI_10).getTinyInt());
}

@Test
public void bitorSmallInt() {
    assertEquals(10, ScalarOperatorFunctions.bitorSmallInt(O_SI_10, O_SI_10).getSmallint());
}

@Test
public void bitorInt() {
    assertEquals(10, ScalarOperatorFunctions.bitorInt(O_INT_10, O_INT_10).getInt());
}

@Test
public void bitorBigint() {
    assertEquals(100, ScalarOperatorFunctions.bitorBigint(O_BI_100, O_BI_100).getBigint());
}

@Test
public void bitorLargeInt() {
    assertEquals("100", ScalarOperatorFunctions.bitorLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}

@Test
public void bitxorTinyInt() {
    assertEquals(0, ScalarOperatorFunctions.bitxorTinyInt(O_TI_10, O_TI_10).getTinyInt());
}

@Test
public void bitxorSmallInt() {
    assertEquals(0, ScalarOperatorFunctions.bitxorSmallInt(O_SI_10, O_SI_10).getSmallint());
}

@Test
public void bitxorInt() {
    assertEquals(0, ScalarOperatorFunctions.bitxorInt(O_INT_10, O_INT_10).getInt());
}

@Test
public void bitxorBigint() {
    assertEquals(0, ScalarOperatorFunctions.bitxorBigint(O_BI_100, O_BI_100).getBigint());
}

@Test
public void bitxorLargeInt() {
    assertEquals("0", ScalarOperatorFunctions.bitxorLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
// --- Constant-folding tests for bit shifts (left, arithmetic right, logical right) ---

@Test
public void bitShiftLeftTinyInt() {
    assertEquals(80, ScalarOperatorFunctions.bitShiftLeftTinyInt(O_TI_10, O_BI_3).getTinyInt());
}

@Test
public void bitShiftLeftSmallInt() {
    assertEquals(80, ScalarOperatorFunctions.bitShiftLeftSmallInt(O_SI_10, O_BI_3).getSmallint());
}

@Test
public void bitShiftLeftInt() {
    assertEquals(80, ScalarOperatorFunctions.bitShiftLeftInt(O_INT_10, O_BI_3).getInt());
}

@Test
public void bitShiftLeftBigint() {
    assertEquals(800, ScalarOperatorFunctions.bitShiftLeftBigint(O_BI_100, O_BI_3).getBigint());
}

@Test
public void bitShiftLeftLargeInt() {
    assertEquals("800", ScalarOperatorFunctions.bitShiftLeftLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
}

@Test
public void bitShiftRightTinyInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightTinyInt(O_TI_10, O_BI_3).getTinyInt());
}

@Test
public void bitShiftRightSmallInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightSmallInt(O_SI_10, O_BI_3).getSmallint());
}

@Test
public void bitShiftRightInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightInt(O_INT_10, O_BI_3).getInt());
}

@Test
public void bitShiftRightBigint() {
    assertEquals(12, ScalarOperatorFunctions.bitShiftRightBigint(O_BI_100, O_BI_3).getBigint());
}

@Test
public void bitShiftRightLargeInt() {
    assertEquals("12", ScalarOperatorFunctions.bitShiftRightLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
}

@Test
public void bitShiftRightLogicalTinyInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalTinyInt(O_TI_10, O_BI_3).getTinyInt());
}

@Test
public void bitShiftRightLogicalSmallInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalSmallInt(O_SI_10, O_BI_3).getSmallint());
}

@Test
public void bitShiftRightLogicalInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalInt(O_INT_10, O_BI_3).getInt());
}

@Test
public void bitShiftRightLogicalBigint() {
    assertEquals(12, ScalarOperatorFunctions.bitShiftRightLogicalBigint(O_BI_100, O_BI_3).getBigint());
}

@Test
public void bitShiftRightLogicalLargeInt() {
    // plain logical shift
    assertEquals("12",
            ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
    // negative shift count: behaves as a left shift here (100 << 3 == 800)
    assertEquals("800",
            ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_NEG_3).getLargeInt().toString());
    // shift count larger than the 128-bit width: appears to wrap (131 acts like 3) — see impl
    assertEquals("12",
            ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_131).getLargeInt().toString());
    // logical shift of a negative operand treats it as unsigned 128-bit
    assertEquals("42535295865117307932921825928971026419",
            ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_NEG_100, O_BI_3).getLargeInt().toString());
}
// --- Constant-folding tests for string concatenation ---

@Test
public void concat() {
    ConstantOperator[] arg = {ConstantOperator.createVarchar("1"),
            ConstantOperator.createVarchar("2"),
            ConstantOperator.createVarchar("3")};
    ConstantOperator result = ScalarOperatorFunctions.concat(arg);
    assertEquals(Type.VARCHAR, result.getType());
    assertEquals("123", result.getVarchar());
}

@Test
public void concat_ws() {
    ConstantOperator[] arg = {ConstantOperator.createVarchar("1"),
            ConstantOperator.createVarchar("2"),
            ConstantOperator.createVarchar("3")};
    ConstantOperator result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar(","), arg);
    assertEquals(Type.VARCHAR, result.getType());
    assertEquals("1,2,3", result.getVarchar());
}

@Test
public void concat_ws_with_null() {
    // NULL arguments are skipped (MySQL semantics), not turned into empty strings
    ConstantOperator[] argWithNull = {ConstantOperator.createVarchar("star"),
            ConstantOperator.createNull(Type.VARCHAR),
            ConstantOperator.createVarchar("cks")};
    ConstantOperator result =
            ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar("ro"), argWithNull);
    assertEquals(Type.VARCHAR, result.getType());
    assertEquals("starrocks", result.getVarchar());
    // a single NULL argument collapses to the empty string
    result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar(","),
            ConstantOperator.createNull(Type.VARCHAR));
    assertEquals("", result.getVarchar());
    // a NULL separator makes the whole result NULL
    ConstantOperator[] argWithoutNull = {ConstantOperator.createVarchar("star"),
            ConstantOperator.createVarchar("cks")};
    result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createNull(Type.VARCHAR), argWithoutNull);
    assertTrue(result.isNull());
}
@Test
public void fromUnixTime2() throws AnalysisException {
    ConstantOperator date =
            ScalarOperatorFunctions.fromUnixTime(O_BI_10, ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s"));
    // regex on the hour digit keeps the assertion independent of the session time zone
    assertTrue(date.toString().matches("1970-01-01 0.*:00:10"));
}

@Test
public void testNonDeterministicFuncComp() {
    // a cloned non-deterministic call (random()) must still compare equal to its source
    CallOperator random = new CallOperator(FunctionSet.RANDOM, Type.DOUBLE, Lists.newArrayList());
    CallOperator randomCopy = (CallOperator) random.clone();
    assertEquals(random, randomCopy);
}

@Test
public void testUTCTimestamp() {
    ConnectContext ctx = new ConnectContext(null);
    ctx.setThreadLocalInfo();
    ctx.setStartTime();
    // truncate to whole seconds, matching utc_timestamp()'s second precision
    LocalDateTime expected = Instant.ofEpochMilli(ctx.getStartTime() / 1000 * 1000)
            .atZone(ZoneOffset.UTC).toLocalDateTime();
    assertEquals(expected, ScalarOperatorFunctions.utcTimestamp().getDatetime());
}

@Test
public void testNow() {
    ConnectContext ctx = new ConnectContext(null);
    ctx.setThreadLocalInfo();
    ctx.setStartTime();
    // truncate to whole seconds, matching now()'s default second precision
    LocalDateTime expected = Instant.ofEpochMilli(ctx.getStartTime() / 1000 * 1000)
            .atZone(TimeUtils.getTimeZone().toZoneId()).toLocalDateTime();
    assertEquals(expected, ScalarOperatorFunctions.now().getDatetime());
}

@Test
public void testNowWithParameter() throws AnalysisException {
    ConnectContext ctx = new ConnectContext(null);
    ctx.setThreadLocalInfo();
    ctx.setStartTime();
    // now(6): full microsecond precision, so no truncation of the start instant
    LocalDateTime expected = ctx.getStartTimeInstant().atZone(TimeUtils.getTimeZone().toZoneId()).toLocalDateTime();
    assertEquals(expected, ScalarOperatorFunctions.now(new ConstantOperator(6, Type.INT)).getDatetime());
}
@Test
public void testSubString() {
    // substring uses 1-based positions; negative positions count from the end,
    // and position 0 or out-of-range positions yield the empty string (MySQL semantics)
    assertEquals("ab", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(1), ConstantOperator.createInt(2)).getVarchar());
    assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(1)).getVarchar());
    assertEquals("cd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(-2)).getVarchar());
    assertEquals("c", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(-2), ConstantOperator.createInt(1)).getVarchar());
    assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(1), ConstantOperator.createInt(4)).getVarchar());
    // length past the end is clamped, not an error
    assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(1), ConstantOperator.createInt(10)).getVarchar());
    assertEquals("cd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(3), ConstantOperator.createInt(4)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(0), ConstantOperator.createInt(2)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
            ConstantOperator.createInt(5), ConstantOperator.createInt(2)).getVarchar());
    assertEquals("starrocks", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrockscluster", Type.VARCHAR),
            new ConstantOperator(1, Type.INT),
            new ConstantOperator(9, Type.INT)).getVarchar());
    assertEquals("rocks", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(-5, Type.INT),
            new ConstantOperator(5, Type.INT)).getVarchar());
    assertEquals("s", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(-1, Type.INT),
            new ConstantOperator(8, Type.INT)).getVarchar());
    // negative position beyond the string length yields empty, not a wrap-around
    assertEquals("", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(-100, Type.INT),
            new ConstantOperator(5, Type.INT)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(0, Type.INT),
            new ConstantOperator(5, Type.INT)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(-1, Type.INT),
            new ConstantOperator(0, Type.INT)).getVarchar());
    assertEquals("apple", ScalarOperatorFunctions.substring(
            new ConstantOperator("apple", Type.VARCHAR),
            new ConstantOperator(-5, Type.INT),
            new ConstantOperator(5, Type.INT)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(0, Type.INT)).getVarchar());
    assertEquals("starrocks", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(1, Type.INT)).getVarchar());
    assertEquals("s", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(9, Type.INT)).getVarchar());
    assertEquals("", ScalarOperatorFunctions.substring(
            new ConstantOperator("starrocks", Type.VARCHAR),
            new ConstantOperator(10, Type.INT)).getVarchar());
}
@Test
public void testUrlExtractParameter() {
assertEquals("100", ScalarOperatorFunctions.urlExtractParameter(
new ConstantOperator("https:
new ConstantOperator("k1", Type.VARCHAR)
).getVarchar());
assertEquals(ScalarOperatorFunctions.urlExtractParameter(
new ConstantOperator("1234i5", Type.VARCHAR),
new ConstantOperator("k1", Type.VARCHAR)),
ConstantOperator.createNull(Type.VARCHAR));
assertEquals(ScalarOperatorFunctions.urlExtractParameter(
new ConstantOperator("https:
new ConstantOperator("k3", Type.VARCHAR)),
ConstantOperator.createNull(Type.VARCHAR));
}
}
// (removed stray "|" artifact between the two test classes — it is not valid Java)
class ScalarOperatorFunctionsTest {
// Shared constant-operand fixtures, re-initialized for every test in setUp().
// Naming scheme: O_<TYPE>_<VALUE> — e.g. O_BI_100 is a BIGINT constant 100,
// O_DT_yyyymmdd_hhmmss is a DATETIME constant, and
// O_DECIMAL<width>P<precision>S<scale>_<value> is a decimal v3 constant.
private static ConstantOperator O_DT_20101102_183010;
private static ConstantOperator O_DT_20101202_023010;
private static ConstantOperator O_DT_20150323_092355;
private static ConstantOperator O_TI_10;
private static ConstantOperator O_SI_10;
private static ConstantOperator O_INT_10;
private static ConstantOperator O_FLOAT_100;
private static ConstantOperator O_DOUBLE_100;
private static ConstantOperator O_BI_100;
private static ConstantOperator O_BI_3;
private static ConstantOperator O_BI_10;
private static ConstantOperator O_BI_131;
private static ConstantOperator O_BI_NEG_3;
private static ConstantOperator O_LI_100;
private static ConstantOperator O_LI_NEG_100;
private static ConstantOperator O_DECIMAL_100;
private static ConstantOperator O_DECIMAL32P7S2_100;
private static ConstantOperator O_DECIMAL32P9S0_100;
private static ConstantOperator O_DECIMAL64P18S15_100;
private static ConstantOperator O_DECIMAL64P15S10_100;
private static ConstantOperator O_DECIMAL128P38S20_100;
private static ConstantOperator O_DECIMAL128P30S2_100;
/**
 * Rebuilds every constant-operand fixture before each test so mutations cannot leak
 * between tests.
 */
@Before
public void setUp() throws AnalysisException {
    O_DT_20101102_183010 = ConstantOperator.createDatetime(LocalDateTime.of(2010, 11, 2, 18, 30, 10));
    O_DT_20101202_023010 = ConstantOperator.createDatetime(LocalDateTime.of(2010, 12, 2, 2, 30, 10));
    O_DT_20150323_092355 = ConstantOperator.createDatetime(LocalDateTime.of(2015, 3, 23, 9, 23, 55));
    O_TI_10 = ConstantOperator.createTinyInt((byte) 10);
    O_SI_10 = ConstantOperator.createSmallInt((short) 10);
    O_INT_10 = ConstantOperator.createInt(10);
    O_FLOAT_100 = ConstantOperator.createFloat(100);
    // Fix: this fixture was previously built with createFloat(), which gave the "DOUBLE"
    // constant FLOAT type (identical to O_FLOAT_100). Use the double factory so tests
    // exercising DOUBLE semantics actually run against a DOUBLE-typed constant.
    O_DOUBLE_100 = ConstantOperator.createDouble(100);
    O_BI_100 = ConstantOperator.createBigint(100);
    O_BI_3 = ConstantOperator.createBigint(3);
    O_BI_10 = ConstantOperator.createBigint(10);
    O_BI_131 = ConstantOperator.createBigint(131);
    O_BI_NEG_3 = ConstantOperator.createBigint(-3);
    O_LI_100 = ConstantOperator.createLargeInt(new BigInteger("100"));
    O_LI_NEG_100 = ConstantOperator.createLargeInt(new BigInteger("-100"));
    O_DECIMAL_100 = ConstantOperator.createDecimal(new BigDecimal(100), Type.DECIMALV2);
    // decimal v3 fixtures covering each storage width with differing precision/scale
    O_DECIMAL32P7S2_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL32, 7, 2));
    O_DECIMAL32P9S0_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL32, 9, 0));
    O_DECIMAL64P15S10_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 15, 10));
    O_DECIMAL64P18S15_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 18, 15));
    O_DECIMAL128P38S20_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 20));
    O_DECIMAL128P30S2_100 = ConstantOperator.createDecimal(new BigDecimal(100),
            ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 30, 2));
}
// --- Constant-folding tests for datetime difference and *_add functions ---

@Test
public void timeDiff() {
    // negative: first operand is earlier than the second; delta of 1 second tolerated
    assertEquals(-2534400.0, ScalarOperatorFunctions.timeDiff(O_DT_20101102_183010, O_DT_20101202_023010).getTime(),
            1);
}

@Test
public void dateDiff() {
    assertEquals(-1602,
            ScalarOperatorFunctions.dateDiff(O_DT_20101102_183010, O_DT_20150323_092355).getInt());
    assertEquals(-1572, ScalarOperatorFunctions.dateDiff(O_DT_20101202_023010, O_DT_20150323_092355).getInt());
}

@Test
public void yearsAdd() {
    assertEquals("2025-03-23T09:23:55",
            ScalarOperatorFunctions.yearsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void monthsAdd() {
    assertEquals("2016-01-23T09:23:55",
            ScalarOperatorFunctions.monthsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void daysAdd() {
    assertEquals("2015-04-02T09:23:55",
            ScalarOperatorFunctions.daysAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void hoursAdd() {
    assertEquals("2015-03-23T19:23:55",
            ScalarOperatorFunctions.hoursAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void minutesAdd() {
    assertEquals("2015-03-23T09:33:55",
            ScalarOperatorFunctions.minutesAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void secondsAdd() {
    assertEquals("2015-03-23T09:24:05",
            ScalarOperatorFunctions.secondsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
@Test
public void dateTrunc() {
    // table of {trunc unit, input datetime, expected truncated datetime};
    // "week" truncates to the Monday of the ISO week containing the input
    String[][] testCases = {
            {"second", "2015-03-23 09:23:55", "2015-03-23T09:23:55"},
            {"minute", "2015-03-23 09:23:55", "2015-03-23T09:23"},
            {"hour", "2015-03-23 09:23:55", "2015-03-23T09:00"},
            {"day", "2015-03-23 09:23:55", "2015-03-23T00:00"},
            {"month", "2015-03-23 09:23:55", "2015-03-01T00:00"},
            {"year", "2015-03-23 09:23:55", "2015-01-01T00:00"},
            {"week", "2015-01-01 09:23:55", "2014-12-29T00:00"},
            {"week", "2015-03-22 09:23:55", "2015-03-16T00:00"},
            {"week", "2015-03-23 09:23:55", "2015-03-23T00:00"},
            {"week", "2015-03-24 09:23:55", "2015-03-23T00:00"},
            {"week", "2020-02-29 09:23:55", "2020-02-24T00:00"},
            {"quarter", "2015-01-01 09:23:55", "2015-01-01T00:00"},
            {"quarter", "2015-03-23 09:23:55", "2015-01-01T00:00"},
            {"quarter", "2015-04-01 09:23:55", "2015-04-01T00:00"},
            {"quarter", "2015-05-23 09:23:55", "2015-04-01T00:00"},
            {"quarter", "2015-07-01 09:23:55", "2015-07-01T00:00"},
            {"quarter", "2015-07-23 09:23:55", "2015-07-01T00:00"},
            {"quarter", "2015-10-01 09:23:55", "2015-10-01T00:00"},
            {"quarter", "2015-11-23 09:23:55", "2015-10-01T00:00"},
            {"day", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"day", "2020-02-02 09:23:55", "2020-02-02T00:00"},
            {"day", "2020-03-06 09:23:55", "2020-03-06T00:00"},
            {"day", "2020-04-08 09:23:55", "2020-04-08T00:00"},
            {"day", "2020-05-09 09:23:55", "2020-05-09T00:00"},
            {"day", "2020-11-03 09:23:55", "2020-11-03T00:00"},
            {"month", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"month", "2020-02-02 09:23:55", "2020-02-01T00:00"},
            {"month", "2020-03-06 09:23:55", "2020-03-01T00:00"},
            {"month", "2020-04-08 09:23:55", "2020-04-01T00:00"},
            {"month", "2020-05-09 09:23:55", "2020-05-01T00:00"},
            {"month", "2020-11-03 09:23:55", "2020-11-01T00:00"},
            {"year", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-02-02 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-03-06 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-04-08 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-05-09 09:23:55", "2020-01-01T00:00"},
            {"year", "2020-11-03 09:23:55", "2020-01-01T00:00"},
            {"week", "2020-01-01 09:23:55", "2019-12-30T00:00"},
            {"week", "2020-02-02 09:23:55", "2020-01-27T00:00"},
            {"week", "2020-03-06 09:23:55", "2020-03-02T00:00"},
            {"week", "2020-04-08 09:23:55", "2020-04-06T00:00"},
            {"week", "2020-05-09 09:23:55", "2020-05-04T00:00"},
            {"week", "2020-11-03 09:23:55", "2020-11-02T00:00"},
            {"quarter", "2020-01-01 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-02-02 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-03-06 09:23:55", "2020-01-01T00:00"},
            {"quarter", "2020-04-08 09:23:55", "2020-04-01T00:00"},
            {"quarter", "2020-05-09 09:23:55", "2020-04-01T00:00"},
            {"quarter", "2020-11-03 09:23:55", "2020-10-01T00:00"},
    };
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    for (String[] tc : testCases) {
        ConstantOperator fmt = ConstantOperator.createVarchar(tc[0]);
        ConstantOperator date = ConstantOperator.createDatetime(LocalDateTime.parse(tc[1], formatter));
        assertEquals(tc[2],
                ScalarOperatorFunctions.dateTrunc(fmt, date).getDatetime().toString());
    }
    // an unrecognized truncation unit must be rejected
    Assert.assertThrows("<ERROR> not supported in date_trunc format string", IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateTrunc(ConstantOperator.createVarchar("<ERROR>"), O_DT_20150323_092355)
                    .getVarchar());
}
// Fix: the method carried a duplicated @Test annotation, which does not compile
// (@Test is not a repeatable annotation); exactly one is kept.
@Test
public void dateParse() {
    // happy-path parses across a range of MySQL-style format specifiers
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013,05,10"), ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDatetime().toString());
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar(" 2013,05,10 "),
                    ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDatetime().toString());
    assertEquals("2013-05-17T12:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime().toString());
    assertEquals("2013-12-01T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013121"),
                    ConstantOperator.createVarchar("%Y%m%d")).getDatetime().toString());
    // fractional seconds via %f, including trailing-zero normalization
    assertEquals("2013-05-17T12:35:10.000123", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.000123"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T12:35:10.000001", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.000001"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T12:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10.00000"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s.%f")).getDatetime().toString());
    assertEquals("2013-05-17T00:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 00:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime().toString());
    // literal text in the format string must be matched verbatim
    assertEquals("2013-05-17T23:35:10", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("abc 2013-05-17 fff 23:35:10 xyz"),
                    ConstantOperator.createVarchar("abc %Y-%m-%d fff %H:%i:%s xyz")).getDatetime().toString());
    // %j: day of year
    assertEquals("2019-05-09T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019,129"), ConstantOperator.createVarchar("%Y,%j"))
            .getDatetime().toString());
    // %T: hh:mm:ss; %k: hour without zero padding
    assertEquals("2019-05-09T12:10:45", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("12:10:45-20190509"),
                    ConstantOperator.createVarchar("%T-%Y%m%d")).getDatetime().toString());
    assertEquals("2019-05-09T09:10:45", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("20190509-9:10:45"),
                    ConstantOperator.createVarchar("%Y%m%d-%k:%i:%S")).getDatetime().toString());
    assertEquals("2020-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-02-21"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).toString());
    // %y: two-digit year, pivoting 20 -> 2020 but 98 -> 1998
    assertEquals("2020-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("20-02-21"),
                    ConstantOperator.createVarchar("%y-%m-%d")).toString());
    assertEquals("1998-02-21 00:00:00",
            ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("98-02-21"),
                    ConstantOperator.createVarchar("%y-%m-%d")).toString());
    // malformed input, trailing garbage, and invalid calendar dates must throw
    Assert.assertThrows(DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("201905"),
                    ConstantOperator.createVarchar("%Y%m")).getDatetime());
    Assert.assertThrows(DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("20190507"),
                    ConstantOperator.createVarchar("%Y%m")).getDatetime());
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019-02-29"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime());
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2019-02-29 11:12:13"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDatetime());
    // %w (weekday) is not a supported parse specifier
    Assert.assertThrows(IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-2-21"),
                    ConstantOperator.createVarchar("%w")).getVarchar());
    Assert.assertThrows(IllegalArgumentException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("2020-02-21"),
                    ConstantOperator.createVarchar("%w")).getVarchar());
    // leading tab / newline / carriage-return are tolerated, but form-feed is not
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\t 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\n 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    assertEquals("2013-01-17T00:00", ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("\r 2013-1-17"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDate().toString());
    Assert.assertThrows(DateTimeParseException.class,
            () -> ScalarOperatorFunctions.dateParse(ConstantOperator.createVarchar("\f 2020-02-21"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getVarchar());
    // %h is 12-hour and needs an AM/PM marker, so this cannot resolve to a LocalDateTime
    Assert.assertThrows("Unable to obtain LocalDateTime", DateTimeException.class, () -> ScalarOperatorFunctions
            .dateParse(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %h:%i:%s")).getDatetime());
    assertEquals("2022-10-18T01:02:03", ScalarOperatorFunctions.dateParse(
            ConstantOperator.createVarchar("2022-10-18 01:02:03"),
            ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).
            getDatetime().toString());
    // a format shorter than the input is allowed as long as it parses a valid prefix
    assertEquals("2022-10-18T01:02", ScalarOperatorFunctions.dateParse(
            ConstantOperator.createVarchar("2022-10-18 01:02:03"),
            ConstantOperator.createVarchar("%Y-%m-%d %H:%i")).
            getDatetime().toString());
    // seconds without minutes cannot form a LocalDateTime
    Assert.assertThrows("Unable to obtain LocalDateTime", DateTimeException.class,
            () -> ScalarOperatorFunctions.dateParse(
                    ConstantOperator.createVarchar("2022-10-18 01:02:03"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%s")).getDatetime());
}
@Test
public void str2Date() {
    // str2date drops any time-of-day component and returns a pure DATE
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("2013,05,10"), ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDate().toString());
    assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar(" 2013,05,10 "), ConstantOperator.createVarchar("%Y,%m,%d"))
            .getDate().toString());
    assertEquals("2013-05-17T00:00", ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDate().toString());
    // %y two-digit year pivot: 13 -> 2013, 98 -> 1998
    assertEquals("2013-05-17T00:00", ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("13-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%y-%m-%d %H:%i:%s")).getDate().toString());
    assertEquals("1998-05-17T00:00", ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("98-05-17 12:35:10"),
                    ConstantOperator.createVarchar("%y-%m-%d %H:%i:%s")).getDate().toString());
    // invalid calendar date (2019 is not a leap year) must throw
    Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
            .str2Date(ConstantOperator.createVarchar("2019-02-29"),
                    ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime());
}
// --- Constant-folding tests for datetime *_sub functions ---

@Test
public void yearsSub() {
    assertEquals("2005-03-23T09:23:55",
            ScalarOperatorFunctions.yearsSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void monthsSub() {
    assertEquals("2014-05-23T09:23:55",
            ScalarOperatorFunctions.monthsSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void daysSub() {
    assertEquals("2015-03-13T09:23:55",
            ScalarOperatorFunctions.daysSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void hoursSub() {
    assertEquals("2015-03-22T23:23:55",
            ScalarOperatorFunctions.hoursSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void minutesSub() {
    assertEquals("2015-03-23T09:13:55",
            ScalarOperatorFunctions.minutesSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}

@Test
public void secondsSub() {
    assertEquals("2015-03-23T09:23:45",
            ScalarOperatorFunctions.secondsSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
@Test
public void year() {
ConstantOperator date = ConstantOperator.createDatetime(LocalDateTime.of(2000, 10, 21, 12, 0));
ConstantOperator result = ScalarOperatorFunctions.year(date);
assertEquals(Type.SMALLINT, result.getType());
assertEquals(2000, result.getSmallint());
}
@Test
public void month() {
assertEquals(3, ScalarOperatorFunctions.month(O_DT_20150323_092355).getTinyInt());
}
@Test
public void day() {
assertEquals(23, ScalarOperatorFunctions.day(O_DT_20150323_092355).getTinyInt());
}
@Test
public void unixTimestamp() {
ConstantOperator codt = ConstantOperator.createDatetime(LocalDateTime.of(2050, 3, 23, 9, 23, 55));
assertEquals(2531611435L,
ScalarOperatorFunctions.unixTimestamp(codt).getBigint());
assertEquals(1427073835L,
ScalarOperatorFunctions.unixTimestamp(O_DT_20150323_092355).getBigint());
}
@Test
public void convert_tz() {
ConstantOperator olddt = ConstantOperator.createDatetime(LocalDateTime.of(2019, 8, 1, 13, 21, 3));
assertEquals("2019-07-31T22:21:03",
ScalarOperatorFunctions.convert_tz(olddt,
ConstantOperator.createVarchar("Asia/Shanghai"),
ConstantOperator.createVarchar("America/Los_Angeles")).getDatetime().toString());
ConstantOperator oldd = ConstantOperator.createDate(LocalDateTime.of(2019, 8, 1, 0, 0, 0));
assertEquals("2019-07-31T09:00",
ScalarOperatorFunctions.convert_tz(oldd,
ConstantOperator.createVarchar("Asia/Shanghai"),
ConstantOperator.createVarchar("America/Los_Angeles")).getDatetime().toString());
}
@Test
public void fromUnixTime() throws AnalysisException {
assertEquals("1970-01-01 08:00:10",
ScalarOperatorFunctions.fromUnixTime(O_BI_10).getVarchar());
}
@Test
public void curDate() {
ConnectContext ctx = new ConnectContext(null);
ctx.setThreadLocalInfo();
ctx.setStartTime();
LocalDateTime now = LocalDateTime.of(LocalDate.now(), LocalTime.of(0, 0, 0));
assertEquals(now, ScalarOperatorFunctions.curDate().getDate());
}
@Test
public void nextDay() {
assertEquals("2015-03-29T09:23:55", ScalarOperatorFunctions.nextDay(O_DT_20150323_092355,
ConstantOperator.createVarchar("Sunday")).getDate().toString());
Assert.assertThrows("undefine_dow not supported in next_day dow_string", IllegalArgumentException.class,
() -> ScalarOperatorFunctions.nextDay(O_DT_20150323_092355, ConstantOperator.createVarchar("undefine_dow"))
.getVarchar());
}
@Test
public void previousDay() {
assertEquals("2015-03-22T09:23:55", ScalarOperatorFunctions.previousDay(O_DT_20150323_092355,
ConstantOperator.createVarchar("Sunday")).getDate().toString());
Assert.assertThrows("undefine_dow not supported in previous_day dow_string", IllegalArgumentException.class,
() -> ScalarOperatorFunctions.previousDay(O_DT_20150323_092355, ConstantOperator.createVarchar("undefine_dow"))
.getVarchar());
}
@Test
public void makeDate() {
ConnectContext ctx = new ConnectContext(null);
ctx.setThreadLocalInfo();
ctx.setStartTime();
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT),
ConstantOperator.createNull(Type.INT)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT),
ConstantOperator.createInt(1)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(1),
ConstantOperator.createNull(Type.INT)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(0)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(367)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(-1), ConstantOperator.createInt(1)));
assertEquals(ConstantOperator.createNull(Type.DATE),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(10000), ConstantOperator.createInt(1)));
assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 1, 1, 0, 0, 0)),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(1)));
assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 12, 31, 0, 0, 0)),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(366)));
assertEquals(ConstantOperator.createDate(LocalDateTime.of(0, 1, 1, 0, 0, 0)),
ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(0), ConstantOperator.createInt(1)));
}
@Test
public void timeSlice() throws AnalysisException {
class Param {
final LocalDateTime dateTime;
final int interval;
final String unit;
final String boundary;
LocalDateTime expect;
String e;
public Param(LocalDateTime dateTime, int interval, String unit, LocalDateTime expect) {
this(dateTime, interval, unit, "floor", expect);
}
private Param(LocalDateTime dateTime, int interval, String unit, String boundary, LocalDateTime expect) {
this.dateTime = dateTime;
this.interval = interval;
this.unit = unit;
this.boundary = boundary;
this.expect = expect;
}
private Param(LocalDateTime dateTime, int interval, String unit, String boundary, String e) {
this.dateTime = dateTime;
this.interval = interval;
this.unit = unit;
this.boundary = boundary;
this.e = e;
}
}
List<Param> cases = Arrays.asList(
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "second", LocalDateTime.of(0001, 1, 1, 21, 22, 50)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "second", LocalDateTime.of(0001, 3, 2, 14, 17, 25)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "second", LocalDateTime.of(0001, 5, 6, 11, 54, 20)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "second", LocalDateTime.of(2022, 7, 8, 9, 13, 15)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "second", LocalDateTime.of(2022, 9, 9, 8, 8, 15)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "second", LocalDateTime.of(2022, 11, 3, 23, 41, 35)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "minute", LocalDateTime.of(0001, 1, 1, 21, 20, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "minute", LocalDateTime.of(0001, 3, 2, 14, 15, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "minute", LocalDateTime.of(0001, 5, 6, 11, 50, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "minute", LocalDateTime.of(2022, 7, 8, 9, 10, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "minute", LocalDateTime.of(2022, 9, 9, 8, 5, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "minute", LocalDateTime.of(2022, 11, 3, 23, 40, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "hour", LocalDateTime.of(0001, 1, 1, 20, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "hour", LocalDateTime.of(0001, 3, 2, 10, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "hour", LocalDateTime.of(0001, 5, 6, 10, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "hour", LocalDateTime.of(2022, 7, 8, 8, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "hour", LocalDateTime.of(2022, 9, 9, 6, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "hour", LocalDateTime.of(2022, 11, 3, 21, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "day", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "day", LocalDateTime.of(0001, 3, 2, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "day", LocalDateTime.of(0001, 5, 6, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "day", LocalDateTime.of(2022, 7, 5, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "day", LocalDateTime.of(2022, 9, 8, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "day", LocalDateTime.of(2022, 11, 2, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "month", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "month", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "month", LocalDateTime.of(2022, 9, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "month", LocalDateTime.of(2022, 9, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "year", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "year", LocalDateTime.of(2021, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "week", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "week", LocalDateTime.of(0001, 2, 5, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "week", LocalDateTime.of(0001, 4, 16, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "week", LocalDateTime.of(2022, 6, 20, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "week", LocalDateTime.of(2022, 8, 29, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "week", LocalDateTime.of(2022, 10, 3, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "quarter", LocalDateTime.of(0001, 1, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "quarter", LocalDateTime.of(2022, 4, 1, 0, 0, 0)),
new Param(LocalDateTime.of(0001, 1, 1, 21, 22, 51), 5, "second", "ceil", LocalDateTime.of(0001, 1, 1, 21, 22,
55)),
new Param(LocalDateTime.of(0001, 3, 2, 14, 17, 28), 5, "second", "ceil", LocalDateTime.of(0001, 3, 2, 14, 17,
30)),
new Param(LocalDateTime.of(0001, 5, 6, 11, 54, 23), 5, "second", "ceil", LocalDateTime.of(0001, 5, 6, 11, 54,
25)),
new Param(LocalDateTime.of(2022, 7, 8, 9, 13, 19), 5, "second", "ceil", LocalDateTime.of(2022, 7, 8, 9, 13,
20)),
new Param(LocalDateTime.of(2022, 9, 9, 8, 8, 16), 5, "second", "ceil", LocalDateTime.of(2022, 9, 9, 8, 8,
20)),
new Param(LocalDateTime.of(2022, 11, 3, 23, 41, 37), 5, "second", "ceil", LocalDateTime.of(2022, 11, 3, 23, 41,
40)),
new Param(LocalDateTime.of(0000, 01, 01, 00, 00, 00), 5, "hour", "floor",
"time used with time_slice can't before 0001-01-01 00:00:00"),
new Param(LocalDateTime.of(2023, 12, 31, 03, 12, 00), 2147483647, "minute", "floor",
LocalDateTime.of(0001, 01, 01, 00, 00, 00))
);
for (Param testCase : cases) {
try {
ConstantOperator result = ScalarOperatorFunctions.timeSlice(
ConstantOperator.createDatetime(testCase.dateTime),
ConstantOperator.createInt(testCase.interval),
ConstantOperator.createVarchar(testCase.unit),
ConstantOperator.createVarchar(testCase.boundary)
);
if (testCase.expect != null) {
assertEquals(testCase.expect, result.getDatetime());
} else {
Assert.fail();
}
} catch (AnalysisException e) {
assertTrue(e.getMessage().contains(testCase.e));
}
}
}
@Test
public void floor() {
assertEquals(100, ScalarOperatorFunctions.floor(O_FLOAT_100).getBigint());
}
@Test
public void addSmallInt() {
assertEquals(20,
ScalarOperatorFunctions.addSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void addInt() {
assertEquals(20,
ScalarOperatorFunctions.addInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void addBigInt() {
assertEquals(200, ScalarOperatorFunctions.addBigInt(O_BI_100, O_BI_100).getBigint());
}
@Test
public void addLargeInt() {
assertEquals("200",
ScalarOperatorFunctions.addLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void addDouble() {
assertEquals(200.0,
ScalarOperatorFunctions.addDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}
@Test
public void addDecimal() {
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toPlainString());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
.toPlainString());
assertTrue(
ScalarOperatorFunctions.addDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getType().isDecimalV3());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
.toPlainString());
assertTrue(
ScalarOperatorFunctions.addDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getType().isDecimalV3());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.addDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getType()
.isDecimalV3());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.addDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getType()
.isDecimalV3());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.addDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getType()
.isDecimalV3());
assertEquals("200",
ScalarOperatorFunctions.addDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.addDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void subtractSmallInt() {
assertEquals(0,
ScalarOperatorFunctions.subtractSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void subtractInt() {
assertEquals(0,
ScalarOperatorFunctions.subtractInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void subtractBigInt() {
assertEquals(0, ScalarOperatorFunctions.subtractBigInt(O_BI_100, O_BI_100).getBigint());
}
@Test
public void subtractDouble() {
assertEquals(0.0,
ScalarOperatorFunctions.subtractDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}
@Test
public void subtractDecimal() {
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.subtractDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toString());
assertTrue(ScalarOperatorFunctions.subtractDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void subtractLargeInt() {
assertEquals("0",
ScalarOperatorFunctions.subtractLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void multiplySmallInt() {
assertEquals(100,
ScalarOperatorFunctions.multiplySmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void multiplyInt() {
assertEquals(100,
ScalarOperatorFunctions.multiplyInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void multiplyBigInt() {
assertEquals(10000,
ScalarOperatorFunctions.multiplyBigInt(O_BI_100, O_BI_100).getBigint());
}
@Test
public void multiplyDouble() {
assertEquals(10000.0,
ScalarOperatorFunctions.multiplyDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}
@Test
public void multiplyDecimal() {
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toPlainString());
assertEquals("10000",
ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toPlainString());
assertTrue(ScalarOperatorFunctions.multiplyDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void multiplyLargeInt() {
assertEquals("10000",
ScalarOperatorFunctions.multiplyLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void divideDouble() {
assertEquals(1.0,
ScalarOperatorFunctions.divideDouble(O_DOUBLE_100, O_DOUBLE_100).getDouble(), 1);
}
@Test
public void divideDecimal() {
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal()
.toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal()
.toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toString());
assertEquals("1",
ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toString());
assertTrue(ScalarOperatorFunctions.divideDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void intDivideTinyInt() {
assertEquals(1, ScalarOperatorFunctions.intDivideTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@Test
public void intDivideSmallInt() {
assertEquals(1, ScalarOperatorFunctions.intDivideSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void intDivideInt() {
assertEquals(1, ScalarOperatorFunctions.intDivideInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void intDivide() {
assertEquals(33, ScalarOperatorFunctions.intDivideBigint(O_BI_100, O_BI_3).getBigint());
}
@Test
public void intDivideLargeInt() {
assertEquals("1", ScalarOperatorFunctions.intDivideLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void modTinyInt() {
assertEquals(0, ScalarOperatorFunctions.modTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@Test
public void modSMALLINT() {
assertEquals(0, ScalarOperatorFunctions.modSMALLINT(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void modInt() {
assertEquals(0, ScalarOperatorFunctions.modInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void modBigInt() {
assertEquals(0, ScalarOperatorFunctions.modBigInt(O_BI_100, O_BI_100).getBigint());
}
@Test
public void modLargeInt() {
assertEquals("0", ScalarOperatorFunctions.modLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void modDecimal() {
assertEquals("0", ScalarOperatorFunctions.modDecimal(O_DECIMAL_100, O_DECIMAL_100).getDecimal().toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL32P7S2_100, O_DECIMAL32P7S2_100).getDecimal().toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL32P9S0_100, O_DECIMAL32P9S0_100).getDecimal().toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL64P15S10_100, O_DECIMAL64P15S10_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL64P18S15_100, O_DECIMAL64P18S15_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL128P30S2_100, O_DECIMAL128P30S2_100).getDecimal()
.toString());
assertEquals("0",
ScalarOperatorFunctions.modDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getDecimal()
.toString());
assertTrue(ScalarOperatorFunctions.modDecimal(O_DECIMAL128P38S20_100, O_DECIMAL128P38S20_100).getType()
.isDecimalV3());
}
@Test
public void bitandTinyInt() {
assertEquals(10, ScalarOperatorFunctions.bitandTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@Test
public void bitandSmallInt() {
assertEquals(10, ScalarOperatorFunctions.bitandSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void bitandInt() {
assertEquals(10, ScalarOperatorFunctions.bitandInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void bitandBigint() {
assertEquals(100, ScalarOperatorFunctions.bitandBigint(O_BI_100, O_BI_100).getBigint());
}
@Test
public void bitandLargeInt() {
assertEquals("100", ScalarOperatorFunctions.bitandLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void bitorTinyInt() {
assertEquals(10, ScalarOperatorFunctions.bitorTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@Test
public void bitorSmallInt() {
assertEquals(10, ScalarOperatorFunctions.bitorSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void bitorInt() {
assertEquals(10, ScalarOperatorFunctions.bitorInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void bitorBigint() {
assertEquals(100, ScalarOperatorFunctions.bitorBigint(O_BI_100, O_BI_100).getBigint());
}
@Test
public void bitorLargeInt() {
assertEquals("100", ScalarOperatorFunctions.bitorLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void bitxorTinyInt() {
assertEquals(0, ScalarOperatorFunctions.bitxorTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@Test
public void bitxorSmallInt() {
assertEquals(0, ScalarOperatorFunctions.bitxorSmallInt(O_SI_10, O_SI_10).getSmallint());
}
@Test
public void bitxorInt() {
assertEquals(0, ScalarOperatorFunctions.bitxorInt(O_INT_10, O_INT_10).getInt());
}
@Test
public void bitxorBigint() {
assertEquals(0, ScalarOperatorFunctions.bitxorBigint(O_BI_100, O_BI_100).getBigint());
}
@Test
public void bitxorLargeInt() {
assertEquals("0", ScalarOperatorFunctions.bitxorLargeInt(O_LI_100, O_LI_100).getLargeInt().toString());
}
@Test
public void bitShiftLeftTinyInt() {
assertEquals(80, ScalarOperatorFunctions.bitShiftLeftTinyInt(O_TI_10, O_BI_3).getTinyInt());
}
@Test
public void bitShiftLeftSmallInt() {
assertEquals(80, ScalarOperatorFunctions.bitShiftLeftSmallInt(O_SI_10, O_BI_3).getSmallint());
}
@Test
public void bitShiftLeftInt() {
assertEquals(80, ScalarOperatorFunctions.bitShiftLeftInt(O_INT_10, O_BI_3).getInt());
}
@Test
public void bitShiftLeftBigint() {
assertEquals(800, ScalarOperatorFunctions.bitShiftLeftBigint(O_BI_100, O_BI_3).getBigint());
}
@Test
public void bitShiftLeftLargeInt() {
assertEquals("800", ScalarOperatorFunctions.bitShiftLeftLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
}
@Test
public void bitShiftRightTinyInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightTinyInt(O_TI_10, O_BI_3).getTinyInt());
}
@Test
public void bitShiftRightSmallInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightSmallInt(O_SI_10, O_BI_3).getSmallint());
}
@Test
public void bitShiftRightInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightInt(O_INT_10, O_BI_3).getInt());
}
@Test
public void bitShiftRightBigint() {
assertEquals(12, ScalarOperatorFunctions.bitShiftRightBigint(O_BI_100, O_BI_3).getBigint());
}
@Test
public void bitShiftRightLargeInt() {
assertEquals("12", ScalarOperatorFunctions.bitShiftRightLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
}
@Test
public void bitShiftRightLogicalTinyInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalTinyInt(O_TI_10, O_BI_3).getTinyInt());
}
@Test
public void bitShiftRightLogicalSmallInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalSmallInt(O_SI_10, O_BI_3).getSmallint());
}
@Test
public void bitShiftRightLogicalInt() {
assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalInt(O_INT_10, O_BI_3).getInt());
}
@Test
public void bitShiftRightLogicalBigint() {
assertEquals(12, ScalarOperatorFunctions.bitShiftRightLogicalBigint(O_BI_100, O_BI_3).getBigint());
}
@Test
public void bitShiftRightLogicalLargeInt() {
assertEquals("12",
ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_3).getLargeInt().toString());
assertEquals("800",
ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_NEG_3).getLargeInt().toString());
assertEquals("12",
ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_100, O_BI_131).getLargeInt().toString());
assertEquals("42535295865117307932921825928971026419",
ScalarOperatorFunctions.bitShiftRightLogicalLargeInt(O_LI_NEG_100, O_BI_3).getLargeInt().toString());
}
@Test
public void concat() {
ConstantOperator[] arg = {ConstantOperator.createVarchar("1"),
ConstantOperator.createVarchar("2"),
ConstantOperator.createVarchar("3")};
ConstantOperator result = ScalarOperatorFunctions.concat(arg);
assertEquals(Type.VARCHAR, result.getType());
assertEquals("123", result.getVarchar());
}
@Test
public void concat_ws() {
ConstantOperator[] arg = {ConstantOperator.createVarchar("1"),
ConstantOperator.createVarchar("2"),
ConstantOperator.createVarchar("3")};
ConstantOperator result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar(","), arg);
assertEquals(Type.VARCHAR, result.getType());
assertEquals("1,2,3", result.getVarchar());
}
@Test
public void concat_ws_with_null() {
ConstantOperator[] argWithNull = {ConstantOperator.createVarchar("star"),
ConstantOperator.createNull(Type.VARCHAR),
ConstantOperator.createVarchar("cks")};
ConstantOperator result =
ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar("ro"), argWithNull);
assertEquals(Type.VARCHAR, result.getType());
assertEquals("starrocks", result.getVarchar());
result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar(","),
ConstantOperator.createNull(Type.VARCHAR));
assertEquals("", result.getVarchar());
ConstantOperator[] argWithoutNull = {ConstantOperator.createVarchar("star"),
ConstantOperator.createVarchar("cks")};
result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createNull(Type.VARCHAR), argWithoutNull);
assertTrue(result.isNull());
}
@Test
public void fromUnixTime2() throws AnalysisException {
ConstantOperator date =
ScalarOperatorFunctions.fromUnixTime(O_BI_10, ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s"));
assertTrue(date.toString().matches("1970-01-01 0.*:00:10"));
}
@Test
public void testNonDeterministicFuncComp() {
CallOperator random = new CallOperator(FunctionSet.RANDOM, Type.DOUBLE, Lists.newArrayList());
CallOperator randomCopy = (CallOperator) random.clone();
assertEquals(random, randomCopy);
}
@Test
public void testUTCTimestamp() {
ConnectContext ctx = new ConnectContext(null);
ctx.setThreadLocalInfo();
ctx.setStartTime();
LocalDateTime expected = Instant.ofEpochMilli(ctx.getStartTime() / 1000 * 1000)
.atZone(ZoneOffset.UTC).toLocalDateTime();
assertEquals(expected, ScalarOperatorFunctions.utcTimestamp().getDatetime());
}
@Test
public void testNow() {
ConnectContext ctx = new ConnectContext(null);
ctx.setThreadLocalInfo();
ctx.setStartTime();
LocalDateTime expected = Instant.ofEpochMilli(ctx.getStartTime() / 1000 * 1000)
.atZone(TimeUtils.getTimeZone().toZoneId()).toLocalDateTime();
assertEquals(expected, ScalarOperatorFunctions.now().getDatetime());
}
@Test
public void testNowWithParameter() throws AnalysisException {
ConnectContext ctx = new ConnectContext(null);
ctx.setThreadLocalInfo();
ctx.setStartTime();
LocalDateTime expected = ctx.getStartTimeInstant().atZone(TimeUtils.getTimeZone().toZoneId()).toLocalDateTime();
assertEquals(expected, ScalarOperatorFunctions.now(new ConstantOperator(6, Type.INT)).getDatetime());
}
@Test
public void testSubString() {
assertEquals("ab", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(1), ConstantOperator.createInt(2)).getVarchar());
assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(1)).getVarchar());
assertEquals("cd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(-2)).getVarchar());
assertEquals("c", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(-2), ConstantOperator.createInt(1)).getVarchar());
assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(1), ConstantOperator.createInt(4)).getVarchar());
assertEquals("abcd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(1), ConstantOperator.createInt(10)).getVarchar());
assertEquals("cd", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(3), ConstantOperator.createInt(4)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(0), ConstantOperator.createInt(2)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(ConstantOperator.createVarchar("abcd"),
ConstantOperator.createInt(5), ConstantOperator.createInt(2)).getVarchar());
assertEquals("starrocks", ScalarOperatorFunctions.substring(
new ConstantOperator("starrockscluster", Type.VARCHAR),
new ConstantOperator(1, Type.INT),
new ConstantOperator(9, Type.INT)).getVarchar());
assertEquals("rocks", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(-5, Type.INT),
new ConstantOperator(5, Type.INT)).getVarchar());
assertEquals("s", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(-1, Type.INT),
new ConstantOperator(8, Type.INT)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(-100, Type.INT),
new ConstantOperator(5, Type.INT)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(0, Type.INT),
new ConstantOperator(5, Type.INT)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(-1, Type.INT),
new ConstantOperator(0, Type.INT)).getVarchar());
assertEquals("apple", ScalarOperatorFunctions.substring(
new ConstantOperator("apple", Type.VARCHAR),
new ConstantOperator(-5, Type.INT),
new ConstantOperator(5, Type.INT)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(0, Type.INT)).getVarchar());
assertEquals("starrocks", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(1, Type.INT)).getVarchar());
assertEquals("s", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(9, Type.INT)).getVarchar());
assertEquals("", ScalarOperatorFunctions.substring(
new ConstantOperator("starrocks", Type.VARCHAR),
new ConstantOperator(10, Type.INT)).getVarchar());
}
    @Test
    public void testUrlExtractParameter() {
        // NOTE(review): the URL string literals below appear truncated by extraction
        // ("https: ..." is unterminated); verify against the original test source.
        // Happy path: parameter "k1" is present in the query string.
        assertEquals("100", ScalarOperatorFunctions.urlExtractParameter(
                new ConstantOperator("https:
                new ConstantOperator("k1", Type.VARCHAR)
        ).getVarchar());
        // A non-URL input yields NULL.
        assertEquals(ScalarOperatorFunctions.urlExtractParameter(
                new ConstantOperator("1234i5", Type.VARCHAR),
                new ConstantOperator("k1", Type.VARCHAR)),
                ConstantOperator.createNull(Type.VARCHAR));
        // A parameter that is absent from the URL yields NULL.
        assertEquals(ScalarOperatorFunctions.urlExtractParameter(
                new ConstantOperator("https:
                new ConstantOperator("k3", Type.VARCHAR)),
                ConstantOperator.createNull(Type.VARCHAR));
    }
}
|
This should be in an `else` block of the `if (errorAfter ...)` check above, so that no event is processed in the same iteration in which the error is raised. See the .NET version: https://github.com/Azure/azure-sdk-for-net/blob/main/common/Perf/Azure.Sample.Perf/Event/MockEventProcessor.cs#L131
|
private void process(int partition) {
MockEventContext mockEventContext = mockEventContexts[partition];
if (maxEventsPerSecond > 0) {
while (process) {
long elapsedTime = (System.nanoTime() - startTime);
if (errorAfter != null && !errorRaised
&& (errorAfter.compareTo(Duration.ofNanos(elapsedTime)) < 0)) {
errorLock.lock();
if (!errorRaised) {
processError(partition, new IllegalStateException("Test Exception"));
errorRaised = true;
}
}
int eventsSent = eventsRaised[partition];
double targetEventsSent = ((double) (elapsedTime / 1_000_000_000))
* maxEventsPerSecondPerPartition;
if (eventsSent < targetEventsSent) {
processEvent.accept(mockEventContext);
eventsRaised[partition]++;
} else {
try {
Thread.sleep((long) ((1 / maxEventsPerSecondPerPartition) * 1000));
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
} else {
while (process) {
if (errorAfter != null && !errorRaised
&& (errorAfter.compareTo(Duration.ofNanos((System.nanoTime() - startTime))) < 0)) {
errorLock.lock();
if (!errorRaised) {
processError(partition, new IllegalStateException("Test Exception"));
errorRaised = true;
}
}
processEvent.accept(mockEventContext);
eventsRaised[partition]++;
}
}
}
|
processEvent.accept(mockEventContext);
|
    /**
     * Pumps mock events for a single partition until {@code process} is cleared.
     *
     * <p>With a positive {@code maxEventsPerSecond}, events are throttled to the
     * per-partition rate; otherwise they are raised continuously. Once
     * {@code errorAfter} has elapsed, the error is raised exactly once (guarded by
     * {@code errorLock}) and no event is processed in that iteration.
     *
     * @param partition index of the partition to pump events for
     */
    private void process(int partition) {
        MockEventContext mockEventContext = mockEventContexts[partition];
        if (maxEventsPerSecond > 0) {
            while (process) {
                long elapsedTime = (System.nanoTime() - startTime);
                if (errorAfter != null && !errorRaised
                    && (errorAfter.compareTo(Duration.ofNanos(elapsedTime)) < 0)) {
                    errorLock.lock();
                    // Double-check under the lock so only one thread raises the error.
                    try {
                        if (!errorRaised) {
                            processError(partition, new IllegalStateException("Test Exception"));
                            errorRaised = true;
                        }
                    } finally {
                        errorLock.unlock();
                    }
                } else {
                    int eventsSent = eventsRaised[partition];
                    // Only raise an event while behind the target rate for this partition.
                    double targetEventsSent = ((double) (elapsedTime / 1_000_000_000))
                        * maxEventsPerSecondPerPartition;
                    if (eventsSent < targetEventsSent) {
                        processEvent.accept(mockEventContext);
                        eventsRaised[partition]++;
                    } else {
                        try {
                            // Ahead of schedule: sleep roughly one event interval.
                            Thread.sleep((long) ((1 / maxEventsPerSecondPerPartition) * 1000));
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
            }
        } else {
            // Unthrottled path: raise events continuously until stopped.
            while (process) {
                if (errorAfter != null && !errorRaised
                    && (errorAfter.compareTo(Duration.ofNanos((System.nanoTime() - startTime))) < 0)) {
                    errorLock.lock();
                    try {
                        if (!errorRaised) {
                            processError(partition, new IllegalStateException("Test Exception"));
                            errorRaised = true;
                        }
                    } finally {
                        errorLock.unlock();
                    }
                } else {
                    processEvent.accept(mockEventContext);
                    eventsRaised[partition]++;
                }
            }
        }
    }
|
/**
 * A mock event processor that raises synthetic events across a fixed set of
 * partitions, optionally throttled and optionally failing after a configured
 * duration. Intended for perf testing; not a real event-hub client.
 */
class MockEventProcessor {
    private final Consumer<MockErrorContext> processError;
    private final Consumer<MockEventContext> processEvent;
    // Written by stop() from another thread and polled by the processing loops;
    // volatile so the loops reliably observe the shutdown signal.
    private volatile boolean process;
    private final double maxEventsPerSecondPerPartition;
    private final int maxEventsPerSecond;
    private final int partitions;
    private final Duration errorAfter;
    private boolean errorRaised;
    private final ReentrantLock errorLock;
    private final MockEventContext[] mockEventContexts;
    private int[] eventsRaised;
    private long startTime;
    private final AtomicReference<ScheduledFuture<?>> runner = new AtomicReference<>();
    private final AtomicReference<ScheduledExecutorService> scheduler = new AtomicReference<>();
    /**
     * Creates an instance of a mock event processor
     *
     * @param partitions the number of partitions
     * @param maxEventsPerSecond the maximum events per second to send, optional.
     * @param errorAfter the duration after which processor should error out, optional.
     * @param processError the consumer to process the error.
     * @param processEvent the consumer to process the event.
     */
    public MockEventProcessor(int partitions, int maxEventsPerSecond, Duration errorAfter,
        Consumer<MockErrorContext> processError, Consumer<MockEventContext> processEvent) {
        this.processError = processError;
        this.processEvent = processEvent;
        this.partitions = partitions;
        this.maxEventsPerSecond = maxEventsPerSecond;
        this.maxEventsPerSecondPerPartition = ((double) maxEventsPerSecond) / partitions;
        this.errorAfter = errorAfter;
        this.errorLock = new ReentrantLock();
        mockEventContexts = new MockEventContext[partitions];
        IntStream.range(0, partitions).boxed().forEach(integer -> {
            mockEventContexts[integer] = new MockEventContext(integer, "Hello");
        });
        this.eventsRaised = new int[partitions];
    }
    /**
     * Starts the event processor.
     */
    public synchronized void start() {
        eventsRaised = new int[eventsRaised.length];
        process = true;
        errorRaised = false;
        startTime = System.nanoTime();
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        scheduler.set(executor);
        // Fix: the previous "jitter" was nextDouble() * TimeUnit.SECONDS.toMillis(0),
        // which is always 0 (and needlessly boxed into a Double) - schedule immediately.
        runner.set(scheduler.get().schedule(this::processEvents, 0L, TimeUnit.MILLISECONDS));
    }
    /** Round-robins over all partitions on the scheduler thread until stop() clears the flag. */
    private Mono<Void> processEvents() {
        while (process) {
            for (int i = 0; i < partitions; i++) {
                process(i);
            }
        }
        return Mono.empty();
    }
    /** Forwards the error to the consumer and shuts the processor down. */
    private void processError(int partition, Throwable throwable) {
        processError.accept(new MockErrorContext(partition, throwable));
        stop();
    }
    /**
     * Stops the Event Processor.
     */
    public synchronized void stop() {
        runner.get().cancel(true);
        scheduler.get().shutdown();
        this.process = false;
    }
}
|
/**
 * A mock event processor that raises synthetic events across a fixed set of
 * partitions in parallel, optionally throttled and optionally failing after a
 * configured duration. Intended for perf testing; not a real event-hub client.
 */
class MockEventProcessor {
    private final Consumer<MockErrorContext> processError;
    private final Consumer<MockEventContext> processEvent;
    // Written by stop() from another thread and polled by the per-partition loops.
    private volatile boolean process;
    private final double maxEventsPerSecondPerPartition;
    private final int maxEventsPerSecond;
    private final int partitions;
    private final Duration errorAfter;
    private boolean errorRaised;
    private final ReentrantLock errorLock;
    // Guards against subscribing the partition pipeline more than once per start().
    private volatile boolean processPartitions;
    private final MockEventContext[] mockEventContexts;
    private int[] eventsRaised;
    private long startTime;
    private final AtomicReference<ScheduledFuture<?>> runner = new AtomicReference<>();
    private final AtomicReference<ScheduledExecutorService> scheduler = new AtomicReference<>();
    /**
     * Creates an instance of a mock event processor
     *
     * @param partitions the number of partitions
     * @param maxEventsPerSecond the maximum events per second to send, optional.
     * @param errorAfter the duration after which processor should error out, optional.
     * @param processError the consumer to process the error.
     * @param processEvent the consumer to process the event.
     */
    public MockEventProcessor(int partitions, int maxEventsPerSecond, Duration errorAfter,
        Consumer<MockErrorContext> processError, Consumer<MockEventContext> processEvent) {
        this.processError = processError;
        this.processEvent = processEvent;
        this.partitions = partitions;
        this.maxEventsPerSecond = maxEventsPerSecond;
        this.maxEventsPerSecondPerPartition = ((double) maxEventsPerSecond) / partitions;
        this.errorAfter = errorAfter;
        this.errorLock = new ReentrantLock();
        this.processPartitions = true;
        mockEventContexts = new MockEventContext[partitions];
        IntStream.range(0, partitions).boxed().forEach(integer -> {
            mockEventContexts[integer] = new MockEventContext(integer, "Hello");
        });
        this.eventsRaised = new int[partitions];
    }
    /**
     * Starts the event processor.
     */
    public synchronized void start() {
        // Reset all per-run state before scheduling the pump.
        eventsRaised = new int[eventsRaised.length];
        process = true;
        errorRaised = false;
        processPartitions = true;
        startTime = System.nanoTime();
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        scheduler.set(executor);
        runner.set(scheduler.get().schedule(this::processEvents,
            0l, TimeUnit.MILLISECONDS));
    }
    // Fans the partitions out onto the bounded-elastic scheduler; each partition
    // then loops in process(int) until stop() is called.
    private Mono<Void> processEvents() {
        if (processPartitions) {
            Flux.range(0, partitions)
                .parallel()
                .runOn(Schedulers.boundedElastic())
                .subscribe(integer -> process(integer));
            processPartitions = false;
        }
        return Mono.empty();
    }
    // Forwards the error to the consumer and shuts the processor down.
    private void processError(int partition, Throwable throwable) {
        processError.accept(new MockErrorContext(partition, throwable));
        stop();
    }
    /**
     * Stops the Event Processor.
     */
    public synchronized void stop() {
        runner.get().cancel(true);
        scheduler.get().shutdown();
        this.process = false;
    }
}
|
nit: "a bit" is a little confusing since the code is just waiting; the same applies below.
|
public void testScheduledBudgetRefresh() throws InterruptedException {
  // The refresher's periodic schedule alone (no explicit request) should
  // eventually run the redistribution callback and count the latch down.
  CountDownLatch refreshFired = new CountDownLatch(1);
  GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(refreshFired::countDown);
  budgetRefresher.start();
  refreshFired.await();
  assertThat(refreshFired.getCount()).isEqualTo(0);
}
|
public void testScheduledBudgetRefresh() throws InterruptedException {
  // Scheduled (non-triggered) refresh: merely starting the refresher must
  // invoke the redistribution callback at least once.
  CountDownLatch latch = new CountDownLatch(1);
  Runnable onRedistribute = latch::countDown;
  GetWorkBudgetRefresher refresher = createBudgetRefresher(onRedistribute);
  refresher.start();
  latch.await();
  assertThat(latch.getCount()).isEqualTo(0);
}
|
/** Tests for {@code GetWorkBudgetRefresher}: triggered, scheduled, paused, and stopped paths. */
class GetWorkBudgetRefresherTest {
  // Small grace period used when asserting that a callback did NOT run.
  private static final int WAIT_BUFFER = 10;
  @Rule public transient Timeout globalTimeout = Timeout.seconds(600);

  private GetWorkBudgetRefresher createBudgetRefresher(Runnable redistributeBudget) {
    return createBudgetRefresher(false, redistributeBudget);
  }

  private GetWorkBudgetRefresher createBudgetRefresher(
      boolean isBudgetRefreshPaused, Runnable redistributeBudget) {
    return new GetWorkBudgetRefresher(() -> isBudgetRefreshPaused, redistributeBudget);
  }

  @Test
  public void testStop_successfullyTerminates() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.stop();
    budgetRefresher.requestBudgetRefresh();
    // After stop(), a refresh request must not run the callback.
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(WAIT_BUFFER, TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }

  @Test
  public void testRequestBudgetRefresh_triggersBudgetRefresh() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.requestBudgetRefresh();
    redistributeBudgetLatch.await();
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(0);
  }

  // Fix: @Test was declared twice on this method; JUnit's @Test is not a
  // repeatable annotation, so the duplicate does not compile.
  @Test
  public void testTriggeredAndScheduledBudgetRefresh_concurrent() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(2);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    Thread budgetRefreshTriggerThread = new Thread(budgetRefresher::requestBudgetRefresh);
    budgetRefreshTriggerThread.start();
    budgetRefreshTriggerThread.join();
    redistributeBudgetLatch.await();
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(0);
  }

  @Test
  public void testTriggeredBudgetRefresh_doesNotRunWhenBudgetRefreshPaused()
      throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(true, redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.requestBudgetRefresh();
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(WAIT_BUFFER, TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }

  @Test
  public void testScheduledBudgetRefresh_doesNotRunWhenBudgetRefreshPaused()
      throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(true, redistributeBudget);
    budgetRefresher.start();
    // Wait slightly past the scheduled refresh interval to prove it never fires.
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(
            GetWorkBudgetRefresher.SCHEDULED_BUDGET_REFRESH_MILLIS + WAIT_BUFFER,
            TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }
}
|
/** Tests for {@code GetWorkBudgetRefresher}: triggered, scheduled, paused, and stopped paths. */
class GetWorkBudgetRefresherTest {
  // Small grace period used when asserting that a callback did NOT run.
  private static final int WAIT_BUFFER = 10;
  @Rule public transient Timeout globalTimeout = Timeout.seconds(600);

  private GetWorkBudgetRefresher createBudgetRefresher(Runnable redistributeBudget) {
    return createBudgetRefresher(false, redistributeBudget);
  }

  private GetWorkBudgetRefresher createBudgetRefresher(
      boolean isBudgetRefreshPaused, Runnable redistributeBudget) {
    return new GetWorkBudgetRefresher(() -> isBudgetRefreshPaused, redistributeBudget);
  }

  @Test
  public void testStop_successfullyTerminates() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.stop();
    budgetRefresher.requestBudgetRefresh();
    // After stop(), a refresh request must not run the callback.
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(WAIT_BUFFER, TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }

  @Test
  public void testRequestBudgetRefresh_triggersBudgetRefresh() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.requestBudgetRefresh();
    redistributeBudgetLatch.await();
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(0);
  }

  // Fix: @Test was declared twice on this method; JUnit's @Test is not a
  // repeatable annotation, so the duplicate does not compile.
  @Test
  public void testTriggeredAndScheduledBudgetRefresh_concurrent() throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(2);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
    budgetRefresher.start();
    Thread budgetRefreshTriggerThread = new Thread(budgetRefresher::requestBudgetRefresh);
    budgetRefreshTriggerThread.start();
    budgetRefreshTriggerThread.join();
    redistributeBudgetLatch.await();
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(0);
  }

  @Test
  public void testTriggeredBudgetRefresh_doesNotRunWhenBudgetRefreshPaused()
      throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(true, redistributeBudget);
    budgetRefresher.start();
    budgetRefresher.requestBudgetRefresh();
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(WAIT_BUFFER, TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }

  @Test
  public void testScheduledBudgetRefresh_doesNotRunWhenBudgetRefreshPaused()
      throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(true, redistributeBudget);
    budgetRefresher.start();
    // Wait slightly past the scheduled refresh interval to prove it never fires.
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(
            GetWorkBudgetRefresher.SCHEDULED_BUDGET_REFRESH_MILLIS + WAIT_BUFFER,
            TimeUnit.MILLISECONDS);
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
  }
}
|
|
Shall we also recheck the formatting of all the newly introduced code?
|
public void visit(BLangFunction funcNode) {
    // Each function is desugared inside its own function-level environment.
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    if (!funcNode.interfaceFunction) {
        addReturnIfNotPresent(funcNode);
    }
    // Remember the original symbol, then work on a duplicate so the original stays intact.
    funcNode.originalFuncSymbol = funcNode.symbol;
    funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
    funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv);
    funcNode.workers = rewrite(funcNode.workers, funcEnv);
    // Pick out transaction-participant annotations before the body is rewritten.
    List<BLangAnnotationAttachment> participantAnnotations = funcNode.annAttachments.stream()
            .filter(attachment -> Transactions.isTransactionsAnnotation(attachment.pkgAlias.value,
                    attachment.annotationName.value))
            .collect(Collectors.toList());
    funcNode.body = rewrite(funcNode.body, funcEnv);
    funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    if (funcNode.returnTypeNode != null) {
        funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    if (Symbols.isNative(funcNode.symbol)) {
        funcNode.externalAnnAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    // Plain functions pass through unchanged; participants get wrapped further.
    if (participantAnnotations.isEmpty()) {
        result = funcNode;
        return;
    }
    result = desugarParticipantFunction(funcNode, participantAnnotations);
}
/**
 * Rewrites a transaction-participant function so that its original body runs through the
 * transaction module's participant-begin function, wiring up the annotation-supplied
 * onCommit/onAbort callbacks (or no-op defaults when they are absent).
 *
 * @param funcNode the participant function being desugared
 * @param participantAnnotation the participant annotation attachments (first one is used)
 * @return the same function node with its body replaced by the participant invocation
 */
private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
                                                 List<BLangAnnotationAttachment> participantAnnotation) {
    BLangAnnotationAttachment annotation = participantAnnotation.get(0);
    BLangBlockStmt onCommitBody = null;
    BLangBlockStmt onAbortBody = null;
    // Parameters (and the receiver) are captured by the generated lambdas, so mark them as closures.
    funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
    if (funcNode.receiver != null) {
        funcNode.receiver.symbol.closure = true;
    }
    BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    // Skeleton lambdas for the commit/abort callbacks; bodies are filled in below.
    BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
                                                          ASTBuilderUtil.createTypeNode(symTable.nilType));
    BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
                                                         ASTBuilderUtil.createTypeNode(symTable.nilType));
    // Each callback takes the transaction id as its single string parameter.
    BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                            new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                           symTable.stringType, commitFunc.function.symbol));
    BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                            new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                           symTable.stringType, abortFunc.function.symbol));
    BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
    BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
    // Walk the annotation record literal for oncommit/onabort function references.
    List<BLangRecordLiteral.BLangRecordKeyValue> valuePairs =
            ((BLangRecordLiteral) annotation.expr).getKeyValuePairs();
    for (BLangRecordLiteral.BLangRecordKeyValue keyValuePair : valuePairs) {
        String func = (String) ((BLangLiteral) keyValuePair.getKey()).value;
        switch (func) {
            case Transactions.TRX_ONCOMMIT_FUNC:
                BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) keyValuePair.valueExpr).symbol;
                BLangInvocation onCommit = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
                                                    Collections.emptyList(), symResolver);
                BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
                onCommitBody = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(onCommitStmt));
                break;
            case Transactions.TRX_ONABORT_FUNC:
                BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) keyValuePair.valueExpr).symbol;
                BLangInvocation onAbort = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
                                                    Collections.emptyList(), symResolver);
                BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
                onAbortBody = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(onAbortStmt));
                break;
        }
    }
    // No user-supplied callback: fall back to a body that just returns nil.
    if (onCommitBody == null) {
        onCommitBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    if (onAbortBody == null) {
        onAbortBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    // Finish wiring the callback lambdas: body, parameter, and invokable type.
    commitFunc.function.body = onCommitBody;
    commitFunc.function.requiredParams.add(onCommitTrxVar);
    commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
                                         commitFunc.function.symbol.type.getReturnType(), null);
    commitFunc.function.symbol.type = commitFunc.type;
    commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
    abortFunc.function.body = onAbortBody;
    abortFunc.function.requiredParams.add(onAbortTrxVar);
    abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
                                        abortFunc.function.symbol.type.getReturnType(), null);
    abortFunc.function.symbol.type = abortFunc.type;
    abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
    // Locate the ballerina/transactions module import to resolve the begin function.
    BSymbol trxModSym = env.enclPkg.imports
            .stream()
            .filter(importPackage -> importPackage.symbol.
                    pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
                                                    + Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(trxModSym),
                                                        getParticipantFunctionName(funcNode), SymTag.FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
                                                                   getTransactionBlockId());
    // Wrap the original body in a lambda so it can run under the participant protocol.
    BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
                                                                  Collections.emptyList(),
                                                                  funcNode.returnTypeNode,
                                                                  funcNode.body);
    funcNode.requiredParams.forEach(var -> trxMainWrapperFunc.function.closureVarSymbols
            .add(new ClosureVarSymbol(var.symbol, var.pos)));
    BLangBlockStmt trxMainBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
    BLangLambdaFunction trxMainFunc
            = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
                                   trxReturnNode, trxMainBody);
    trxMainWrapperFunc.cachedEnv = trxMainFunc.function.clonedEnv;
    commitFunc.cachedEnv = env.createClone();
    abortFunc.cachedEnv = env.createClone();
    // Define the wrapper lambda as a local variable inside the participant lambda's body,
    // then invoke it through the function pointer and return its (converted) result.
    BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
                                           trxMainWrapperFunc.type, trxMainFunc.function.symbol);
    BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
                                                                       trxMainWrapperFunc.type, trxMainWrapperFunc,
                                                                       wrapperSym);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
    variableDef.var = wrapperFuncVar;
    BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
                                                                               wrapperFuncVar.symbol), env);
    BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
                                                                       wrapperFuncVar.symbol,
                                                                       trxMainWrapperFunc.function.symbol.retType);
    BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (wrapperInvocation, trxReturnNode.type));
    trxMainWrapperFunc.function.receiver = funcNode.receiver;
    trxMainFunc.function.receiver = funcNode.receiver;
    trxMainBody.stmts.add(wrapperReturn);
    rewrite(trxMainFunc.function, env);
    // Replace the function body with: return <begin-participant>(blockId, main, commit, abort);
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
    BLangInvocation participantInvocation
            = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
                                                        Collections.emptyList(), symResolver);
    participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
    BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (participantInvocation, funcNode.symbol.retType));
    funcNode.body = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(rewrite(stmt, env)));
    return funcNode;
}
/**
 * Picks the transaction-module begin function matching the participant kind:
 * resource functions register as remote participants, all others as local.
 */
private Name getParticipantFunctionName(BLangFunction function) {
    boolean isResource = Symbols.isFlagOn(function.symbol.flags, Flags.RESOURCE);
    return isResource ? TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION : TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
public void visit(BLangForever foreverStatement) {
    // Lower the forever statement via the streaming desugar, then run the
    // normal desugar pass over the generated block.
    result = streamingCodeDesugar.desugar(foreverStatement);
    result = rewrite(result, env);
    // Variable definitions produced by the lowering must be registered in the
    // generated block's scope so later lookups can resolve them.
    BLangBlockStmt generatedBlock = (BLangBlockStmt) result;
    generatedBlock.stmts.stream()
            .filter(stmt -> stmt.getKind() == NodeKind.VARIABLE_DEF)
            .forEach(stmt -> {
                BLangSimpleVariableDef varDef = (BLangSimpleVariableDef) stmt;
                generatedBlock.scope.define(varDef.var.symbol.name, varDef.var.symbol);
            });
}
@Override
public void visit(BLangResource resourceNode) {
    // Intentionally empty: resources are not desugared here.
}
public void visit(BLangAnnotation annotationNode) {
    // Desugar each attachment on the annotation declaration itself.
    for (BLangAnnotationAttachment attachment : annotationNode.annAttachments) {
        rewrite(attachment, env);
    }
}
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    // Desugar the attachment's value expression in place; the node itself is the result.
    BLangExpression desugaredExpr = rewrite(annAttachmentNode.expr, env);
    annAttachmentNode.expr = desugaredExpr;
    result = annAttachmentNode;
}
@Override
public void visit(BLangSimpleVariable varNode) {
    // When the variable's owner is not an invokable (e.g. module level), the
    // initializer is discarded here and the node passes through unchanged.
    boolean ownedByInvokable = (varNode.symbol.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE;
    if (!ownedByInvokable) {
        varNode.expr = null;
        result = varNode;
        return;
    }
    // Local variable: desugar the initializer and coerce it to the declared type.
    BLangExpression initExpr = rewriteExpr(varNode.expr);
    if (initExpr != null) {
        initExpr = addConversionExprIfRequired(initExpr, varNode.type);
    }
    varNode.expr = initExpr;
    for (BLangAnnotationAttachment attachment : varNode.annAttachments) {
        rewrite(attachment, env);
    }
    result = varNode;
}
@Override
public void visit(BLangTupleVariable varNode) {
    // Tuple variables are desugared at their definition site
    // (see visit(BLangTupleVariableDef)); nothing to do for the bare node.
    result = varNode;
}
@Override
public void visit(BLangRecordVariable varNode) {
    // Record variables are desugared at their definition site
    // (see visit(BLangRecordVariableDef)); nothing to do for the bare node.
    result = varNode;
}
@Override
public void visit(BLangErrorVariable varNode) {
    // Error variables are desugared at their definition site
    // (see visit(BLangErrorVariableDef)); nothing to do for the bare node.
    result = varNode;
}
@Override
public void visit(BLangBlockStmt block) {
    // Statements are desugared inside a fresh child environment for the block.
    SymbolEnv childEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, childEnv);
    result = block;
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    // Desugar the contained variable (and its initializer); the def node
    // itself is returned unchanged.
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    // Destructuring desugar: bind the tuple expression to a synthetic any[]
    // variable, then generate one var-def per member reading from it.
    varDefNode.var = rewrite(varDefNode.var, env);
    BLangTupleVariable tupleVariable = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    // Synthetic holder variable that carries the tuple value at runtime.
    final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    tuple.expr = tupleVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = tuple;
    // Member bindings first, then the rest (...x) binding if present.
    createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
    createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}
/**
 * Generates statements that populate a tuple binding pattern's rest variable:
 * initializes it to an empty array, then appends every source element past the
 * declared members via a generated foreach over the remaining index range.
 *
 * @param parentTupleVariable the tuple binding pattern being desugared
 * @param blockStmt the block the generated statements are appended to
 * @param tupleVarSymbol symbol of the synthetic holder for the tuple value
 */
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        // rest var starts out as an empty array literal.
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        // Remaining elements begin right after the explicitly declared members.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        // Generated: foreach $foreach$i in start...length { rest[rest.length()] = tuple[$foreach$i]; }
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Appending via index rest[rest.length()] = ... keeps the array growing.
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    // Destructuring desugar: bind the record expression to a synthetic
    // map<any> variable, then generate one var-def per field reading from it.
    BLangRecordVariable varNode = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
            null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                    runTimeType, this.env.scope.owner));
    mapVariable.expr = varDefNode.var.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    // Destructuring desugar: bind the error expression to a synthetic $error$
    // variable, then generate var-defs for the reason/detail bindings from it.
    BLangErrorVariable errorVariable = varDefNode.errorVariable;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
            symTable.errorType, null, errorVarSymbol);
    error.expr = errorVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = error;
    createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * This method iterates through each member of the tupleVar and creates the relevant var def statements. It checks
 * the node kind of each member and calls the related var def creation method.
 *
 * Example:
 * ((string, float), int) ((a, b), c) = tuple
*
* (a, b) is again a tuple, so it is a recursive var def creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
    private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
        for (int index = 0; index < memberVars.size(); index++) {
            BLangVariable variable = memberVars.get(index);
            // Each member is read out of the tuple container by its positional index.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
            if (NodeKind.VARIABLE == variable.getKind()) {
                // Leaf binding: emit `T v = container[index];` directly.
                createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                        parentIndexAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
                // Nested tuple pattern: recurse with container[index] as the new parent access.
                BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record pattern: recurse, reading the member as a map.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error pattern: when the container is an array, read the member using the
                // array's element type instead of the generic error type.
                BType accessedElemType = symTable.errorType;
                if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                    BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                    accessedElemType = arrayType.eType;
                }
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            }
        }
    }
/**
* Overloaded method to handle record variables.
* This method iterate through each member of the recordVar and create the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* type Foo record {
* string name;
* (int, string) age;
* Address address;
* };
*
* Foo {name: a, age: (b, c), address: d} = {record literal}
*
* a is a simple var, so a simple var def will be created.
*
* (b, c) is a tuple, so it is a recursive var def creation.
*
* d is a record, so it is a recursive var def creation.
*
*/
    private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
        for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
            BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
            // Each field is read out of the record container by its field-name string key.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                    recordFieldKeyValue.key.value);
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
                // Leaf binding: emit `T v = container["field"];` directly.
                createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                        indexExpr, recordVarSymbol, parentIndexAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
                // Nested tuple pattern: recurse with container["field"] as the new parent access.
                BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                        parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record pattern: recurse, reading the field as a map.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                        recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error pattern: recurse with container["field"] as the new parent access.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            }
        }
        if (parentRecordVariable.restParam != null) {
            // Rest binding `...rest`: collect every entry whose key was NOT explicitly bound above
            // into a fresh map and assign it to the rest variable.
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
            BLangSimpleVarRef variableReference;
            if (parentIndexAccessExpr != null) {
                // Nested pattern: materialize the parent access into a temp so it is evaluated once.
                BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                        parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                                this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;
                variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top-level pattern: the first statement of the block is the "$map$0" temp var def.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                        ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }
            List<String> keysToRemove = parentRecordVariable.variableList.stream()
                    .map(var -> var.getKey().getValue())
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                    keysToRemove, restParamType, parentBlockStmt);
            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
            BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
            BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                    parentBlockStmt);
            restParamVarDef.var = restParam;
            restParamVarDef.var.type = restParamType;
            restParam.expr = varRef;
        }
    }
/**
* This method will create the relevant var def statements for reason and details of the error variable.
* The var def statements are created by creating the reason() and detail() builtin methods.
*/
    private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
        BVarSymbol convertedErrorVarSymbol;
        if (parentIndexBasedAccess != null) {
            // Nested pattern: convert container[index] to `error` via a temp var. The access node's
            // type is temporarily widened to `any` while building the conversion, then restored.
            BType prevType = parentIndexBasedAccess.type;
            parentIndexBasedAccess.type = symTable.anyType;
            BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                    symTable.errorType,
                    addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                    parentErrorVariable.pos);
            parentIndexBasedAccess.type = prevType;
            parentBlockStmt.addStatement(errorVarDef);
            convertedErrorVarSymbol = errorVarDef.var.symbol;
        } else {
            convertedErrorVarSymbol = errorVariableSymbol;
        }
        // Extract the reason via the reason() built-in; drop the binding when it is `_`.
        parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
                parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
        if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
            parentErrorVariable.reason = null;
        } else {
            BLangSimpleVariableDef reasonVariableDef =
                    ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
            reasonVariableDef.var = parentErrorVariable.reason;
        }
        // No detail bindings and no rest detail: nothing further to emit.
        if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
                && parentErrorVariable.restDetail == null) {
            return;
        }
        BType detailMapType;
        BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
        if (detailType.tag == TypeTags.MAP) {
            detailMapType = detailType;
        } else {
            detailMapType = symTable.detailType;
        }
        // Extract the detail map once into "$error$detail" and read every entry off that temp.
        parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
                parentErrorVariable.pos, detailMapType, parentBlockStmt,
                convertedErrorVarSymbol, null);
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
                parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
        detailTempVarDef.type = parentErrorVariable.detailExpr.type;
        parentBlockStmt.addStatement(detailTempVarDef);
        this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
            BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
            createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
        }
        if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
            // Rest detail binding: filter the already-bound keys out of the detail map and assign
            // the remainder to the rest variable.
            DiagnosticPos pos = parentErrorVariable.restDetail.pos;
            BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                    pos, detailTempVarDef.var.symbol);
            List<String> keysToRemove = parentErrorVariable.detail.stream()
                    .map(detail -> detail.key.getValue())
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                    parentErrorVariable.restDetail.type, parentBlockStmt);
            BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                    parentErrorVariable.restDetail.name.value,
                    filteredDetail.type,
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                    parentErrorVariable.restDetail.symbol);
            BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                    ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
            parentBlockStmt.addStatement(assignmentStmt);
        }
        rewrite(parentBlockStmt, env);
    }
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
BType targetType) {
BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
this.env.enclPkg.packageID, targetType, this.env.scope.owner);
BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
BLangExpression expr;
if (targetType.tag == TypeTags.RECORD) {
expr = variableRef;
} else {
expr = addConversionExprIfRequired(variableRef, targetType);
}
BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(pos, errorVarSym.name.value, targetType, expr,
errorVarSym);
return ASTBuilderUtil.createVariableDef(pos, errorVar);
}
    /**
     * Builds the statement sequence that produces a rest-binding map: all entries of the source
     * map except the explicitly bound keys. The pipeline is
     * {@code map -> entries() -> filter(keysToRemove) -> map(back to values) -> constructFrom(targetType)},
     * with each intermediate result stored in a uniquely named temp variable appended to
     * {@code parentBlockStmt}. Returns the variable holding the final filtered map.
     */
    private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                                   List<String> keysToRemove, BType targetType,
                                                   BLangBlockStmt parentBlockStmt) {
        BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
        // Unique suffix so repeated rest-filters in one scope do not collide.
        int restNum = annonVarCount++;
        String name = "$map$ref$" + restNum;
        BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
        // Step 1: entries() - turn the map into a map of (key, value) tuples.
        BLangInvocation entriesInvocation = generateMapEntriesInvocation(pos, typeCastExpr, mapVariable);
        String entriesVarName = "$map$ref$entries$" + restNum;
        BType entriesType = new BMapType(TypeTags.MAP,
                new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
        BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
                addConversionExprIfRequired(entriesInvocation, entriesType),
                entriesVarName);
        // Step 2: filter() - drop the entries whose key was explicitly bound.
        BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
        BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
        String filteredEntriesName = "$filtered$detail$entries" + restNum;
        BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
                filteredEntriesName);
        // Step 3: map() - project the (key, value) tuples back to plain values.
        String filteredVarName = "$detail$filtered" + restNum;
        BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
        BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
        BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
                mapInvocation,
                filteredVarName);
        // Step 4: constructFrom() - convert the result into the requested target map type.
        String filteredRestVarName = "$restVar$" + restNum;
        BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
        return defVariable(pos, targetType, parentBlockStmt,
                addConversionExprIfRequired(constructed, targetType),
                filteredRestVarName);
    }
private BLangInvocation generateMapEntriesInvocation(DiagnosticPos pos, BLangExpression typeCastExpr,
BLangSimpleVariable detailMap) {
BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), typeCastExpr.type);
invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, detailMap.symbol);
invocationNode.symbol = symResolver.lookupLangLibMethod(typeCastExpr.type, names.fromString("entries"));
invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, detailMap.symbol));
invocationNode.type = invocationNode.symbol.type.getReturnType();
return invocationNode;
}
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
BLangLambdaFunction backToMapLambda) {
BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
invocationNode.type = invocationNode.symbol.type.getReturnType();
invocationNode.requiredArgs.add(backToMapLambda);
return invocationNode;
}
    /**
     * Generates the lambda {@code function ((string, any) entry) returns any { return entry[1]; }}
     * used to project a map of (key, value) entry tuples back to a map of plain values.
     * The function is rewritten, added to the enclosing package, and wrapped in a lambda node.
     */
    private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
        String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
        BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
        // Single parameter: the (string, any) entry tuple.
        BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
                getStringAnyTupleType(), this.env.scope.owner);
        BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
                null, keyValSymbol);
        function.requiredParams.add(inputParameter);
        BLangValueType anyType = new BLangValueType();
        anyType.typeKind = TypeKind.ANY;
        anyType.type = symTable.anyType;
        function.returnTypeNode = anyType;
        BLangBlockStmt functionBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
        function.body = functionBlock;
        // Body: `var val = entry[1]; return val;` - index 1 is the value part of the tuple.
        BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
                symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 1));
        BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
                indexBasesAccessExpr, pos);
        functionBlock.addStatement(tupSecondElem);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
        returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
        // Create and wire up the function symbol so the lambda can be invoked.
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
                new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
        functionSymbol.retType = function.returnTypeNode.type;
        functionSymbol.params = function.requiredParams.stream()
                .map(param -> param.symbol)
                .collect(Collectors.toList());
        functionSymbol.scope = env.scope;
        functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
                symTable.anyType, null);
        function.symbol = functionSymbol;
        rewrite(function, env);
        env.enclPkg.addFunction(function);
        return createLambdaFunction(function, functionSymbol);
    }
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
BLangSimpleVariable entriesInvocationVar,
BLangLambdaFunction filter) {
BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
invocationNode.type = invocationNode.symbol.type.getReturnType();
BLangInvocation filterInvoke = invocationNode;
filterInvoke.requiredArgs.add(filter);
return filterInvoke;
}
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
BLangExpression expression, String name) {
Name varName = names.fromString(name);
BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression,
new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner));
BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap);
constructedMap.type = varType;
parentBlockStmt.addStatement(constructedMap);
env.scope.define(varName, detailMap.symbol);
return detailMap;
}
    /**
     * Appends a variable definition for one error-detail binding pattern to the given block,
     * dispatching on the pattern kind (simple, record, or tuple). {@code detailEntryVar} is the
     * expression that reads this entry out of the temporary detail map.
     */
    private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                              BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                              BLangExpression detailEntryVar) {
        if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            // Simple binding: `T name = detail["key"];`
            BLangSimpleVariableDef errorDetailVar = createVarDef(
                    ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                    detailEntry.valueBindingPattern.type,
                    detailEntryVar,
                    detailEntry.valueBindingPattern.pos);
            parentBlockStmt.addStatement(errorDetailVar);
        } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            // Nested record pattern: the entry expression becomes the record var def's RHS.
            BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
                    detailEntry.valueBindingPattern.pos,
                    (BLangRecordVariable) detailEntry.valueBindingPattern);
            recordVariableDef.var.expr = detailEntryVar;
            recordVariableDef.type = symTable.recordType;
            parentBlockStmt.addStatement(recordVariableDef);
        } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            // Nested tuple pattern. NOTE(review): unlike the record branch, detailEntryVar is not
            // assigned here - confirm whether the tuple var def picks up its RHS elsewhere.
            BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
                    detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
            parentBlockStmt.addStatement(tupleVariableDef);
        }
    }
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
BVarSymbol tempDetailVarSymbol) {
BLangExpression detailEntryVar = createIndexBasedAccessExpr(
detailEntry.valueBindingPattern.type,
detailEntry.valueBindingPattern.pos,
createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
tempDetailVarSymbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
return detailEntryVar;
}
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
BLangExpression concatExpr = null;
BLangExpression currentExpr;
for (BLangExpression expr : exprs) {
currentExpr = expr;
if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
currentExpr = getToStringInvocationOnExpr(expr);
}
if (concatExpr == null) {
concatExpr = currentExpr;
continue;
}
concatExpr =
ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
concatExpr.type.tag == TypeTags.XML ||
currentExpr.type.tag == TypeTags.XML ?
symTable.xmlType : symTable.stringType,
OperatorKind.ADD, null);
}
return concatExpr;
}
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
.lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{
add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
}};
return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
symResolver);
}
    /**
     * Builds an invocation of the error {@code detail()} built-in. When {@code parentIndexBasedAccess}
     * is non-null the receiver is that (converted) access expression; otherwise it is a reference to
     * {@code errorVarySymbol}.
     */
    private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BType detailType,
                                                               BLangBlockStmt parentBlockStmt,
                                                               BVarSymbol errorVarySymbol,
                                                               BLangIndexBasedAccess parentIndexBasedAccess) {
        BLangInvocation detailInvocation = createInvocationNode(
                ERROR_DETAIL_FUNCTION_NAME, new ArrayList<>(), detailType);
        detailInvocation.builtInMethod = BLangBuiltInMethod.getFromString(ERROR_DETAIL_FUNCTION_NAME);
        if (parentIndexBasedAccess != null) {
            // Nested pattern: call detail() on the container access, converted to error.
            detailInvocation.expr = addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType);
            detailInvocation.symbol = symResolver.lookupLangLibMethod(parentIndexBasedAccess.type,
                    names.fromString(ERROR_DETAIL_FUNCTION_NAME));
            detailInvocation.requiredArgs = Lists.of(parentIndexBasedAccess);
        } else {
            // Top-level pattern: call detail() directly on the error variable.
            detailInvocation.expr = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
            detailInvocation.symbol = symResolver.lookupLangLibMethod(errorVarySymbol.type,
                    names.fromString(ERROR_DETAIL_FUNCTION_NAME));
            detailInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, errorVarySymbol));
        }
        detailInvocation.type = detailInvocation.symbol.type.getReturnType();
        return detailInvocation;
    }
    /**
     * Builds an invocation of the error {@code reason()} built-in. When {@code parentIndexBasedAccess}
     * is non-null the receiver is that (converted) access expression; otherwise it is a reference to
     * {@code errorVarSymbol}.
     */
    private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                               BVarSymbol errorVarSymbol,
                                                               BLangIndexBasedAccess parentIndexBasedAccess) {
        BLangInvocation reasonInvocation = createInvocationNode(ERROR_REASON_FUNCTION_NAME,
                new ArrayList<>(), reasonType);
        reasonInvocation.builtInMethod = BLangBuiltInMethod.getFromString(ERROR_REASON_FUNCTION_NAME);
        if (parentIndexBasedAccess != null) {
            // Nested pattern: call reason() on the container access, converted to error.
            reasonInvocation.expr = addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType);
            reasonInvocation.symbol = symResolver.lookupLangLibMethod(parentIndexBasedAccess.type,
                    names.fromString(ERROR_REASON_FUNCTION_NAME));
            reasonInvocation.requiredArgs = Lists.of(parentIndexBasedAccess);
        } else {
            // Top-level pattern: call reason() directly on the error variable.
            reasonInvocation.expr = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
            reasonInvocation.symbol = symResolver.lookupLangLibMethod(errorVarSymbol.type,
                    names.fromString(ERROR_REASON_FUNCTION_NAME));
            reasonInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, errorVarSymbol));
        }
        reasonInvocation.type = reasonInvocation.symbol.type.getReturnType();
        return reasonInvocation;
    }
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
BType targetType,
BVarSymbol source) {
BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = targetType;
typedescExpr.type = typedescType;
invocationNode.expr = typedescExpr;
invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
return invocationNode;
}
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVarRef recordVarRef, DiagnosticPos pos) {
BLangFunction function = ASTBuilderUtil.createFunction(pos, "$anonFunc$" + lambdaFunctionCount++);
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangBlockStmt functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 0));
BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupFirstElem);
for (BLangRecordVarRefKeyValue variableKeyValueNode : recordVarRef.recordRefFields) {
createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, variableKeyValueNode.variableName.getValue());
}
BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
return createLambdaFunction(function, functionSymbol);
}
    /**
     * Generates the predicate lambda used by the rest-filter pipeline:
     * {@code function ((string, any) entry) returns boolean} that returns false when
     * {@code entry[0]} equals any name in {@code toRemoveList}, and true otherwise.
     */
    private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
        String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
        BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
        // Single parameter: the (string, any) entry tuple.
        BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
                getStringAnyTupleType(), this.env.scope.owner);
        BLangBlockStmt functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
        // `var key = entry[0];` - index 0 is the key part of the tuple.
        BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
                symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 0));
        BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
                indexBasesAccessExpr, pos);
        functionBlock.addStatement(tupFirstElem);
        // One `if (key == "<name>") { return false; }` per excluded name.
        for (String toRemoveItem : toRemoveList) {
            createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
        }
        // Fall-through: return true, keeping the entry.
        BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
        return createLambdaFunction(function, functionSymbol);
    }
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
List<String> fieldNamesToRemove = recordVariable.variableList.stream()
.map(var -> var.getKey().getValue())
.collect(Collectors.toList());
return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}
    /**
     * Appends {@code if (key == "<key>") { return false; }} to the given block, where the key is
     * read from {@code inputParamSymbol} converted to string. Used to build the rest-filter
     * predicate one excluded name at a time.
     */
    private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockStmt blockStmt, String key) {
        BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
        BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
        BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
        // Then-branch: `return false;` - this entry is excluded from the rest map.
        BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
        returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
        ifStmt.body = ifBlock;
        // Condition: `(key == "<key>")` with an explicitly resolved equality operator symbol.
        BLangGroupExpr groupExpr = new BLangGroupExpr();
        groupExpr.type = symTable.booleanType;
        BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
                ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
                symTable.booleanType, OperatorKind.EQUAL, null);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
        groupExpr.expression = binaryExpr;
        ifStmt.expr = groupExpr;
    }
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.function = function;
lambdaFunction.type = functionSymbol.type;
return lambdaFunction;
}
    /**
     * Appends the trailing {@code return true;} to a generated filter function's body, then builds
     * and wires up the function's invokable symbol (params, return type, (string, any) -> boolean
     * signature), rewrites the function, and adds it to the enclosing package.
     */
    private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                       BLangBlockStmt functionBlock) {
        BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
        trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
        // Build the function symbol so the generated lambda can be invoked.
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
                new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
        functionSymbol.retType = function.returnTypeNode.type;
        functionSymbol.params = function.requiredParams.stream()
                .map(param -> param.symbol)
                .collect(Collectors.toList());
        functionSymbol.scope = env.scope;
        functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
                symTable.booleanType, null);
        function.symbol = functionSymbol;
        rewrite(function, env);
        env.enclPkg.addFunction(function);
        return functionSymbol;
    }
private BLangBlockStmt createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
BVarSymbol keyValSymbol) {
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
BLangValueType booleanTypeKind = new BLangValueType();
booleanTypeKind.typeKind = TypeKind.BOOLEAN;
booleanTypeKind.type = symTable.booleanType;
function.returnTypeNode = booleanTypeKind;
BLangBlockStmt functionBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
function.body = functionBlock;
return functionBlock;
}
private BTupleType getStringAnyTupleType() {
ArrayList<BType> typeList = new ArrayList<BType>() {{
add(symTable.stringType);
add(symTable.anyType);
}};
return new BTupleType(typeList);
}
/**
* This method creates a simple variable def and assigns and array expression based on the given indexExpr.
*
* case 1: when there is no parent array access expression, but with the indexExpr : 1
* string s = x[1];
*
* case 2: when there is a parent array expression : x[2] and indexExpr : 3
* string s = x[2][3];
*
* case 3: when there is no parent array access expression, but with the indexExpr : name
* string s = x[name];
*
* case 4: when there is a parent map expression : x[name] and indexExpr : fName
* string s = x[name][fName];
*
* case 5: when there is a parent map expression : x[name] and indexExpr : 1
* string s = x[name][1];
*/
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
BLangIndexBasedAccess parentArrayAccessExpr) {
Name varName = names.fromIdNode(simpleVariable.name);
if (varName == Names.IGNORE) {
return;
}
final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos,
parentBlockStmt);
simpleVariableDef.var = simpleVariable;
simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
@Override
public void visit(BLangAssignment assignNode) {
if (safeNavigateLHS(assignNode.varRef)) {
BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
accessExpr.leafNode = true;
result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
result = rewrite(result, env);
return;
}
assignNode.varRef = rewriteExpr(assignNode.varRef);
assignNode.expr = rewriteExpr(assignNode.expr);
assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
result = assignNode;
}
    @Override
    public void visit(BLangTupleDestructure tupleDestructure) {
        // Desugars a tuple destructuring assignment, e.g. (a, b) = expr; into a block that binds
        // `expr` to a temporary any[] variable and then assigns each target from it by index.
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
        BType runTimeType = new BArrayType(symTable.anyType);
        String name = "tuple";
        // Temporary holder for the RHS; index accesses created below read from this array.
        final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
                new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                        this.env.scope.owner));
        tuple.expr = tupleDestructure.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos,
                blockStmt);
        variableDef.var = tuple;
        // Assign each var-ref target from the temporary, then handle the `...rest` target if any.
        createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
        createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
        result = rewrite(blockStmt, env);
    }
/**
 * Handles the {@code ...rest} binding of a tuple destructure: initializes the rest
 * variable to an empty array and then copies every tuple member from index
 * {@code expressions.size()} to {@code tuple.length() - 1} into it via a foreach loop.
 */
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
                                           BVarSymbol tupleVarSymbol) {
    BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
    DiagnosticPos pos = blockStmt.pos;
    if (tupleVarRef.restParam != null) {
        BLangExpression tupleExpr = tupleDestructure.expr;
        // rest = [];
        BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
        BArrayType restParamType = (BArrayType) restParam.type;
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = restParamType;
        BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
        restParamAssignment.varRef = restParam;
        restParamAssignment.varRef.type = restParamType;
        restParamAssignment.expr = arrayExpr;
        // Iterate from the first un-bound member index up to the tuple's length.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) tupleVarRef.expressions.size();
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Loop body: rest[rest.length()] = tuple[$foreach$i];  (append one member per iteration)
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
                createLengthInvocation(pos, restParam));
        indexAccessExpr.type = restParamType.eType;
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
/**
 * Builds an invocation of the lang-lib {@code length()} method on the given collection
 * expression, resolving the method against the collection's static type.
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    Name lengthFuncName = names.fromString(LENGTH_FUNCTION_NAME);
    BInvokableSymbol lengthSymbol =
            (BInvokableSymbol) symResolver.lookupLangLibMethod(collection.type, lengthFuncName);
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSymbol,
            Lists.of(collection), symResolver);
    invocation.argExprs = invocation.requiredArgs;
    invocation.type = lengthSymbol.type.getReturnType();
    return invocation;
}
/**
 * Iterates through each member of the tupleVarRef and creates the relevant var ref assignment statements.
 * It checks the node kind of each member and calls the matching var ref creation method.
 *
 * Example:
 * ((a, b), c) = tuple
 *
 * (a, b) is itself a tuple, so its var refs are created recursively.
 *
 * c is a simple var, so a simple var def will be created for it.
 *
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangExpression> expressions = parentTupleVariable.expressions;
    for (int index = 0; index < expressions.size(); index++) {
        BLangExpression expression = expressions.get(index);
        // Leaf references (simple var, field access, index access, xml attribute access)
        // get a direct assignment from tuple[index].
        if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
            createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
                    tupleVarSymbol, parentIndexAccessExpr);
            continue;
        }
        // Nested tuple ref: recurse with tuple[index] as the new parent access expression.
        if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
            BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
            continue;
        }
        // Nested record ref: recurse, treating tuple[index] as a map.
        if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
            BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
            continue;
        }
        // Nested error ref: recurse, treating tuple[index] as an error value.
        if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
            BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
        }
    }
}
/**
 * Creates an assignment statement whose right-hand side is an index-based access
 * expression built from the given indexExpr.
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
                                              BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                              BLangIndexBasedAccess parentArrayAccessExpr) {
    if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
        if (varName == Names.IGNORE) {
            // `_` binds nothing; skip the assignment entirely.
            return;
        }
    }
    // RHS: container[index], converted to the target's type if required.
    BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
    assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
    final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
            parentBlockStmt);
    assignmentStmt.varRef = simpleVarRef;
    assignmentStmt.expr = assignmentExpr;
}
/**
 * Builds {@code container[indexExpr]} (chained under {@code parentExpr} when given) and,
 * for value types, wraps it in an unbox-conversion from {@code any} to the target type.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    arrayAccess.originalType = varType;
    if (parentExpr != null) {
        // Nested destructure: access hangs off the parent's access expression.
        arrayAccess.expr = parentExpr;
    }
    final BLangExpression assignmentExpr;
    if (types.isValueType(varType)) {
        // Value types must be unboxed from the `any`-typed container element.
        BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        castExpr.expr = arrayAccess;
        castExpr.conversionSymbol = Symbols.createUnboxValueTypeOpSymbol(symTable.anyType, varType);
        castExpr.type = varType;
        assignmentExpr = castExpr;
    } else {
        assignmentExpr = arrayAccess;
    }
    return assignmentExpr;
}
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    // Desugars `{a, b, ...rest} = expr` into a block:
    //   map<any> $map$0 = expr;   followed by per-field assignments.
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    String name = "$map$0";
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType,
            null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                    runTimeType, this.env.scope.owner));
    mapVariable.expr = recordDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.
            createVariableDefStmt(recordDestructure.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    // Desugars `error(reason, ...detail) = expr` into a block:
    //   error $error$ = expr;   followed by reason/detail/rest assignments.
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String name = "$error$";
    final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name,
            symTable.errorType, null, new BVarSymbol(0, names.fromString(name),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
    errorVar.expr = errorDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos,
            blockStmt);
    variableDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * Creates the per-field assignment statements for a record destructure: leaf references
 * get a direct assignment from {@code map[fieldName]}; nested record/tuple/error refs
 * recurse with the field access as the new parent; a {@code ...rest} binding receives the
 * source map filtered of all explicitly bound keys.
 */
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
    for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
        BLangExpression variableReference = varRefKeyValue.variableReference;
        // Fields are addressed by their (string) field name.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
                varRefKeyValue.variableName.getValue());
        if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
            createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
            BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
            BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    symTable.tupleType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
                    symTable.errorType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
                    arrayAccessExpr);
        }
    }
    if (parentRecordVarRef.restParam != null) {
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested destructure: snapshot the parent access into a temp map so the
            // rest-filter can operate on a simple var reference.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
                    null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                            restParamType, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // Top level: reuse the map variable declared as the block's first statement.
            variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
        }
        // rest = filter(source map, keys already bound above);
        BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
        List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
                .map(field -> field.variableName.value)
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
        restParamAssignment.varRef = restParam;
        restParamAssignment.varRef.type = restParamType;
        restParamAssignment.expr = varRef;
    }
}
/**
 * Creates the assignment statements for an error destructure: assigns the reason via the
 * error reason builtin, extracts a detail map once into a temp, assigns each named detail
 * entry from it, and finally assigns the rest binding from the detail map filtered of the
 * extracted keys.
 */
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    // Assign the reason unless it is bound to `_`.
    if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
            names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
        BLangAssignment reasonAssignment = ASTBuilderUtil
                .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
        reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
                symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
        reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
        reasonAssignment.varRef = parentErrorVarRef.reason;
    }
    // Nothing else to bind: no detail entries and the rest var is absent or `_`.
    if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
        return;
    }
    // Materialize the detail map once into a temp so each entry reads from the same value.
    BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
            ((BErrorType) parentErrorVarRef.type).detailType, parentBlockStmt, errorVarySymbol,
            parentIndexAccessExpr);
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
            symTable.detailType, errorDetailBuiltinFunction,
            parentErrorVarRef.pos);
    detailTempVarDef.type = symTable.detailType;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    List<String> extractedKeys = new ArrayList<>();
    for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
        extractedKeys.add(detail.name.value);
        BLangVariableReference ref = (BLangVariableReference) detail.expr;
        // ref = detail[name];
        BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
                createStringLiteral(detail.name.pos, detail.name.value),
                detailTempVarDef.var.symbol, null);
        if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
            bLangIndexBasedAccess.originalType = symTable.pureType;
        }
        BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
        detailAssignment.varRef = ref;
        detailAssignment.expr = detailEntryVar;
    }
    // rest = detail map filtered of the keys consumed above.
    if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                detailTempVarDef.var.symbol);
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
                extractedKeys,
                parentErrorVarRef.restVar.type, parentBlockStmt);
        BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
                parentBlockStmt);
        restAssignment.varRef = parentErrorVarRef.restVar;
        restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                filteredDetail.symbol);
    }
    BErrorType errorType = (BErrorType) parentErrorVarRef.type;
    if (errorType.detailType.getKind() == TypeKind.RECORD) {
        // A record-typed detail needs its init function defined for the generated code.
        BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
        tsymbol.initializerFunc = createRecordInitFunc();
        tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
    }
}
/**
 * Returns true when the error ref's rest binding is effectively ignored: either it is
 * absent, or it is a simple variable reference to {@code _}.
 */
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    BLangExpression restVar = parentErrorVarRef.restVar;
    if (restVar == null) {
        return true;
    }
    if (restVar.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    return ((BLangSimpleVarRef) restVar).variableName.value.equals(IGNORE.value);
}
@Override
public void visit(BLangAbort abortNode) {
    // Inside the desugared transaction function an abort becomes `return -1`
    // (-1 signals the aborted status to the transaction runtime).
    BLangReturn abortReturn = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
    result = rewrite(abortReturn, env);
}
@Override
public void visit(BLangRetry retryNode) {
    // Inside the desugared transaction function a retry becomes `return 1`
    // (1 signals the retry status to the transaction runtime).
    BLangReturn retryReturn = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
    result = rewrite(retryReturn, env);
}
@Override
public void visit(BLangContinue nextNode) {
    // Nothing to desugar; pass the node through.
    result = nextNode;
}
@Override
public void visit(BLangBreak breakNode) {
    // Nothing to desugar; pass the node through.
    result = breakNode;
}
@Override
public void visit(BLangReturn returnNode) {
    // Desugar the returned expression when one exists; a bare `return` is left as-is.
    BLangExpression returnedExpr = returnNode.expr;
    if (returnedExpr != null) {
        returnNode.expr = rewriteExpr(returnedExpr);
    }
    result = returnNode;
}
@Override
public void visit(BLangPanic panicNode) {
    // Desugar the panicked expression; the panic statement itself is unchanged.
    panicNode.expr = rewriteExpr(panicNode.expr);
    result = panicNode;
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    // Desugar the wrapped xmlns declaration; the statement node is reused.
    xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
    result = xmlnsStmtNode;
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
    // Rewrite the namespace URI, then re-materialize the declaration either as a
    // function/service-local XMLNS node or a package-level one, based on its owner symbol.
    xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
    BSymbol ownerSymbol = xmlnsNode.symbol.owner;
    boolean isLocal = (ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE;
    BLangXMLNS generatedXMLNSNode = isLocal ? new BLangLocalXMLNS() : new BLangPackageXMLNS();
    generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
    generatedXMLNSNode.prefix = xmlnsNode.prefix;
    generatedXMLNSNode.symbol = xmlnsNode.symbol;
    result = generatedXMLNSNode;
}
/**
 * Desugars {@code lhs op= rhs} into {@code lhs = lhs op rhs}. For index-based LHS chains
 * (e.g. {@code a[i][j] += x}) every index expression is first evaluated once into a temp
 * variable so it is not re-evaluated on both sides of the generated assignment.
 */
public void visit(BLangCompoundAssignment compoundAssignment) {
    BLangVariableReference varRef = compoundAssignment.varRef;
    if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Simple LHS: emit `varRef = modifiedExpr` directly.
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
            varRef.lhsVar = true;
        }
        result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                rewriteExpr(compoundAssignment.modifiedExpr));
        return;
    }
    List<BLangStatement> statements = new ArrayList<>();
    List<BLangSimpleVarRef> varRefs = new ArrayList<>();
    List<BType> types = new ArrayList<>();
    // Walk the index-access chain inward, hoisting each index expression into a temp var.
    // Temps are prepended so they execute outermost-first in the generated block.
    do {
        indexExprNumber += 1;
        BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + indexExprNumber + "$",
                ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
                compoundAssignment.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                tempIndexVarDef.var.symbol);
        statements.add(0, tempIndexVarDef);
        varRefs.add(0, tempVarRef);
        types.add(0, varRef.type);
        varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
    } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
    // Rebuild the access chain using the temp index variables.
    BLangVariableReference var = varRef;
    for (int ref = 0; ref < varRefs.size(); ref++) {
        var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
        var.type = types.get(ref);
    }
    var.type = compoundAssignment.varRef.type;
    // lhs = lhs <op> rhs;
    BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
            compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
    rhsExpression.type = compoundAssignment.modifiedExpr.type;
    BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
            rhsExpression);
    statements.add(assignStmt);
    BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
    result = rewrite(bLangBlockStmt, env);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
    // Desugar the wrapped expression; the statement node is reused.
    exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
    result = exprStmtNode;
}
@Override
public void visit(BLangIf ifNode) {
    // Desugar condition, then-body and (possibly null) else-branch in place.
    ifNode.expr = rewriteExpr(ifNode.expr);
    ifNode.body = rewrite(ifNode.body, env);
    ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
    result = ifNode;
}
@Override
public void visit(BLangMatch matchStmt) {
    // Desugars a match statement into a block:
    //   T _$$_ = <match-expr>;   followed by an if/else chain over the patterns.
    BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlockStmt.pos = matchStmt.pos;
    // Evaluate the matched expression exactly once into a generated variable.
    String matchExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
            matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0,
                    names.fromString(matchExprVarName),
                    this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
    BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
    matchBlockStmt.stmts.add(matchExprVarDef);
    matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
    rewrite(matchBlockStmt, this.env);
    result = matchBlockStmt;
}
@Override
public void visit(BLangForeach foreach) {
    // Desugars `foreach` over any iterable into a while loop driven by the iterator's
    // `next()` protocol; the collection expression is evaluated once into $data$.
    BLangBlockStmt blockNode;
    BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
            foreach.collection.type, this.env.scope.owner);
    BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
            foreach.collection.type, foreach.collection, dataSymbol);
    BLangSimpleVariableDef dataVariableDefinition = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
    BVarSymbol collectionSymbol = dataVariable.symbol;
    switch (foreach.collection.type.tag) {
        case TypeTags.STRING:
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.TABLE:
        case TypeTags.RECORD:
            // Built-in iterables: obtain an iterator from $data$, then loop on it.
            BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach, collectionSymbol);
            blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
            blockNode.stmts.add(0, dataVariableDefinition);
            break;
        case TypeTags.OBJECT:
            // An object is its own iterator; loop on $data$ directly.
            blockNode = desugarForeachToWhile(foreach, dataVariableDefinition);
            break;
        default:
            // Non-iterable (front end should have rejected this); emit only $data$.
            blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
            blockNode.stmts.add(0, dataVariableDefinition);
            break;
    }
    rewrite(blockNode, this.env);
    result = blockNode;
}
/**
 * Builds the while-loop form of a foreach:
 * <pre>
 *   var $iterator$ = ...;                 // varDef (iterator or object itself)
 *   var $result$ = $iterator$.next();
 *   while ($result$ is record { T value; }) {
 *       var v = $result$.value;           // the user's binding pattern
 *       $result$ = $iterator$.next();
 *       ... original body ...
 *   }
 * </pre>
 */
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    BLangSimpleVariableDef resultVariableDefinition =
            getIteratorNextVariableDefinition(foreach, iteratorSymbol, resultSymbol);
    // Loop condition: $result$ is still a value record (i.e. iteration not finished).
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, resultReferenceInWhile, getUserDefineTypeNode(foreach.resultType));
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    // Bind the user's variable(s) from $result$.value at the top of the loop body.
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach, resultSymbol);
    valueAccessExpr.expr =
            addConversionExprIfRequired(valueAccessExpr.expr, types.getSafeType(valueAccessExpr.expr.type,
                    true, false));
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    // Advance the iterator before running the original body statements.
    BLangAssignment resultAssignment =
            getIteratorNextAssignment(foreach, iteratorSymbol, resultSymbol);
    whileNode.body.stmts.add(1, resultAssignment);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(resultVariableDefinition);
    blockNode.addStatement(whileNode);
    return blockNode;
}
/**
 * Wraps the given semantic type in a user-defined type node with an empty package alias
 * and an empty name, for use where a syntactic type node is required (e.g. type tests).
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType(
            ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    userDefinedType.type = type;
    return userDefinedType;
}
/**
 * Looks up the attached function named {@code next} on the given iterator object type;
 * returns null when the type declares no such function.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    String nextFuncName = BLangBuiltInMethod.NEXT.getName();
    for (BAttachedFunction attachedFunc : typeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(nextFuncName)) {
            return attachedFunc;
        }
    }
    return null;
}
@Override
public void visit(BLangWhile whileNode) {
    // Desugar condition and body in place.
    whileNode.expr = rewriteExpr(whileNode.expr);
    whileNode.body = rewrite(whileNode.body, env);
    result = whileNode;
}
/**
 * Desugars a lock block into:
 * <pre>
 *   lock;
 *   (error|()) $errorResult = trap { ...body...; () };
 *   unlock;
 *   if ($errorResult is error) { panic $errorResult; }
 * </pre>
 * The trap/unlock/panic sequence guarantees the lock is released even when the body
 * panics, and then re-raises the original error.
 */
@Override
public void visit(BLangLock lockNode) {
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    // Wrap the body in `trap (stmt-expr returning ())` so a panic is captured as an error.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = ASTBuilderUtil
            .createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.type = symTable.nilType;
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.type = nillableError;
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr, nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Release the lock before inspecting the trapped result.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    blockStmt.addStatement(unLockStmt);
    // if ($errorResult is error) { panic $errorResult; }
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
    // A variable locked as a whole makes its individual field locks redundant.
    if (!lockStmt.lockVariables.isEmpty()) {
        lockStmt.fieldVariables.entrySet().removeIf(entry -> lockStmt.lockVariables.contains(entry.getKey()));
    }
}
@Override
public void visit(BLangLockStmt lockStmt) {
    // Already in its lowest form; pass through.
    result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    // Already in its lowest form; pass through.
    result = unLockStmt;
}
/**
 * Desugars a transaction statement into lambdas for the main body, on-retry, committed and
 * aborted blocks, and an invocation of the transaction initiator's begin function with
 * those lambdas, the retry count, and a unique block id.
 */
@Override
public void visit(BLangTransaction transactionNode) {
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType = symTable.nilType;
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
    DiagnosticPos invPos = transactionNode.pos;
    /* transaction block code will be desugared to a function which returns int. The return value determines
       the status of the transaction code.
       ex.
       0 = successful
       1 = retry
       -1 = abort
       Since transaction block code doesn't return anything, we need to add a return statement at the end of
       the block unless it already ends with an abort or retry statement.
    */
    DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
            invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
    BLangStatement statement = null;
    if (!transactionNode.transactionBody.stmts.isEmpty()) {
        statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
    }
    // Append `return 0` (success) unless the body already ends with abort or retry.
    // Bug fix: the second condition previously re-tested ABORT instead of RETRY.
    if (statement == null || !(statement.getKind() == NodeKind.ABORT) && !(statement.getKind() == NodeKind.RETRY)) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
        transactionNode.transactionBody.addStatement(returnStmt);
    }
    // Missing optional clauses default to empty blocks so the lambdas below always exist.
    if (transactionNode.abortedBody == null) {
        transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.committedBody == null) {
        transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.onRetryBody == null) {
        transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.retryCount == null) {
        // Default retry count is 3.
        transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
    }
    BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$",
                                                           Collections.emptyList(),
                                                           trxReturnNode, transactionNode.transactionBody);
    BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$",
                                                              Collections.emptyList(),
                                                              otherReturnNode, transactionNode.onRetryBody);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
                                                                Collections.emptyList(),
                                                                otherReturnNode, transactionNode.committedBody);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$",
                                                              Collections.emptyList(),
                                                              otherReturnNode, transactionNode.abortedBody);
    trxMainFunc.cachedEnv = env.createClone();
    trxOnRetryFunc.cachedEnv = env.createClone();
    trxCommittedFunc.cachedEnv = env.createClone();
    trxAbortedFunc.cachedEnv = env.createClone();
    // Locate the ballerina/transactions module import; the front end guarantees it is
    // present whenever a transaction statement type-checks, hence the unguarded get().
    BSymbol trxModSym = env.enclPkg.imports
            .stream()
            .filter(importPackage ->
                    importPackage.symbol.pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names
                            .ORG_NAME_SEPARATOR.value + Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(trxModSym),
                                                        TRX_INITIATOR_BEGIN_FUNCTION,
                                                        SymTag.FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
                                                                   getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
                                                  trxOnRetryFunc,
                                                  trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
                                                                              requiredArgs,
                                                                              Collections.emptyList(),
                                                                              symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}
/**
 * Generates a unique id of the form {@code <org>$<pkg>$<index>} for a transaction block;
 * the index is incremented for each block desugared in this package.
 */
private String getTransactionBlockId() {
    String blockId = env.enclPkg.packageID.orgName + "$" + env.enclPkg.packageID.name + "$" + transactionIndex;
    transactionIndex++;
    return blockId;
}
/**
 * Creates and defines an anonymous function with the given parameters, return type node
 * and body, and wraps it in a lambda expression node. The function is registered in the
 * enclosing package via defineFunction.
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType,
                                                 BLangBlockStmt lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    // A running counter makes each generated function name unique within the package.
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    // Re-read the params after symbol definition so symbols are populated for typing below.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    func.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
            null);
    return lambdaFunction;
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
TypeNode returnType) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
lambdaFunction.function = func;
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
func.desugared = false;
lambdaFunction.pos = pos;
return lambdaFunction;
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.type = literalExpr.type;
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replaceAll(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
if (listConstructor.type.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type));
result = rewriteExpr(expr);
} else if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.TYPEDESC) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
if (arrayLiteral.type.tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type));
return;
} else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type);
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
return;
}
tupleLiteral.exprs.forEach(expr -> {
BType expType = expr.impConversionExpr == null ? expr.type : expr.impConversionExpr.type;
types.setImplicitCastExpr(expr, expType, symTable.anyType);
});
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
if (groupExpr.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = groupExpr.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
} else {
result = rewriteExpr(groupExpr.expression);
}
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
recordLiteral.keyValuePairs.sort((v1, v2) -> Boolean.compare(v1.key.computedKey, v2.key.computedKey));
recordLiteral.keyValuePairs.forEach(keyValue -> {
BLangExpression keyExpr = keyValue.key.expr;
if (!keyValue.key.computedKey && keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
keyValue.key.expr = createStringLiteral(varRef.pos, varRef.variableName.value);
} else {
keyValue.key.expr = rewriteExpr(keyValue.key.expr);
}
keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
});
BLangExpression expr;
if (recordLiteral.type.tag == TypeTags.RECORD) {
expr = new BLangStructLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
} else if (recordLiteral.type.tag == TypeTags.MAP) {
expr = new BLangMapLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
} else {
expr = new BLangJSONLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
}
result = rewriteExpr(expr);
}
@Override
public void visit(BLangTableLiteral tableLiteral) {
tableLiteral.tableDataRows = rewriteExprs(tableLiteral.tableDataRows);
List<String> keyColumns = new ArrayList<>();
for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
if (column.flagSet.contains(TableColumnFlag.PRIMARYKEY)) {
keyColumns.add(column.columnName);
}
}
BLangArrayLiteral keyColumnsArrayLiteral = createArrayLiteralExprNode();
keyColumnsArrayLiteral.exprs = keyColumns.stream()
.map(expr -> ASTBuilderUtil.createLiteral(tableLiteral.pos, symTable.stringType, expr))
.collect(Collectors.toList());
keyColumnsArrayLiteral.type = new BArrayType(symTable.stringType);
tableLiteral.keyColumnsArrayLiteral = keyColumnsArrayLiteral;
List<String> indexColumns = new ArrayList<>();
for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
if (column.flagSet.contains(TableColumnFlag.INDEX)) {
indexColumns.add(column.columnName);
}
}
BLangArrayLiteral indexColumnsArrayLiteral = createArrayLiteralExprNode();
indexColumnsArrayLiteral.exprs = indexColumns.stream()
.map(expr -> ASTBuilderUtil.createLiteral(tableLiteral.pos, symTable.stringType, expr))
.collect(Collectors.toList());
indexColumnsArrayLiteral.type = new BArrayType(symTable.stringType);
tableLiteral.indexColumnsArrayLiteral = indexColumnsArrayLiteral;
result = tableLiteral;
}
private void addReferenceVariablesToArgs(List<BLangExpression> args, List<BLangExpression> varRefs) {
BLangArrayLiteral localRefs = createArrayLiteralExprNode();
varRefs.forEach(varRef -> localRefs.exprs.add(rewrite(varRef, env)));
args.add(localRefs);
}
private void addFunctionPointersToArgs(List<BLangExpression> args, List<StreamingQueryStatementNode>
streamingStmts) {
BLangArrayLiteral funcPointers = createArrayLiteralExprNode();
for (StreamingQueryStatementNode stmt : streamingStmts) {
funcPointers.exprs.add(rewrite((BLangExpression) stmt.getStreamingAction().getInvokableBody(), env));
}
args.add(funcPointers);
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.type = symTable.stringType;
result = qnameExpr;
return;
}
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
if (!enclLocks.isEmpty()) {
enclLocks.peek().addLockVariable((BVarSymbol) varRefExpr.symbol);
}
}
genVarRefExpr.type = varRefExpr.type;
genVarRefExpr.pos = varRefExpr.pos;
if ((varRefExpr.lhsVar)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
genVarRefExpr.type = varRefExpr.symbol.type;
result = genVarRefExpr;
return;
}
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
BType targetType = genVarRefExpr.type;
genVarRefExpr.type = genVarRefExpr.symbol.type;
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
if (safeNavigate(fieldAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
return;
}
BLangAccessExpression targetVarRef = fieldAccessExpr;
BType varRefType = fieldAccessExpr.expr.type;
fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
}
BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
int varRefTypeTag = varRefType.tag;
if (varRefTypeTag == TypeTags.OBJECT ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false);
addToLocks((BLangStructFieldAccessExpr) targetVarRef);
}
} else if (varRefTypeTag == TypeTags.RECORD ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
&& ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false);
}
} else if (types.isLax(varRefType)) {
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else if (varRefTypeTag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else if (varRefTypeTag == TypeTags.XML) {
targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.fieldKind);
}
targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
targetVarRef.type = fieldAccessExpr.type;
targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
result = targetVarRef;
}
private void addToLocks(BLangStructFieldAccessExpr targetVarRef) {
if (enclLocks.isEmpty()) {
return;
}
if (targetVarRef.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF
|| ((BLangSimpleVarRef) targetVarRef.expr).symbol.owner.getKind() == SymbolKind.PACKAGE
|| !Names.SELF.equals(((BLangLocalVarRef) targetVarRef.expr).symbol.name)) {
return;
}
if (targetVarRef.indexExpr.getKind() == NodeKind.LITERAL) {
String field = (String) ((BLangLiteral) targetVarRef.indexExpr).value;
enclLocks.peek().addFieldVariable((BVarSymbol) ((BLangLocalVarRef) targetVarRef.expr).varSymbol, field);
}
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
if (safeNavigate(indexAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
return;
}
BLangVariableReference targetVarRef = indexAccessExpr;
indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
BType varRefType = indexAccessExpr.expr.type;
indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
}
if (varRefType.tag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
} else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
} else if (types.isSubTypeOfList(varRefType)) {
targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (types.isAssignable(varRefType, symTable.stringType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (varRefType.tag == TypeTags.XML) {
targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
}
targetVarRef.lhsVar = indexAccessExpr.lhsVar;
targetVarRef.type = indexAccessExpr.type;
result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
BLangInvocation genIExpr = iExpr;
if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
result = rewriteErrorConstructor(iExpr);
}
reorderArguments(iExpr);
iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
iExpr.restArgs = rewriteExprs(iExpr.restArgs);
if (iExpr.functionPointerInvocation) {
visitFunctionPointerInvocation(iExpr);
return;
}
iExpr.expr = rewriteExpr(iExpr.expr);
if (iExpr.builtinMethodInvocation) {
visitBuiltInMethodInvocation(iExpr);
return;
}
result = genIExpr;
if (iExpr.expr == null) {
fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
if (iExpr.exprSymbol == null) {
return;
}
iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
iExpr.expr = rewriteExpr(iExpr.expr);
}
switch (iExpr.expr.type.tag) {
case TypeTags.OBJECT:
case TypeTags.RECORD:
if (!iExpr.langLibInvocation) {
List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
argExprs.add(0, iExpr.expr);
BLangAttachedFunctionInvocation attachedFunctionInvocation =
new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
iExpr.type, iExpr.expr, iExpr.async);
attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
attachedFunctionInvocation.name = iExpr.name;
result = genIExpr = attachedFunctionInvocation;
}
break;
}
fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
BType originalInvType = genIExpr.type;
genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
this.result = expr;
return;
}
BOperatorSymbol conversionSymbol = Symbols
.createCastOperatorSymbol(genIExpr.type, originalInvType, symTable.errorType, false, true,
InstructionCodes.NOP, null, null);
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
conversionExpr.expr = genIExpr;
conversionExpr.targetType = originalInvType;
conversionExpr.conversionSymbol = conversionSymbol;
conversionExpr.type = originalInvType;
conversionExpr.pos = genIExpr.pos;
this.result = conversionExpr;
}
}
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
if (reasonExpr.impConversionExpr != null &&
reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
reasonExpr.impConversionExpr = null;
}
reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
reasonExpr = rewriteExpr(reasonExpr);
iExpr.requiredArgs.remove(0);
iExpr.requiredArgs.add(reasonExpr);
BLangExpression errorDetail;
BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
((BErrorType) iExpr.symbol.type).detailType);
List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
.filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
.collect(Collectors.toList());
if (namedArgs.isEmpty()) {
errorDetail = visitUtilMethodInvocation(iExpr.pos,
BLangBuiltInMethod.FREEZE, Lists.of(rewriteExpr(recordLiteral)));
} else {
for (BLangExpression arg : namedArgs) {
BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
BLangRecordLiteral.BLangRecordKeyValue member = new BLangRecordLiteral.BLangRecordKeyValue();
member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
symTable.stringType, namedArg.name.value));
if (recordLiteral.type.tag == TypeTags.RECORD) {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
} else {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
}
recordLiteral.keyValuePairs.add(member);
iExpr.requiredArgs.remove(arg);
}
recordLiteral = rewriteExpr(recordLiteral);
BLangExpression cloned = visitCloneInvocation(recordLiteral, ((BErrorType) iExpr.symbol.type).detailType);
errorDetail = visitUtilMethodInvocation(iExpr.pos, BLangBuiltInMethod.FREEZE, Lists.of(cloned));
}
iExpr.requiredArgs.add(errorDetail);
return iExpr;
}
public void visit(BLangTypeInit typeInitExpr) {
switch (typeInitExpr.type.tag) {
case TypeTags.STREAM:
result = new BLangStreamLiteral(typeInitExpr.pos, typeInitExpr.type);
break;
default:
if (typeInitExpr.type.tag == TypeTags.OBJECT && typeInitExpr.initInvocation.symbol == null) {
typeInitExpr.initInvocation.symbol =
((BObjectTypeSymbol) typeInitExpr.type.tsymbol).initializerFunc.symbol;
}
result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
}
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
typeInitExpr.desugared = true;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
BType objType = getObjectType(typeInitExpr.type);
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
blockStmt.addStatement(objVarDef);
typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
initInvExpr.expr = typeInitExpr.initInvocation;
typeInitExpr.initInvocation.name.value = Names.USER_DEFINED_INIT_SUFFIX.value;
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, objVarRef);
stmtExpr.type = objVarRef.symbol.type;
return stmtExpr;
}
BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
typeInitExpr.initInvocation, typeInitExpr.pos);
blockStmt.addStatement(initInvRetValVarDef);
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
blockStmt.addStatement(resultVarDef);
BLangSimpleVarRef initRetValVarRefInCondition =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
isErrorTest.type = symTable.booleanType;
BLangSimpleVarRef thenInitRetValVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
BLangSimpleVarRef thenResultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangAssignment errAssignment =
ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
thenStmt.addStatement(errAssignment);
BLangSimpleVarRef elseResultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangAssignment objAssignment =
ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
elseStmt.addStatement(objAssignment);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
blockStmt.addStatement(ifelse);
BLangSimpleVarRef resultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.type = resultVarRef.symbol.type;
return stmtExpr;
}
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
BSymbol objSym = symResolver.lookupSymbol(env, names.fromString(name), SymTag.VARIABLE);
if (objSym == null || objSym == symTable.notFoundSymbol) {
objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
}
BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
(BVarSymbol) objSym);
BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
objVarDef.var = objVar;
objVarDef.type = objVar.type;
return objVarDef;
}
private BType getObjectType(BType type) {
if (type.tag == TypeTags.OBJECT) {
return type;
} else if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream()
.filter(t -> t.tag == TypeTags.OBJECT)
.findFirst()
.orElse(symTable.noType);
}
throw new IllegalStateException("None object type '" + type.toString() + "' found in object init conext");
}
private BLangErrorType getErrorTypeNode() {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.type = symTable.errorType;
return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
/*
* First desugar to if-else:
*
* T $result$;
* if () {
* $result$ = thenExpr;
* } else {
* $result$ = elseExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
thenBody.addStatement(thenAssignment);
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.type = ternaryExpr.type;
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
} else {
waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
}
result = waitExpr;
}
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
return exprs;
}
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
if (expr.getKind() == NodeKind.BINARY_EXPR) {
collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
} else {
expr = rewriteExpr(expr);
exprs.add(expr);
}
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
waitExpr.keyValuePairs.forEach(keyValue -> {
if (keyValue.valueExpr != null) {
keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
} else {
keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
}
});
BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
result = rewriteExpr(expr);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
trapExpr.expr = rewriteExpr(trapExpr.expr);
if (trapExpr.expr.type.tag != TypeTags.NIL) {
trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
}
result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
}
result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
return;
}
if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
visitBinaryLogicalExpr(binaryExpr);
return;
}
OperatorKind binaryOpKind = binaryExpr.opKind;
if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
checkByteTypeIncompatibleOperations(binaryExpr);
}
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
result = binaryExpr;
int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
symTable.intType);
return;
}
if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
symTable.intType);
return;
}
}
if (lhsExprTypeTag == rhsExprTypeTag) {
return;
}
if (lhsExprTypeTag == TypeTags.STRING && binaryExpr.opKind == OperatorKind.ADD) {
if (rhsExprTypeTag == TypeTags.XML) {
binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
binaryExpr.lhsExpr.pos, symTable.xmlType);
return;
}
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
binaryExpr.lhsExpr.type);
return;
}
if (rhsExprTypeTag == TypeTags.STRING && binaryExpr.opKind == OperatorKind.ADD) {
if (lhsExprTypeTag == TypeTags.XML) {
binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
binaryExpr.rhsExpr.pos, symTable.xmlType);
return;
}
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
binaryExpr.rhsExpr.type);
return;
}
if (lhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
binaryExpr.lhsExpr.type);
return;
}
if (rhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
binaryExpr.rhsExpr.type);
return;
}
if (lhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
binaryExpr.lhsExpr.type);
return;
}
if (rhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
binaryExpr.rhsExpr.type);
}
}
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
.lookup(Names.CREATE_INT_RANGE).symbol;
BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, symbol,
new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
createIntRangeInvocation.type = symTable.intRangeType;
return createIntRangeInvocation;
}
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
if (binaryExpr.parent == null || binaryExpr.parent.type == null) {
return;
}
int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {
return;
}
int resultTypeTag = binaryExpr.type.tag;
if (resultTypeTag == TypeTags.INT) {
if (rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
}
if (lhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
}
}
}
/**
 * Tells whether the given binary expression is a bitwise shift operation
 * ({@code <<}, {@code >>} or {@code >>>}). When it is, both operands are later
 * converted to the {@code int} type.
 * <p>
 * Examples that answer {@code true} (expected type is {@code int}):
 * {@code a >> b}, {@code a << b}, {@code a >> i}, {@code a << i},
 * {@code i >> j}, {@code i << j}.
 *
 * @param binaryExpr the binary expression to inspect
 * @return {@code true} for any of the three shift operator kinds
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars the elvis operator ({@code lhs ?: rhs}) into a match expression that yields the
 * rhs when the lhs evaluates to nil, and the lhs value otherwise.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    // Nil pattern clause returns the (already rewritten) rhs expression.
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.type = elvisExpr.type;
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
/**
 * Desugars a unary expression. The bitwise complement operator has a dedicated
 * rewrite (it is lowered to an XOR); every other operator only needs its operand
 * desugared.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
    } else {
        unaryExpr.expr = rewriteExpr(unaryExpr.expr);
        result = unaryExpr;
    }
}
/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final DiagnosticPos pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    if (TypeTags.BYTE == unaryExpr.type.tag) {
        // byte complement: XOR with the all-ones byte mask 0xff.
        binaryExpr.type = symTable.byteType;
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // int complement: XOR with -1 (all bits set).
        binaryExpr.type = symTable.intType;
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a type-conversion expression. A conversion that carries only annotation
 * attachments and no explicit target type node needs no cast at all; in that case the
 * wrapped expression replaces the conversion entirely.
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    boolean annotationOnly = conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty();
    if (annotationOnly) {
        result = rewriteExpr(conversionExpr.expr);
    } else {
        conversionExpr.expr = rewriteExpr(conversionExpr.expr);
        result = conversionExpr;
    }
}
/**
 * Registers a lambda function with the enclosing package so it is emitted later;
 * the node itself is already in desugared form.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function (e.g. {@code (x) => x + 1}) into a regular lambda:
 * a block-bodied function node is synthesized around the single expression, a
 * function symbol with parameter symbols is defined for it, and the function is
 * added to the enclosing package before the resulting lambda is rewritten.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Synthesize the function node that will carry the arrow expression as its body.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // The return type is the static type of the arrow expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.type = bLangArrowFunction.expression.type;
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.type = bLangArrowFunction.funcType;
    // Create the function symbol and its scope, then define each parameter symbol in it.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // NOTE: the peek() both mutates the param symbols and defines them in the scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.retType = funcNode.returnTypeNode.type;
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.type = new BInvokableType(paramTypes, funcNode.returnTypeNode.type, null);
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to an invokable node and wires a fresh scope owned by
 * that symbol into both the symbol and the invokable environment.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
// XML qualified names need no further desugaring; attributes only need their
// name and value expressions rewritten.
@Override
public void visit(BLangXMLQName xmlQName) {
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
/**
 * Desugars an XML element literal: rewrites tag names, children and attributes, then
 * hoists namespace-declaring attributes (xmlns:...) out of the attribute list into
 * the element's inline-namespace list as XMLNS nodes.
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Use an explicit iterator so namespace-declaration attributes can be removed in place.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-level elements get package-scoped XMLNS nodes; others get local ones.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
        attributesItr.remove();
    }
    result = xmlElementLiteral;
}
// The XML text-bearing literals below all desugar the same way: their text
// fragments are folded into a single string-concatenation expression.
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is replaced entirely by the concatenation of its parts.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
// Worker interaction nodes: sends clone their payload (values crossing worker
// boundaries must not be shared), receives only rewrite the optional key.
@Override
public void visit(BLangWorkerSend workerSendNode) {
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    if (workerReceiveNode.keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
    }
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Collect the distinct target workers of all cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
/**
 * Desugars an XML attribute access (e.g. {@code x@["attr"]}). Marks QName indices as
 * XML-used, and re-runs desugaring once more for the bare {@code x@} read form.
 */
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    // Guard flag prevents infinite recursion on the second rewrite below.
    xmlAttributeAccessExpr.desugared = true;
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
// The node kinds below are produced by earlier desugar passes (or need no
// transformation) and are passed through unchanged.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangJSONLiteral jsonLiteral) {
    result = jsonLiteral;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
public void visit(BLangStreamLiteral streamLiteral) {
    result = streamLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the lhs needs desugaring; the target type is static.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr accessExpr) {
    result = accessExpr;
}
/**
 * Desugars an int range expression: exclusive bounds are first shifted to their
 * inclusive equivalents, then both bound expressions are rewritten.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Adjust bounds BEFORE rewriting so the adjustment itself gets desugared too.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}
// Rest-args and named-args wrappers are transparent at this stage: both desugar
// to their wrapped expression.
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
/**
 * Desugars a table query expression: the in-memory query builder first generates the
 * SQL form, then the whole expression is replaced with a native "queryTable" call.
 */
public void visit(BLangTableQueryExpression tableQueryExpression) {
    inMemoryTableQueryBuilder.visit(tableQueryExpression);
    /*replace the table expression with a function invocation,
      so that we manually call a native function "queryTable". */
    result = createInvocationFromTableExpr(tableQueryExpression);
}
/**
 * Desugars a match expression into a statement expression:
 * a temp result variable is declared, the matched value is dispatched through a
 * match statement whose pattern bodies assign into the temp variable, and a
 * reference to the temp variable becomes the expression's value.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Ensure the match is exhaustive by appending an implicit default case.
    addMatchExprDefaultCase(bLangMatchExpression);
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            matchTempResultVarName, bLangMatchExpression.type, null,
            new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                    bLangMatchExpression.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;
    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);
        // Each pattern body becomes: tempResult = <pattern expr (converted if needed)>;
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
            patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = ASTBuilderUtil.createStatementExpression(stmts, tempResultVarRef);
    statementExpr.type = bLangMatchExpression.type;
    result = rewriteExpr(statementExpr);
}
// `check` and `checkpanic` share one desugaring; the flag selects the
// error-handling strategy (return the error vs. panic).
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check expr} / {@code checkpanic expr} into a statement expression:
 * <pre>
 *   T $temp$;
 *   match expr {
 *       T value  => $temp$ = value;
 *       error e  => return e;   // or panic e when isCheckPanic is true
 *   }
 *   ... $temp$ ...
 * </pre>
 *
 * @param checkedExpr  the checked expression being desugared
 * @param isCheckPanic true for {@code checkpanic} (error pattern panics instead of returning)
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    // Temp variable that holds the successful result of the checked expression.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BVarSymbol checkedExprVarSymbol = new BVarSymbol(0, names.fromString(checkedExprVarName),
            this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner);
    BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName,
            checkedExpr.type, null, checkedExprVarSymbol);
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;

    // Success clause assigns into the temp variable; error clause returns or panics.
    BLangMatchTypedBindingPatternClause patternSuccessCase = getSafeAssignSuccessPattern(checkedExprVar.pos,
            checkedExprVar.symbol.type, true, checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);
    List<BLangMatchTypedBindingPatternClause> matchPatterns = new ArrayList<>();
    matchPatterns.add(patternSuccessCase);
    matchPatterns.add(patternErrorCase);
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, matchPatterns);

    List<BLangStatement> generatedStmts = new ArrayList<>();
    generatedStmts.add(checkedExprVarDef);
    generatedStmts.add(matchStmt);
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, generatedStmts);

    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = ASTBuilderUtil.createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.type = checkedExpr.type;
    result = rewriteExpr(statementExpr);
}
/**
 * Desugars a service constructor expression into a plain object instantiation of
 * the service's type definition; service annotations are rewritten as a side effect.
 */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
/**
 * Desugars a type-test expression ({@code expr is T}): value-typed operands are
 * boxed to {@code any} so the runtime check operates on a reference value, and the
 * tested expression is rewritten in place.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // Bug fix: the converted expression was previously discarded (the return value
        // of addConversionExprIfRequired was not assigned), making the boxing a no-op.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    result = typeTestExpr;
}
/**
 * Desugars an annotation access expression ({@code expr.@Annot}) into a synthetic
 * ANNOT_ACCESS binary expression whose rhs is the annotation's alias string literal.
 */
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    // The annotation is identified at runtime by its BVM alias string.
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.type = annotAccessExpr.type;
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.type,
                    binaryExpr.rhsExpr.type),
                    annotAccessExpr.type, null), null,
            InstructionCodes.ANNOT_ACCESS);
    result = rewriteExpr(binaryExpr);
}
// Simple recursive rewrites: each node only needs its sub-expressions desugared.
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
/**
 * Desugars a constant declaration. For simple literal types (numeric, string,
 * boolean, nil) the expression is replaced by a literal built from the symbol's
 * resolved value; other constant expressions are desugared recursively.
 */
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value by this phase.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
// Terminal nodes with no sub-expressions: passed through unchanged.
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    result = ignoreExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    result = constantRef;
}
/**
 * Builds the {@code $iterator$} variable definition for a desugared foreach:
 * {@code var $iterator$ = <collection>.iterate();} using the lang-lib iterate method
 * resolved for the collection's type.
 */
private BLangSimpleVariableDef getIteratorVariableDefinition(BLangForeach foreach, BVarSymbol collectionSymbol) {
    BLangIdentifier iterateIdentifier =
            ASTBuilderUtil.createIdentifier(foreach.pos, BLangBuiltInMethod.ITERATE.getName());
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(foreach.pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = foreach.pos;
    iteratorInvocation.name = iterateIdentifier;
    iteratorInvocation.expr = dataReference;
    BInvokableSymbol langLibMethodSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromIdNode(iterateIdentifier));
    iteratorInvocation.symbol = langLibMethodSymbol;
    iteratorInvocation.type = langLibMethodSymbol.retType;
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = true;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            langLibMethodSymbol.retType, this.env.scope.owner);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(foreach.pos, "$iterator$",
            langLibMethodSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(foreach.pos, iteratorVariable);
}
/**
 * Builds the {@code $result$} variable definition for a desugared foreach:
 * {@code var $result$ = $iterator$.next();} typed with the nillable result type.
 */
private BLangSimpleVariableDef getIteratorNextVariableDefinition(BLangForeach foreach,
                                                                 BVarSymbol iteratorSymbol,
                                                                 BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(foreach, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(foreach.pos, "$result$",
            foreach.nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(foreach.pos, resultVariable);
}
/**
 * Builds the loop-footer assignment {@code $result$ = $iterator$.next();}.
 * The receiver's type is narrowed to its non-nil form, since inside the loop body
 * the iterator is known to be non-nil.
 */
private BLangAssignment getIteratorNextAssignment(BLangForeach foreach,
                                                  BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(foreach, iteratorSymbol);
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(foreach.pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds an invocation node for {@code $iterator$.next()} using the {@code next}
 * function resolved from the iterator's object type.
 */
private BLangInvocation createIteratorNextInvocation(BLangForeach foreach, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier =
            ASTBuilderUtil.createIdentifier(foreach.pos, BLangBuiltInMethod.NEXT.getName());
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(foreach.pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = foreach.pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(foreach.pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}
/**
 * Builds the {@code if (<typeTest>) { <body> }} node used inside a desugared foreach.
 * NOTE(review): {@code resultSymbol} is currently unused here — presumably kept for
 * signature symmetry with the other foreach helpers; confirm before removing.
 */
private BLangIf getIfStatement(BLangForeach foreach, BVarSymbol resultSymbol,
                               BLangTypeTestExpr typeTestExpressionNode, BLangBlockStmt ifStatementBody) {
    BLangIf ifStatement = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifStatement.pos = foreach.pos;
    ifStatement.type = symTable.booleanType;
    ifStatement.expr = typeTestExpressionNode;
    ifStatement.body = ifStatementBody;
    return ifStatement;
}
/**
 * Builds the boolean type-test expression {@code $result$ is ()} used to detect
 * iterator exhaustion in a desugared foreach.
 */
private BLangTypeTestExpr getTypeTestExpression(BLangForeach foreach, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInTypeTest = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    // The tested type node is nil: `$result$ is ()`.
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.pos = foreach.pos;
    nilTypeNode.type = symTable.nilType;
    nilTypeNode.typeKind = TypeKind.NIL;
    BLangTypeTestExpr typeTestExpressionNode = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTestExpressionNode.pos = foreach.pos;
    typeTestExpressionNode.expr = resultReferenceInTypeTest;
    typeTestExpressionNode.typeNode = nilTypeNode;
    typeTestExpressionNode.type = symTable.booleanType;
    return typeTestExpressionNode;
}
/**
 * Builds the field access {@code $result$.value} that extracts the current element
 * from the iterator's next() record in a desugared foreach.
 */
private BLangFieldBasedAccess getValueAccessExpression(BLangForeach foreach, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(foreach.pos, "value");
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = foreach.pos;
    fieldBasedAccessExpression.type = foreach.varType;
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.type;
    return fieldBasedAccessExpression;
}
/**
 * Wraps an arrow function's single expression into a block body that contains
 * exactly one {@code return <expression>;} statement.
 *
 * @param bLangArrowFunction the arrow function providing the expression
 * @return a block node holding the synthesized return statement
 */
private BlockNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.expression.pos;
    returnStmt.setExpression(bLangArrowFunction.expression);
    BlockNode bodyBlock = TreeBuilder.createBlockNode();
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
/**
 * Replaces a table query expression with a native lang.table function call:
 * {@code queryTable(sql, fromTable[, joinTable], params, returnType)}. The
 * with-join variant is chosen when the query has a join clause.
 */
private BLangInvocation createInvocationFromTableExpr(BLangTableQueryExpression tableQueryExpression) {
    List<BLangExpression> args = new ArrayList<>();
    String functionName = QUERY_TABLE_WITHOUT_JOIN_CLAUSE;
    args.add(getSQLPreparedStatement(tableQueryExpression));
    args.add(getFromTableVarRef(tableQueryExpression));
    BType retType = tableQueryExpression.type;
    BLangExpression joinTable = getJoinTableVarRef(tableQueryExpression);
    if (joinTable != null) {
        // A join clause switches both the argument list and the target function.
        args.add(joinTable);
        functionName = QUERY_TABLE_WITH_JOIN_CLAUSE;
    }
    args.add(getSQLStatementParameters(tableQueryExpression));
    args.add(getReturnType(tableQueryExpression));
    BInvokableSymbol symbol =
            (BInvokableSymbol) symTable.langTableModuleSymbol.scope.lookup(names.fromString(functionName)).symbol;
    BLangInvocation invocation =
            ASTBuilderUtil.createInvocationExprForMethod(tableQueryExpression.pos, symbol, args, symResolver);
    invocation.argExprs = args;
    invocation.type = retType;
    return invocation;
}
/**
 * Builds an invocation node for a function resolved from the root scope by name,
 * with the given arguments and return type.
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.type = retType;
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds a string literal node holding the SQL query generated for the given
 * table query expression.
 */
private BLangLiteral getSQLPreparedStatement(BLangTableQueryExpression tableQueryExpression) {
    BLangLiteral queryLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    queryLiteral.type = symTable.stringType;
    queryLiteral.value = tableQueryExpression.getSqlQuery();
    return queryLiteral;
}
/**
 * Builds an empty struct literal of the table's constraint (row) type, used as the
 * return-type descriptor argument of the generated queryTable call.
 */
private BLangStructLiteral getReturnType(BLangTableQueryExpression
                                                 tableQueryExpression) {
    BTableType tableType = (BTableType) tableQueryExpression.type;
    BStructureType structType = (BStructureType) tableType.constraint;
    return new BLangStructLiteral(tableQueryExpression.pos, new ArrayList<>(), structType);
}
/**
 * Collects the literal parameters of the table query into an {@code any[]} array
 * literal. Each literal is tagged with the type matching its Java value (defaulting
 * to string) and wrapped in an implicit cast to {@code any}.
 */
private BLangArrayLiteral getSQLStatementParameters(BLangTableQueryExpression tableQueryExpression) {
    BLangArrayLiteral paramArray = createArrayLiteralExprNode();
    for (BLangExpression param : tableQueryExpression.getParams()) {
        BLangLiteral literal = (BLangLiteral) param;
        Object value = literal.getValue();
        int typeTag;
        if (value instanceof Integer || value instanceof Long) {
            typeTag = TypeTags.INT;
        } else if (value instanceof Double || value instanceof Float) {
            typeTag = TypeTags.FLOAT;
        } else if (value instanceof Boolean) {
            typeTag = TypeTags.BOOLEAN;
        } else if (value instanceof Object[]) {
            typeTag = TypeTags.ARRAY;
        } else {
            // Anything else is treated as a string parameter.
            typeTag = TypeTags.STRING;
        }
        literal.type = symTable.getTypeFromTag(typeTag);
        types.setImplicitCastExpr(literal, new BType(typeTag, null), symTable.anyType);
        paramArray.exprs.add(literal.impConversionExpr);
    }
    return paramArray;
}
/**
 * Builds an empty array literal node typed {@code any[]}.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.type = new BArrayType(symTable.anyType);
    return expr;
}
/**
 * Returns the rewritten expression referencing the joined table of a table query,
 * or {@code null} when the query has no join clause.
 */
private BLangExpression getJoinTableVarRef(BLangTableQueryExpression tableQueryExpression) {
    JoinStreamingInput joinStreamingInput = tableQueryExpression.getTableQuery().getJoinStreamingInput();
    if (joinStreamingInput == null) {
        return null;
    }
    BLangExpression joinTable = (BLangExpression) joinStreamingInput.getStreamingInput().getStreamReference();
    return rewrite(joinTable, env);
}
/**
 * Returns the rewritten expression referencing the source table of a table query.
 */
private BLangExpression getFromTableVarRef(BLangTableQueryExpression tableQueryExpression) {
    BLangExpression fromTable = (BLangExpression) tableQueryExpression.getTableQuery().getStreamingInput()
            .getStreamReference();
    return rewrite(fromTable, env);
}
/**
 * Desugars an invocation through a function pointer. The callee is materialized as
 * either a simple variable reference (plain pointer) or a field access (pointer held
 * in a field), and the invocation is wrapped in a BFunctionPointerInvocation node.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
    /**
     * Desugars invocations of language builtin methods (isNaN, isFinite, isInfinite,
     * clone, length, freeze/isFrozen, stamp, convert, detail, reason, iterate, call,
     * next) into equivalent expression trees or runtime utility invocations, and
     * stores the desugared node in {@code result}.
     */
    private void visitBuiltInMethodInvocation(BLangInvocation iExpr) {
        switch (iExpr.builtInMethod) {
            case IS_NAN:
                if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                    // float: NaN is the only value for which x != x holds.
                    BOperatorSymbol notEqSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.NOT_EQUAL, symTable.floatType, symTable.floatType);
                    BLangBinaryExpr binaryExprNaN = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.NOT_EQUAL,
                                                                                    notEqSymbol);
                    result = rewriteExpr(binaryExprNaN);
                } else {
                    // decimal: NaN is detected as !(x >= 0 || x < 0), since NaN has
                    // no ordering relative to zero.
                    BOperatorSymbol greaterEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.GREATER_EQUAL, symTable.decimalType, symTable.decimalType);
                    BOperatorSymbol lessThanSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.LESS_THAN, symTable.decimalType, symTable.decimalType);
                    BOperatorSymbol orSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.OR, symTable.booleanType, symTable.booleanType);
                    BOperatorSymbol notSymbol = (BOperatorSymbol) symResolver.resolveUnaryOperator(
                            iExpr.pos, OperatorKind.NOT, symTable.booleanType);
                    BLangLiteral literalZero = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "0");
                    BLangBinaryExpr binaryExprLHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, literalZero,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.GREATER_EQUAL,
                                                                                    greaterEqualSymbol);
                    BLangBinaryExpr binaryExprRHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, literalZero,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.LESS_THAN,
                                                                                    lessThanSymbol);
                    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprLHS,
                                                                                 binaryExprRHS,
                                                                                 symTable.booleanType,
                                                                                 OperatorKind.OR, orSymbol);
                    BLangUnaryExpr finalExprNaN = ASTBuilderUtil.createUnaryExpr(iExpr.pos, binaryExpr,
                                                                                 symTable.booleanType,
                                                                                 OperatorKind.NOT, notSymbol);
                    result = rewriteExpr(finalExprNaN);
                }
                break;
            case IS_FINITE:
                if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                    // float: finite iff x == x (not NaN) AND x is neither +Inf nor -Inf.
                    BOperatorSymbol equalSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.EQUAL, symTable.floatType, symTable.floatType);
                    BOperatorSymbol notEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.NOT_EQUAL, symTable.floatType, symTable.floatType);
                    BOperatorSymbol andEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.AND, symTable.booleanType, symTable.booleanType);
                    BLangBinaryExpr binaryExprLHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.EQUAL, equalSymbol);
                    BLangLiteral posInfLiteral = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                                                                              Double.POSITIVE_INFINITY);
                    BLangBinaryExpr nestedLHSExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, posInfLiteral,
                                                                                    iExpr.expr, symTable.booleanType,
                                                                                    OperatorKind.NOT_EQUAL,
                                                                                    notEqualSymbol);
                    BLangLiteral negInfLiteral = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                                                                              Double.NEGATIVE_INFINITY);
                    BLangBinaryExpr nestedRHSExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, negInfLiteral,
                                                                                    iExpr.expr, symTable.booleanType,
                                                                                    OperatorKind.NOT_EQUAL,
                                                                                    notEqualSymbol);
                    BLangBinaryExpr binaryExprRHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, nestedLHSExpr,
                                                                                    nestedRHSExpr,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.AND, andEqualSymbol);
                    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprLHS,
                                                                                 binaryExprRHS, symTable.booleanType,
                                                                                 OperatorKind.AND, andEqualSymbol);
                    result = rewriteExpr(binaryExpr);
                } else {
                    // decimal: finite iff x == x (NaN is the only self-unequal value).
                    BOperatorSymbol isEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.EQUAL, symTable.decimalType, symTable.decimalType);
                    BLangBinaryExpr finalExprFinite = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                                                                                      symTable.booleanType,
                                                                                      OperatorKind.EQUAL, isEqualSymbol);
                    result = rewriteExpr(finalExprFinite);
                }
                break;
            case IS_INFINITE:
                if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                    // float: infinite iff x == +Inf || x == -Inf.
                    BOperatorSymbol eqSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.EQUAL, symTable.floatType, symTable.floatType);
                    BOperatorSymbol orSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.OR, symTable.booleanType, symTable.booleanType);
                    BLangLiteral posInflitExpr = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                                                                              Double.POSITIVE_INFINITY);
                    BLangBinaryExpr binaryExprPosInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr,
                                                                                       posInflitExpr, symTable.booleanType,
                                                                                       OperatorKind.EQUAL, eqSymbol);
                    BLangLiteral negInflitExpr = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                                                                              Double.NEGATIVE_INFINITY);
                    BLangBinaryExpr binaryExprNegInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr,
                                                                                       negInflitExpr, symTable.booleanType,
                                                                                       OperatorKind.EQUAL, eqSymbol);
                    BLangBinaryExpr binaryExprInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprPosInf,
                                                                                    binaryExprNegInf,
                                                                                    symTable.booleanType,
                                                                                    OperatorKind.OR, orSymbol);
                    result = rewriteExpr(binaryExprInf);
                } else {
                    // decimal: infinite iff 1 / x == 0.
                    BLangLiteral literalZero = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "0");
                    BLangLiteral literalOne = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "1");
                    BOperatorSymbol isEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.EQUAL, symTable.decimalType, symTable.decimalType);
                    BOperatorSymbol divideSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                            OperatorKind.DIV, symTable.decimalType, symTable.decimalType);
                    BLangBinaryExpr divideExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, literalOne, iExpr.expr,
                                                                                 symTable.decimalType,
                                                                                 OperatorKind.DIV, divideSymbol);
                    BLangBinaryExpr finalExprInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, divideExpr, literalZero,
                                                                                   symTable.booleanType,
                                                                                   OperatorKind.EQUAL, isEqualSymbol);
                    result = rewriteExpr(finalExprInf);
                }
                break;
            case CLONE:
                result = visitCloneInvocation(iExpr.expr, iExpr.type);
                break;
            case LENGTH:
                result = visitLengthInvocation(iExpr);
                break;
            case FREEZE:
            case IS_FROZEN:
                visitFreezeBuiltInMethodInvocation(iExpr);
                break;
            case STAMP:
                result = visitTypeConversionInvocation(iExpr.expr.pos, iExpr.builtInMethod, iExpr.expr,
                                                       iExpr.requiredArgs.get(0), iExpr.type);
                break;
            case CONVERT:
                result = visitConvertInvocation(iExpr);
                break;
            case DETAIL:
                result = visitDetailInvocation(iExpr);
                break;
            case REASON:
            case ITERATE:
                result = visitUtilMethodInvocation(iExpr.expr.pos, iExpr.builtInMethod, Lists.of(iExpr.expr));
                break;
            case CALL:
                visitCallBuiltInMethodInvocation(iExpr);
                break;
            case NEXT:
                // next() desugars differently on the JVM backend than on the BVM.
                if (isJvmTarget) {
                    result = visitNextBuiltInMethodInvocation(iExpr);
                } else {
                    result = new BLangBuiltInMethodInvocation(iExpr, iExpr.builtInMethod);
                }
                break;
            default:
                throw new IllegalStateException();
        }
    }
    /**
     * Builds and desugars an invocation of the runtime utility function (from the
     * internal utils package) corresponding to the given builtin method, coercing
     * each argument to the utility's declared parameter type where required.
     */
    BLangInvocation visitUtilMethodInvocation(DiagnosticPos pos, BLangBuiltInMethod builtInMethod,
                                              List<BLangExpression> requiredArgs) {
        BInvokableSymbol invokableSymbol
                = (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(symTable.utilsPackageSymbol),
                                                              names.fromString(builtInMethod.getName()),
                                                              SymTag.FUNCTION);
        // Insert implicit casts so every argument matches the utility's signature.
        for (int i = 0; i < invokableSymbol.params.size(); i++) {
            requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), invokableSymbol.params.get(i).type));
        }
        BLangInvocation invocationExprMethod = ASTBuilderUtil
                .createInvocationExprMethod(pos, invokableSymbol, requiredArgs,
                                            new ArrayList<>(), symResolver);
        return rewrite(invocationExprMethod, env);
    }
    /**
     * JVM-backend desugaring of the iterator {@code next()} builtin: rewires the
     * call to the utils-package implementation and casts the result back to the
     * invocation's expected type.
     */
    private BLangExpression visitNextBuiltInMethodInvocation(BLangInvocation iExpr) {
        BInvokableSymbol invokableSymbol =
                (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(symTable.utilsPackageSymbol),
                                                            names.fromString(iExpr.builtInMethod.getName()), SymTag.FUNCTION);
        List<BLangExpression> requiredArgs = Lists.of(iExpr.expr);
        BLangExpression invocationExprMethod = ASTBuilderUtil.createInvocationExprMethod(iExpr.pos, invokableSymbol,
                                                                                         requiredArgs, new ArrayList<>(), symResolver);
        invocationExprMethod = addConversionExprIfRequired(invocationExprMethod, iExpr.type);
        return rewriteExpr(invocationExprMethod);
    }
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
if (types.isValueType(expr.type)) {
return expr;
}
return addConversionExprIfRequired(visitUtilMethodInvocation(expr.pos, BLangBuiltInMethod.CLONE,
Lists.of(expr)), lhsType);
}
private BLangExpression visitCloneAndStampInvocation(BLangExpression expr, BType lhsType) {
if (types.isValueType(expr.type)) {
return expr;
}
BLangInvocation cloned = visitUtilMethodInvocation(expr.pos, BLangBuiltInMethod.CLONE, Lists.of(expr));
return addConversionExprIfRequired(visitStampInvocation(cloned, lhsType, expr.pos), lhsType);
}
private BLangInvocation visitStampInvocation(BLangExpression expression, BType typeToStamp, DiagnosticPos pos) {
BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = typeToStamp;
typedescExpr.type = symTable.typeDesc;
return visitUtilMethodInvocation(pos, BLangBuiltInMethod.STAMP, Lists.of(typedescExpr, expression));
}
private BLangExpression visitConvertInvocation(BLangInvocation iExpr) {
BType targetType = iExpr.type;
if (iExpr.expr instanceof BLangTypedescExpr) {
targetType = ((BLangTypedescExpr) iExpr.expr).resolvedType;
}
BLangExpression inputTypeCastExpr = iExpr.requiredArgs.get(0);
if (types.isValueType(iExpr.requiredArgs.get(0).type)) {
inputTypeCastExpr = createTypeCastExpr(iExpr.requiredArgs.get(0), iExpr.requiredArgs.get(0).type,
symTable.anydataType);
}
BLangBuiltInMethod convertMethod;
if (types.isValueType(targetType)) {
convertMethod = BLangBuiltInMethod.SIMPLE_VALUE_CONVERT;
} else {
convertMethod = BLangBuiltInMethod.CONVERT;
}
BLangExpression invocationExpr =
visitTypeConversionInvocation(iExpr.expr.pos, convertMethod, iExpr.expr, inputTypeCastExpr, iExpr.type);
return invocationExpr;
}
private BLangExpression visitDetailInvocation(BLangInvocation iExpr) {
BLangInvocation utilMethod = visitUtilMethodInvocation(iExpr.expr.pos, iExpr.builtInMethod,
Lists.of(iExpr.expr));
utilMethod.type = iExpr.type;
return utilMethod;
}
private BLangExpression visitTypeConversionInvocation(DiagnosticPos pos, BLangBuiltInMethod builtInMethod,
BLangExpression typeDesc, BLangExpression valExpr,
BType lhType) {
return addConversionExprIfRequired(visitUtilMethodInvocation(pos, builtInMethod, Lists.of(typeDesc, valExpr)),
lhType);
}
private BLangExpression visitLengthInvocation(BLangInvocation iExpr) {
return visitUtilMethodInvocation(iExpr.pos, BLangBuiltInMethod.LENGTH, Lists.of(iExpr.expr));
}
    /**
     * Desugars {@code freeze()} / {@code isFrozen()} builtin calls. Value types
     * are inherently immutable: freeze() is the identity and isFrozen() is the
     * literal {@code true}. Reference values route to the runtime utility.
     * Stores the desugared node in {@code result}.
     */
    private void visitFreezeBuiltInMethodInvocation(BLangInvocation iExpr) {
        if (types.isValueType(iExpr.expr.type)) {
            if (iExpr.builtInMethod == BLangBuiltInMethod.FREEZE) {
                // freeze() on a value type is a no-op.
                result = iExpr.expr;
            } else {
                // isFrozen() on a value type is always true.
                result = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.booleanType, true);
            }
            return;
        }
        result = addConversionExprIfRequired(visitUtilMethodInvocation(iExpr.pos, iExpr.builtInMethod,
                                                                       Lists.of(iExpr.expr)), iExpr.type);
    }
    /**
     * Desugars a {@code call()} builtin on a function pointer into a direct
     * function-pointer invocation, re-pointing the invocation's symbol/name at
     * the pointer itself. Stores the result in {@code result}.
     *
     * NOTE(review): the three branches below rely on the exact expression kind of
     * the receiver; mutation order (symbol before name) matters.
     */
    private void visitCallBuiltInMethodInvocation(BLangInvocation iExpr) {
        BLangExpression expr = iExpr.expr;
        if (iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // fp.call(...) on a plain variable: the pointer symbol is the variable's.
            iExpr.symbol = ((BLangVariableReference) iExpr.expr).symbol;
            iExpr.expr = null;
        } else if (iExpr.expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
            // (<T>fp).call(...): unwrap the conversion to reach the pointer symbol.
            iExpr.symbol = ((BLangVariableReference) ((BLangTypeConversionExpr) iExpr.expr).expr).symbol;
            iExpr.expr = null;
        } else {
            // obj.fp.call(...): keep the access receiver as the invocation receiver.
            iExpr.expr = ((BLangAccessExpression) iExpr.expr).expr;
        }
        Name funcPointerName = iExpr.symbol.name;
        iExpr.name = ASTBuilderUtil.createIdentifier(iExpr.pos, funcPointerName.value);
        iExpr.builtinMethodInvocation = false;
        iExpr.functionPointerInvocation = true;
        result = new BFunctionPointerInvocation(iExpr, expr);
    }
    /**
     * Desugars {@code node} within the given environment, restoring the previous
     * environment afterwards. Nodes already marked desugared are returned
     * unchanged, keeping the pass idempotent. The visitor communicates its
     * output through the {@code result} field, which is consumed and cleared here.
     */
    @SuppressWarnings("unchecked")
    <E extends BLangNode> E rewrite(E node, SymbolEnv env) {
        if (node == null) {
            return null;
        }
        if (node.desugared) {
            return node;
        }
        SymbolEnv previousEnv = this.env;
        this.env = env;
        node.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;
        this.env = previousEnv;
        return (E) resultNode;
    }
    /**
     * Desugars an expression in the current environment. If the expression
     * carries a pending implicit-conversion wrapper, that wrapper is desugared
     * instead (and detached from the node to avoid re-wrapping). Consumes and
     * clears {@code result}.
     */
    @SuppressWarnings("unchecked")
    <E extends BLangExpression> E rewriteExpr(E node) {
        if (node == null) {
            return null;
        }
        if (node.desugared) {
            return node;
        }
        BLangExpression expr = node;
        if (node.impConversionExpr != null) {
            // Desugar the implicit conversion wrapper, not the bare node.
            expr = node.impConversionExpr;
            node.impConversionExpr = null;
        }
        expr.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;
        return (E) resultNode;
    }
    /**
     * Desugars a statement, threading it into the statement-link chain so later
     * passes can navigate parent/child statement relationships. The current link
     * is pushed before the rewrite and popped afterwards.
     */
    @SuppressWarnings("unchecked")
    <E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
        if (statement == null) {
            return null;
        }
        BLangStatementLink link = new BLangStatementLink();
        link.parent = currentLink;
        currentLink = link;
        BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
        // Link the (possibly replaced) statement back into the chain.
        link.statement = stmt;
        stmt.statementLink = link;
        currentLink = link.parent;
        return (E) stmt;
    }
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewriteExpr(nodeList.get(i)));
}
return nodeList;
}
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
stringLit.pos = pos;
return stringLit;
}
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
byteLiteral.pos = pos;
return byteLiteral;
}
private BLangExpression createTypeCastExpr(BLangExpression expr, BType sourceType, BType targetType) {
BOperatorSymbol symbol = (BOperatorSymbol) symResolver.resolveConversionOperator(sourceType, targetType);
return createTypeCastExpr(expr, targetType, symbol);
}
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType,
BOperatorSymbol symbol) {
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
conversionExpr.pos = expr.pos;
conversionExpr.expr = expr;
conversionExpr.type = targetType;
conversionExpr.targetType = targetType;
conversionExpr.conversionSymbol = symbol;
return conversionExpr;
}
private BType getElementType(BType type) {
if (type.tag != TypeTags.ARRAY) {
return type;
}
return getElementType(((BArrayType) type).getElementType());
}
    /**
     * Appends an implicit nil return at the end of an invokable whose return
     * type is nilable and whose body does not already end with a return.
     * Native functions and invokables with workers are left untouched.
     */
    private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
        if (Symbols.isNative(invokableNode.symbol)) {
            return;
        }
        BLangBlockStmt blockStmt = invokableNode.body;
        if (invokableNode.workers.size() == 0 &&
                invokableNode.symbol.type.getReturnType().isNullable()
                && (blockStmt.stmts.size() < 1 ||
                blockStmt.stmts.get(blockStmt.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
            // Anchor the synthetic return at the invokable's closing position.
            DiagnosticPos invPos = invokableNode.pos;
            DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
                    invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
            BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
            blockStmt.addStatement(returnStmt);
        }
    }
    /**
     * Reorder the invocation arguments to match the original function signature.
     *
     * <p>Named arguments are moved into their positional slots, and any trailing
     * rest arguments are packed into a single array literal (unless the caller
     * already supplied a spread rest-args expression).
     *
     * @param iExpr Function invocation expressions to reorder the arguments
     */
    private void reorderArguments(BLangInvocation iExpr) {
        BSymbol symbol = iExpr.symbol;
        if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
            return;
        }
        BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
        if (!invokableSymbol.params.isEmpty()) {
            // Re-order the named args
            reorderNamedArgs(iExpr, invokableSymbol);
        }
        if (invokableSymbol.restParam == null) {
            return;
        }
        // A single REST_ARGS_EXPR (spread) is already in packed form.
        if (iExpr.restArgs.size() == 1 && iExpr.restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
            return;
        }
        // Pack the individual rest arguments into one array literal.
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        arrayLiteral.exprs = iExpr.restArgs;
        arrayLiteral.type = invokableSymbol.restParam.type;
        iExpr.restArgs = new ArrayList<>();
        iExpr.restArgs.add(arrayLiteral);
    }
    /**
     * Rebuilds the required-args list in declared parameter order: positional
     * arguments are kept in place, named arguments are moved to their parameter's
     * slot, and unspecified defaultable parameters get a {@code BLangIgnoreExpr}
     * placeholder so the backend can substitute the default value.
     */
    private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol) {
        List<BLangExpression> args = new ArrayList<>();
        Map<String, BLangExpression> namedArgs = new HashMap<>();
        iExpr.requiredArgs.stream()
                .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
                .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
        List<BVarSymbol> params = invokableSymbol.params;
        int i = 0;
        for (; i < params.size(); i++) {
            BVarSymbol param = params.get(i);
            if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
                // Positional argument: already in the right slot.
                args.add(iExpr.requiredArgs.get(i));
            } else if (namedArgs.containsKey(param.name.value)) {
                // Named argument: move it into this parameter's slot.
                args.add(namedArgs.get(param.name.value));
            } else {
                // Not supplied: placeholder for the parameter's default value.
                BLangExpression expr = new BLangIgnoreExpr();
                expr.type = param.type;
                args.add(expr);
            }
        }
        iExpr.requiredArgs = args;
    }
    /**
     * Builds the error-branch match clause for a checked (safe-assignment)
     * expression: binds the failure value to a synthetic variable and either
     * returns it (when the enclosing function can return every equivalent error
     * type and this is not check-panic) or panics with it.
     */
    private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
            DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
        // Collect the enclosing function's return types (flattening a union).
        BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
        Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
                ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
                new LinkedHashSet<BType>() {{
                    add(enclosingFuncReturnType);
                }};
        // The error can be propagated via `return` only if every possible error
        // type is assignable to some member of the return type.
        boolean returnOnError = equivalentErrorTypes.stream()
                .allMatch(errorType -> returnTypeSet.stream()
                        .anyMatch(retType -> types.isAssignable(errorType, retType)));
        // Synthetic variable that binds the matched error value.
        String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
        BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
                patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                        names.fromString(patternFailureCaseVarName),
                        this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
        BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
                patternFailureCaseVar.symbol);
        BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
        patternBlockFailureCase.pos = pos;
        if (!isCheckPanicExpr && returnOnError) {
            // Propagate the error to the caller.
            BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
            returnStmt.pos = pos;
            returnStmt.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(returnStmt);
        } else {
            // checkpanic, or the error cannot be returned: panic instead.
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.pos = pos;
            panicNode.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(panicNode);
        }
        return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
    }
    /**
     * Builds the success-branch match clause for a checked (safe-assignment)
     * expression: binds the non-error value to a synthetic variable and assigns
     * it to the assignment's LHS (either the declared variable or the original
     * LHS expression).
     */
    private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
            boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
        //  File _$_f1 => f = _$_f1;
        String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
        BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
                patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
                        names.fromString(patternSuccessCaseVarName),
                        this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
        // Choose the assignment target: a fresh var ref for declarations,
        // the original LHS expression otherwise.
        BLangExpression varRefExpr;
        if (isVarDef) {
            varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        } else {
            varRefExpr = lhsExpr;
        }
        BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
                patternSuccessCaseVar.symbol);
        BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
                varRefExpr, patternSuccessCaseVarRef, false);
        BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
                new ArrayList<BLangStatement>() {{
                    add(assignmentStmtSuccessCase);
                }});
        return ASTBuilderUtil.createMatchStatementPattern(pos,
                patternSuccessCaseVar, patternBlockSuccessCase);
    }
    /**
     * Desugars a whole match statement into a chain of nested if-else
     * statements, one link per pattern clause. A trailing "last pattern"
     * becomes the final plain else block instead of another if.
     */
    private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
        List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
        BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
        BLangIf currentIfNode = parentIfNode;
        for (int i = 1; i < patterns.size(); i++) {
            BLangMatchBindingPatternClause patternClause = patterns.get(i);
            if (i == patterns.size() - 1 && patternClause.isLastPattern) {
                // Catch-all final pattern: plain else body, no condition needed.
                currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
            } else {
                currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
                currentIfNode = (BLangIf) currentIfNode.elseStmt;
            }
        }
        return parentIfNode;
    }
/**
* Generate an if-else statement from the given match statement.
*
* @param pattern match pattern statement node
* @param matchExprVar variable node of the match expression
* @return if else statement node
*/
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
}
BType expectedType = matchExprVar.type;
if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
if (structuredPattern.typeGuardExpr != null) {
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
blockStmt.addStatement(varDef);
blockStmt.addStatement(varDefStmt);
BLangStatementExpression stmtExpr = ASTBuilderUtil
.createStatementExpression(blockStmt, structuredPattern.typeGuardExpr);
stmtExpr.type = symTable.booleanType;
ifCondition = ASTBuilderUtil
.createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
symTable.booleanType));
} else {
structuredPattern.body.stmts.add(0, varDef);
structuredPattern.body.stmts.add(1, varDefStmt);
}
}
BLangIf ifNode = ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
return ifNode;
}
    /**
     * Builds the body block for a typed match pattern clause: prepends a
     * variable definition that binds the matched value (cast to the pattern's
     * type) to the pattern variable. Patterns binding {@code _} skip the binding.
     */
    private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
        BLangBlockStmt body;
        BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
        // Ignore-variable (`_`) patterns bind nothing.
        if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
            return patternClause.body;
        }
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
                matchExprVar.symbol);
        BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type);
        BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "",
                patternClause.variable.type, patternVarExpr, patternClause.variable.symbol);
        BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar);
        patternClause.body.stmts.add(0, patternVarDef);
        body = patternClause.body;
        return body;
    }
    /**
     * Builds the final else body for the last match pattern clause. Structured
     * patterns get the appropriate destructuring variable definition prepended
     * to the clause body; all other kinds use the body unchanged.
     */
    private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                                   BLangSimpleVariable matchExprVar) {
        BLangBlockStmt body = pattern.body;
        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
            BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) pattern;
            structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
            // Pick the variable-def kind matching the binding pattern.
            BLangStatement varDefStmt;
            if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                        (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                        (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                        (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            } else {
                varDefStmt = ASTBuilderUtil
                        .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            }
            structuredPattern.body.stmts.add(0, varDefStmt);
            body = structuredPattern.body;
        }
        return body;
    }
    /**
     * Wraps {@code expr} in a type-conversion node when its static type differs
     * from the required LHS type. Several cases are exempt (same type, implicit
     * cast already attached, nil-to-json, nullable-to-nil, tuple-to-array); the
     * remaining cases select an unbox, NOP-cast, or resolved cast operator.
     * NOTE(review): the order of these checks is significant — do not reorder.
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }
        BType rhsType = expr.type;
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }
        // Let the type checker attach an implicit cast if one applies; if it
        // did, that wrapper is sufficient.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            return expr;
        }
        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }
        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }
        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }
        // Choose the conversion operator for the remaining cases.
        BOperatorSymbol conversionSymbol;
        if (types.isValueType(lhsType)) {
            conversionSymbol = Symbols.createUnboxValueTypeOpSymbol(rhsType, lhsType);
        } else if (lhsType.tag == TypeTags.UNION && types.isSubTypeOfBaseType(lhsType, TypeTags.ERROR)) {
            // Error-only unions are narrowed to the plain error type.
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, symTable.errorType, symTable.errorType, false,
                    true, InstructionCodes.NOP, null, null);
            lhsType = symTable.errorType;
        } else if (lhsType.tag == TypeTags.UNION || rhsType.tag == TypeTags.UNION) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else if (lhsType.tag == TypeTags.MAP || rhsType.tag == TypeTags.MAP) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else if (lhsType.tag == TypeTags.TABLE || rhsType.tag == TypeTags.TABLE) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else {
            conversionSymbol = (BOperatorSymbol) symResolver.resolveCastOperator(expr, rhsType, lhsType);
        }
        // Build the explicit conversion node (type checking already done here).
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.conversionSymbol = conversionSymbol;
        conversionExpr.type = lhsType;
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        return conversionExpr;
    }
    /**
     * Creates the boolean condition that decides whether the given match clause
     * applies to the matched value. Union pattern types produce an OR-chain of
     * per-member-type checks; a single type produces one check.
     */
    private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol) {
        // Determine the type the pattern matches against, per clause kind.
        BType patternType;
        switch (patternClause.getKind()) {
            case MATCH_STATIC_PATTERN_CLAUSE:
                BLangMatchStaticBindingPatternClause staticPattern =
                        (BLangMatchStaticBindingPatternClause) patternClause;
                patternType = staticPattern.literal.type;
                break;
            case MATCH_STRUCTURED_PATTERN_CLAUSE:
                BLangMatchStructuredBindingPatternClause structuredPattern =
                        (BLangMatchStructuredBindingPatternClause) patternClause;
                patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
                break;
            default:
                BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
                patternType = simplePattern.variable.type;
                break;
        }
        BLangExpression binaryExpr;
        BType[] memberTypes;
        if (patternType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) patternType;
            memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
        } else {
            memberTypes = new BType[1];
            memberTypes[0] = patternType;
        }
        if (memberTypes.length == 1) {
            binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        } else {
            // Fold member-type checks into an OR-chain: (mN || (... || (m1 || m0))).
            BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
            BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
            for (int i = 2; i < memberTypes.length; i++) {
                lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
                rhsExpr = binaryExpr;
                binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                        symTable.booleanType, OperatorKind.OR,
                        (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                                lhsExpr.type, rhsExpr.type));
            }
        }
        return binaryExpr;
    }
/**
 * Computes the static type described by a structured binding pattern variable.
 * <p>
 * Tuple patterns produce a {@code BTupleType} (with a rest type when a rest variable is
 * present); record patterns synthesize an anonymous record type (and register a matching
 * type definition in the enclosing package); error patterns synthesize an anonymous error
 * type whose detail type is either the generic detail type or a synthesized record type.
 * Any other pattern falls back to the variable's resolved type.
 *
 * @param bindingPatternVariable the binding pattern variable of the match clause
 * @return the type the pattern can match
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        // Member types are computed recursively, in declaration order.
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest variable's type is an array; the tuple's rest type is its element type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }

    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        // Synthesize a fresh, uniquely named anonymous record type symbol for this pattern.
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);

        List<BField> fields = new ArrayList<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();

        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            // Field types come from the (possibly nested) value binding patterns.
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                    env.enclPkg.symbol.pkgID, fieldType, recordSymbol);

            // Mirror each field in the type's symbol table, the record type, and the AST node.
            fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }

        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;

        // If the pattern has a rest parameter, the rest field type is that map's constraint;
        // otherwise the record stays open over anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                symTable.anydataType;

        BLangRecordTypeNode recordTypeNode = createRecordTypeNode(typeDefFields, recordVarType);
        recordTypeNode.pos = bindingPatternVariable.pos;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        recordTypeNode.symbol = recordSymbol;
        recordTypeNode.initFunction = createInitFunctionForStructureType(recordTypeNode, env,
                Names.INIT_FUNCTION_SUFFIX);
        recordSymbol.scope.define(recordSymbol.initializerFunc.symbol.name, recordSymbol.initializerFunc.symbol);
        // Register the synthesized type with the package so code generation can see it.
        createTypeDefinition(recordVarType, recordSymbol, recordTypeNode);

        return recordVarType;
    }

    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest detail binding: the generic detail type covers it.
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);

            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            createTypeDefinition(detailType, detailType.tsymbol, recordTypeNode);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol,
                ((BErrorType) errorVariable.type).reasonType,
                detailType);
        errorTypeSymbol.type = errorType;

        createTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType));
        return errorType;
    }

    // Simple variable pattern: use the resolved variable type as-is.
    return bindingPatternVariable.type;
}
/**
 * Builds the record type AST node for the detail mapping of an error binding pattern.
 * Each detail entry becomes one field; entries without a bound symbol get a synthesized
 * public symbol typed as pure.
 *
 * @param errorVariable the error binding pattern supplying the detail entries
 * @param detailType    the already-computed record type of the detail mapping
 * @return a record type node whose fields mirror the detail entries
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldVars = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errorVariable.detail) {
        BVarSymbol fieldSymbol = detailEntry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // No symbol was bound for this entry; synthesize a public one with a pure type.
            fieldSymbol = new BVarSymbol(
                    Flags.PUBLIC,
                    names.fromString(detailEntry.key.value + "$"),
                    this.env.enclPkg.packageID,
                    symTable.pureType,
                    null);
        }
        fieldVars.add(ASTBuilderUtil.createVariable(
                detailEntry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                detailEntry.valueBindingPattern.type,
                detailEntry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return createRecordTypeNode(fieldVars, detailType);
}
/**
 * Synthesizes an anonymous record type describing the detail mapping of an error binding
 * pattern (e.g. {@code error(reason, key1 = v1, ...)}). One record field is created per
 * detail entry; the record is sealed only when no rest detail binding is present.
 *
 * @param detail     the explicit detail entries of the pattern
 * @param restDetail the rest detail binding, or {@code null} if absent
 * @param errorNo    running counter used to make the synthesized type name unique
 * @return the synthesized detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString("$anonErrorType$" + errorNo + "$detailType"),
            env.enclPkg.symbol.pkgID, null, null);

    // Give the record its no-op initializer and register it in the record's scope
    // under the conventional "<typeName>.<initName>" key.
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "." +
                    detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);

    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;

    if (restDetail == null) {
        // No rest binding: the pattern enumerates the full detail mapping.
        detailRecordType.sealed = true;
    }

    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        // Field types may themselves come from nested structured patterns.
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(
                Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
        detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
        detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
    }

    return detailRecordType;
}
/**
 * Creates a no-op initializer function (no parameters, nil return) for a synthesized
 * record type.
 *
 * @return the attached init function with its symbol and invokable type wired up
 */
private BAttachedFunction createRecordInitFunc() {
    // A zero-argument function type that returns nil.
    BInvokableType initFnType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFnSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, initFnType, env.scope.owner, false);
    initFnSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFnSymbol, initFnType);
}
/**
 * Wraps the given fields and resolved record type in a fresh record type AST node.
 *
 * @param typeDefFields field variables of the record
 * @param recordVarType the resolved record type to attach
 * @return the populated record type node
 */
private BLangRecordTypeNode createRecordTypeNode(List<BLangSimpleVariable> typeDefFields,
                                                 BRecordType recordVarType) {
    BLangRecordTypeNode node = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    node.fields = typeDefFields;
    node.type = recordVarType;
    return node;
}
/**
 * Wraps the given resolved error type in a fresh error type AST node.
 *
 * @param errorType the resolved error type to attach
 * @return the populated error type node
 */
private BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.type = errorType;
    return node;
}
/**
 * Registers a synthetic type definition with the enclosing package for a synthesized type.
 *
 * @param type     the resolved type
 * @param symbol   the type's symbol
 * @param typeNode the AST node describing the type
 */
private void createTypeDefinition(BType type, BTypeSymbol symbol, BLangType typeNode) {
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    // Attach to the package first (matching the original ordering), then populate.
    env.enclPkg.addTypeDefinition(typeDef);
    typeDef.typeNode = typeNode;
    typeDef.type = type;
    typeDef.symbol = symbol;
}
/**
 * Builds the single-type check used inside a match clause condition for one candidate type.
 * Static patterns compare against the literal; structured patterns use an {@code isLike}
 * test; typed patterns use either an equality-with-nil check (for the nil type) or an
 * is-assignable test.
 *
 * @param patternClause the clause being desugared
 * @param varSymbol     symbol of the temporary holding the matched value
 * @param patternType   the candidate type to test against
 * @return a boolean-typed test expression
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    DiagnosticPos pos = patternClause.pos;

    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
        BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
        return createBinaryExpression(pos, varRef, pattern.literal);
    }

    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
        return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
    }

    if (patternType == symTable.nilType) {
        // `var == ()` — nil has no is-assignable operator, so compare for equality.
        BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    } else {
        return createIsAssignableExpression(pos, varSymbol, patternType);
    }
}
/**
 * Builds the comparison expression for a static match pattern literal.
 * <p>
 * Group expressions are unwrapped; binary (OR-composed) patterns are expanded recursively
 * into {@code (varRef == lhs) || (varRef == rhs)}; the wildcard {@code _} becomes an
 * {@code is any} type test; everything else becomes an equality check against the literal.
 *
 * @param pos        source position for the synthesized nodes
 * @param varRef     reference to the temporary holding the matched value
 * @param expression the static pattern expression
 * @return a boolean-typed comparison expression
 */
private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    BLangBinaryExpr binaryExpr;

    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        // Parentheses carry no semantics here; recurse into the inner expression.
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }

    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);

        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // The wildcard pattern `_` matches any value: desugar to `varRef is any`.
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.type = symTable.anyType;
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);

        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct equality operator between the operand types; fall back to the
            // anydata type-set equality resolution.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                            binaryExpr);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
/**
 * Builds an is-assignable test checking whether the value of {@code varSymbol} conforms to
 * {@code patternType}.
 *
 * @param pos         source position for the synthesized nodes
 * @param varSymbol   symbol of the tested variable
 * @param patternType the target type of the test
 * @return a boolean-typed is-assignable expression
 */
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    // varRef isAssignable patternType
    return ASTBuilderUtil.createIsAssignableExpr(
            pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType, symTable.booleanType, names);
}
/**
 * Builds an {@code isLike} (shape) test of {@code expr} against the given type.
 *
 * @param pos  source position for the synthesized nodes
 * @param expr the tested expression
 * @param type the target type of the test
 * @return a boolean-typed isLike expression
 */
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
    BLangType typeNode = ASTBuilderUtil.createTypeNode(type);
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, typeNode, symTable.booleanType);
}
/**
 * Desugars a variable definition with an initializer into an assignment statement
 * {@code varRef = expr;} targeting the variable's own symbol.
 *
 * @param variable the variable whose initializer becomes the assignment RHS
 * @return the synthesized assignment statement
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef lhsRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    lhsRef.pos = variable.pos;
    lhsRef.variableName = variable.name;
    lhsRef.symbol = variable.symbol;
    lhsRef.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.setVariable(lhsRef);
    assignment.expr = variable.expr;
    return assignment;
}
/**
 * Builds {@code self.<field> = <defaultExpr>} for a structure field's default value and
 * rewrites it within the given function's environment.
 *
 * @param function the init function whose receiver supplies {@code self}
 * @param variable the field whose default expression is assigned
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable) {
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(variable.pos, function.receiver.symbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfRef, variable.name);
    fieldAccess.symbol = variable.symbol;
    fieldAccess.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.setVariable(fieldAccess);
    assignment.expr = variable.expr;

    // Rewrite inside the function's own environment so symbol lookups resolve there.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Appends an identity default pattern clause to a match expression covering every member
 * type that no explicit pattern can match. If all member types are already covered, the
 * expression is left unchanged.
 *
 * @param bLangMatchExpression the match expression to complete
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
    // Collect the static types the matched expression may produce.
    List<BType> exprTypes;
    if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
        exprTypes = new ArrayList<>(((BUnionType) bLangMatchExpression.expr.type).getMemberTypes());
    } else {
        exprTypes = Lists.of(bLangMatchExpression.type);
    }

    // Keep only the types that no explicit pattern clause can accept.
    List<BType> unmatchedTypes = new ArrayList<>();
    for (BType type : exprTypes) {
        boolean covered = bLangMatchExpression.patternClauses.stream()
                .anyMatch(pattern -> this.types.isAssignable(type, pattern.variable.type));
        if (!covered) {
            unmatchedTypes.add(type);
        }
    }

    if (unmatchedTypes.isEmpty()) {
        return; // every possible type already has a matching pattern
    }

    BType defaultPatternType = unmatchedTypes.size() == 1
            ? unmatchedTypes.get(0)
            : BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));

    String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
    BVarSymbol patternVarSymbol = new BVarSymbol(0, names.fromString(patternCaseVarName),
            this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner);
    BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            patternCaseVarName, defaultPatternType, null, patternVarSymbol);

    // The default clause simply yields the bound value unchanged.
    BLangMatchExprPatternClause defaultPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    defaultPattern.variable = patternMatchCaseVar;
    defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
    defaultPattern.pos = bLangMatchExpression.pos;
    bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * Decides whether the given access expression needs safe-navigation desugaring.
 * LHS expressions and expressions without a receiver never do; otherwise the expression
 * needs it when it — or any access expression along its receiver chain — uses
 * error/nil safe navigation.
 *
 * @param accessExpr the access expression to inspect
 * @return true if safe-navigation desugaring is required
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.lhsVar || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    NodeKind kind = accessExpr.expr.getKind();
    return (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR)
            && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access expression into a statement expression of the form
 * {@code { T tempResult; match ... ; } tempResult}, where the nested match statements
 * (built by {@link * handleSafeNavigation}) populate {@code tempResult}.
 * <p>
 * Uses and then resets the instance-level traversal state ({@code matchStmtStack},
 * {@code accessExprStack}, {@code successPattern}, {@code safeNavigationAssignment}),
 * so this method is not reentrant.
 *
 * @param accessExpr the safe-navigation access expression
 * @return the replacement statement expression with the original expression type
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.type;
    // Create a temp variable to hold the result of the expression.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
            accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
                    this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

    // Builds the nested match statements and pushes them onto matchStmtStack.
    handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);

    // The outermost match statement is the first element pushed onto the stack.
    BLangMatch matcEXpr = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
    BLangStatementExpression stmtExpression = ASTBuilderUtil.createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.type = originalExprType;

    // Reset the per-expression traversal state for the next safe-navigation rewrite.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successPattern = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
/**
 * Recursively desugars the receiver chain of a safe-navigation access expression into
 * nested match statements that branch on nil and/or error before evaluating the next
 * access step, storing the final value into {@code tempResultVar}.
 * <p>
 * The recursion processes the innermost receiver first; each level either rewires the
 * pending {@code safeNavigationAssignment} (non-safe step) or builds a match statement
 * and chains it into the parent's success pattern via the {@code successPattern} and
 * {@code matchStmtStack} instance state.
 *
 * @param accessExpr    the access expression at the current chain level
 * @param type          the result type used for the synthesized match statements
 * @param tempResultVar the temp variable that ultimately receives the result
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        return;
    }

    // Desugar the receiver chain bottom-up: innermost receiver first.
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
            kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }

    if (!accessExpr.errorSafeNavigation && !accessExpr.nilSafeNavigation) {
        // A plain access step: restore its original (unlifted) type and, if a success
        // pattern's assignment is pending, make this expression its RHS.
        accessExpr.type = accessExpr.originalType;
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
        }
        return;
    }

    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but { <--- parent match expr
     * error e => e,
     * T t => t.y but { <--- current expr
     * error e => e,
     * R r => r.z
     * }
     * }
     */
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());

    // Add pattern to lift nil
    if (accessExpr.nilSafeNavigation) {
        matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
    }

    // Add pattern to lift error, if applicable
    if (accessExpr.errorSafeNavigation) {
        matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
        matchStmt.pos = accessExpr.pos;
    }

    // Success pattern: the non-nil/non-error case continues the access chain.
    BLangMatchTypedBindingPatternClause successPattern =
            getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
    matchStmt.patternClauses.add(successPattern);
    this.matchStmtStack.push(matchStmt);

    // Nest this match statement inside the parent's success pattern, when one exists.
    if (this.successPattern != null) {
        this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
/**
 * Builds the {@code error e => tempResult = e;} clause used to lift an error out of a
 * safe-navigation match statement.
 *
 * @param expr          expression supplying source positions
 * @param tempResultVar the temp variable receiving the lifted error
 * @return the error pattern clause
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString(errorPatternVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(
            expr.pos, errorPatternVarName, symTable.errorType, null, errorVarSymbol);

    // tempResult = e;
    BLangAssignment assignToTemp = ASTBuilderUtil.createAssignmentStmt(
            expr.pos,
            ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol),
            ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol),
            false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignToTemp));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
}
/**
 * Builds a match-expression clause binding nil to the ignore variable and yielding the
 * given expression: {@code _ (nil) => expr}.
 *
 * @param pos  source position for the synthesized nodes
 * @param expr the expression the clause evaluates to
 * @return the nil pattern clause
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    String nullPatternVarName = IGNORE.toString();
    BVarSymbol nilVarSymbol = new BVarSymbol(0, names.fromString(nullPatternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(
            pos, nullPatternVarName, symTable.nilType, null, nilVarSymbol);

    BLangMatchExprPatternClause nullPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullPattern.pos = pos;
    nullPattern.variable = errorPatternVar;
    nullPattern.expr = expr;
    return nullPattern;
}
/**
 * Builds the {@code () n => tempResult = n;} clause used to lift nil out of a
 * safe-navigation match statement.
 *
 * @param expr          expression supplying source positions
 * @param tempResultVar the temp variable receiving the nil value
 * @return the nil pattern clause
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nilVarSymbol = new BVarSymbol(0, names.fromString(nullPatternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(
            expr.pos, nullPatternVarName, symTable.nilType, null, nilVarSymbol);

    // tempResult = <nil binding>;
    BLangAssignment assignToTemp = ASTBuilderUtil.createAssignmentStmt(
            expr.pos,
            ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol),
            ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol),
            false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignToTemp));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
}
/**
 * Builds the success clause of a safe-navigation match statement: the matched value is
 * bound to a fresh variable of the nil/error-lifted receiver type, and the access
 * expression is rewired (in place) to read from that variable with its safe-navigation
 * flags cleared. The clause body assigns the access result to {@code tempResultVar}.
 * <p>
 * Side effects: mutates {@code accessExpr} (receiver, flags, type) and records the
 * clause's assignment in {@code this.safeNavigationAssignment} for later RHS rewiring.
 *
 * @param accessExpr    the access expression at the current chain level (mutated)
 * @param tempResultVar the temp variable receiving the access result
 * @param liftError     whether the error type is also lifted from the receiver type
 * @return the success pattern clause
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar, boolean liftError) {
    // Receiver type with nil (and optionally error) removed.
    BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";

    BVarSymbol successPatternSymbol;
    if (type.tag == TypeTags.INVOKABLE) {
        // Function-typed values need an invokable symbol so later invocation desugaring works.
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }

    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);

    // Rewire the access expression to read from the bound success variable, with
    // safe navigation resolved (flags cleared, original type restored).
    accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.type = accessExpr.originalType;

    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));

    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    // Remember the assignment so the next (outer) chain level can replace its RHS.
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
/**
 * Decides whether an LHS expression needs safe-navigation handling: true when it is a
 * field/index access whose receiver chain contains a nilable-typed expression.
 *
 * @param expr the LHS expression to inspect
 * @return true if LHS safe-navigation desugaring is required
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    // Required if any receiver along the chain may be nil.
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    return receiver.type.isNullable() || safeNavigateLHS(receiver);
}
/**
 * Desugars an assignment whose LHS contains nilable receivers into a block statement:
 * first, guard statements (built by {@link * createLHSSafeNavigation}) that fill in or
 * panic on nil receivers, then the original assignment against a cloned LHS.
 *
 * @param accessExpr     the LHS access expression
 * @param rhsExpr        the assignment RHS
 * @param safeAssignment NOTE(review): currently unused in this body — presumably a
 *                       leftover from an earlier safe-assign (`=?`) desugaring; confirm
 *                       against callers before removing
 * @return a block statement replacing the original assignment
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    // Fresh receiver stack for this LHS chain.
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    createLHSSafeNavigation(stmts, accessExpr.expr);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
            cloneExpression(accessExpr), rhsExpr);
    stmts.add(assignment);
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
/**
 * Recursively emits guard statements for each receiver in an LHS access chain. For each
 * nilable receiver, an {@code if (recv is ())} statement is added that either initializes
 * the receiver with an empty mapping literal (for defaultable mapping types, except the
 * chain root) or panics with a nil-reference error. Invocation receivers are first
 * evaluated into an intermediate variable so they are not re-invoked.
 * <p>
 * The (possibly rewritten) receiver expression for the current level is communicated to
 * the caller via {@code this.accessExprStack}.
 *
 * @param stmts the statement list the guards are appended to
 * @param expr  the receiver expression at the current chain level
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
    NodeKind kind = expr.getKind();
    boolean root = false;
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        // Process the inner receiver first, then adopt its rewritten form.
        BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
        createLHSSafeNavigation(stmts, accessExpr.expr);
        accessExpr.expr = accessExprStack.pop();
    } else {
        // Plain variable reference: the root of the chain.
        root = true;
    }

    // If the expression is an invocation, then create a temp var to store the invocation
    // value, so that the invocation will happen only one time
    if (expr.getKind() == NodeKind.INVOCATION) {
        BLangInvocation invocation = (BLangInvocation) expr;
        BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
                + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
        BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
        BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
                intermediateVariable);
        stmts.add(intermediateVariableDefinition);

        expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
    }

    if (expr.type.isNullable()) {
        // Guard: if (expr is ()) { <fill-in or panic> }
        BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
        isNillTest.type = symTable.booleanType;

        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);

        // Cloning the expression and set the nil lifted type.
        expr = cloneExpression(expr);
        expr.type = types.getSafeType(expr.type, true, false);

        if (isDefaultableMappingType(expr.type) && !root) { // TODO for records, type should be defaultable as well
            // This will properly get desugared later to a json literal
            BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            jsonLiteral.type = expr.type;
            jsonLiteral.pos = expr.pos;
            BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                    expr, jsonLiteral);
            thenStmt.addStatement(assignment);
        } else {
            // Non-defaultable (or root) receiver: panic with a nil-reference error.
            BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
            literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
            literal.type = symTable.stringType;

            BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
            errorCtorInvocation.pos = expr.pos;
            errorCtorInvocation.argExprs.add(literal);
            errorCtorInvocation.requiredArgs.add(literal);
            errorCtorInvocation.type = symTable.errorType;
            errorCtorInvocation.symbol = symTable.errorConstructor;

            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.expr = errorCtorInvocation;
            panicNode.pos = expr.pos;
            thenStmt.addStatement(panicNode);
        }

        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
        stmts.add(ifelse);
    }

    // Hand the (possibly rewritten) receiver back to the caller.
    accessExprStack.push(expr);
}
/**
 * Creates a type node representing the nil type {@code ()}.
 *
 * @return a value type node for nil
 */
private BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.type = symTable.nilType;
    nilTypeNode.typeKind = TypeKind.NIL;
    return nilTypeNode;
}
/**
 * Clones a variable-reference-like expression: a simple variable reference is re-created
 * from its symbol; access expressions (field/index/invocation) are cloned via
 * {@code cloneAccessExpr}.
 *
 * @param expr the expression to clone
 * @return the cloned expression
 * @throws IllegalStateException for any other expression kind
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || kind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Creates a shallow clone of the given access expression (recursively cloning the access
 * expressions it is chained on), with safe-navigation flags cleared and the receiver typed
 * with its nil/error-lifted type.
 *
 * @param originalAccessExpr the access expression to clone
 * @return the cloned expression, or the original itself when it has no receiver
 * @throws IllegalStateException if the expression kind cannot be cloned
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }

    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression((BLangVariableReference) originalAccessExpr.expr);
    }
    // The receiver is typed with nil lifted out: safe navigation for the receiver has
    // already been handled when the clone is used.
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);

    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // BUG FIX: this branch previously assigned `accessExpr = null` and then fell
            // through to the field copies below, guaranteeing a NullPointerException.
            // Fail fast with a descriptive error instead (consistent with `default`).
            throw new IllegalStateException("cloning invocation expressions is not supported");
        default:
            throw new IllegalStateException();
    }

    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;

    // Once the safe-navigation flags are cleared, the access can no longer produce
    // navigation-lifted nil/error, so the original (unlifted) type applies.
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Builds {@code expr + 1}: converts an exclusive integer range start into an
 * inclusive one.
 *
 * @param expr the original start expression
 * @return the incremented start expression
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.ADD, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Builds {@code expr - 1}: converts an exclusive integer range end into an
 * inclusive one.
 *
 * @param expr the original end expression
 * @return the decremented end expression
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.SUB, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Returns an empty "container" literal to use as the fill-in value for a nil receiver of
 * the given access expression during LHS safe-navigation desugaring: a JSON array for an
 * int-indexed json access, otherwise an empty json/map/record literal matching the
 * receiver's lifted type.
 *
 * @param accessExpr the access expression whose receiver needs a default value
 * @return an empty literal of the appropriate container type
 * @throws IllegalStateException if the receiver type is not a defaultable mapping type
 */
private BLangExpression getDefaultValueExpr(BLangAccessExpression accessExpr) {
    BType fieldType = accessExpr.originalType;
    // Receiver type with nil lifted out.
    BType type = types.getSafeType(accessExpr.expr.type, true, false);
    switch (type.tag) {
        case TypeTags.JSON:
            if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR &&
                    ((BLangIndexBasedAccess) accessExpr).indexExpr.type.tag == TypeTags.INT) {
                // Integer-indexed json access: default to an empty json array.
                return new BLangJSONArrayLiteral(new ArrayList<>(), new BArrayType(fieldType));
            }
            return new BLangJSONLiteral(accessExpr.pos, new ArrayList<>(), fieldType);
        case TypeTags.MAP:
            return new BLangMapLiteral(accessExpr.pos, new ArrayList<>(), type);
        case TypeTags.RECORD:
            return new BLangRecordLiteral(accessExpr.pos, type);
        default:
            throw new IllegalStateException();
    }
}
/**
 * Converts a captured default-value literal into a literal expression matching the
 * parameter's type: numeric longs/strings become float/decimal/int literals as required by
 * the parameter type, strings for finite/union parameters consult the literal's own type
 * tag, and booleans map directly. An absent value becomes a nil literal.
 *
 * @param defaultValue the captured default value (may be {@code null})
 * @param paramTypeTag type tag of the parameter the default belongs to
 * @return a literal expression for the default value
 * @throws IllegalStateException if the value's Java type is unsupported
 */
private BLangExpression getDefaultValueLiteral(DefaultValueLiteral defaultValue, int paramTypeTag) {
    if (defaultValue == null || defaultValue.getValue() == null) {
        return getNullLiteral();
    }
    Object value = defaultValue.getValue();
    int literalTypeTag = defaultValue.getLiteralTypeTag();

    if (value instanceof Long) {
        if (paramTypeTag == TypeTags.FLOAT) {
            return getFloatLiteral(((Long) value).doubleValue());
        }
        if (paramTypeTag == TypeTags.DECIMAL) {
            return getDecimalLiteral(String.valueOf(value));
        }
        return getIntLiteral((Long) value);
    }

    if (value instanceof String) {
        String strValue = (String) value;
        if (paramTypeTag == TypeTags.FLOAT) {
            return getFloatLiteral(Double.parseDouble(strValue));
        }
        if (paramTypeTag == TypeTags.DECIMAL) {
            return getDecimalLiteral(String.valueOf(value));
        }
        // For finite/union parameter types the literal's own type tag decides
        // whether this is a float or a plain string default.
        if ((paramTypeTag == TypeTags.FINITE || paramTypeTag == TypeTags.UNION)
                && literalTypeTag == TypeTags.FLOAT) {
            return getFloatLiteral(Double.parseDouble(strValue));
        }
        return getStringLiteral(strValue);
    }

    if (value instanceof Boolean) {
        return getBooleanLiteral((Boolean) value);
    }

    throw new IllegalStateException("Unsupported default value type " + paramTypeTag);
}
/**
 * Returns the zero value literal for the given basic type tag; reference-like types
 * (records, objects, unions, finite types, ...) default to nil.
 *
 * @param paramTypeTag type tag of the parameter
 * @return a literal expression holding the type's implicit default
 */
private BLangExpression getDefaultValue(int paramTypeTag) {
    if (paramTypeTag == TypeTags.STRING) {
        return getStringLiteral("");
    }
    if (paramTypeTag == TypeTags.BOOLEAN) {
        return getBooleanLiteral(false);
    }
    if (paramTypeTag == TypeTags.FLOAT) {
        return getFloatLiteral(0.0);
    }
    if (paramTypeTag == TypeTags.BYTE || paramTypeTag == TypeTags.INT) {
        return getIntLiteral(0);
    }
    if (paramTypeTag == TypeTags.DECIMAL) {
        return getDecimalLiteral("0.0");
    }
    // FINITE, RECORD, OBJECT, UNION and everything else default to nil.
    return getNullLiteral();
}
/**
 * Creates a string-typed literal expression holding {@code value}.
 *
 * @param value the literal's string value
 * @return the literal node
 */
private BLangLiteral getStringLiteral(String value) {
    BLangLiteral stringLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    stringLit.type = symTable.stringType;
    stringLit.value = value;
    return stringLit;
}
/**
 * Creates an int-typed literal expression holding {@code value}.
 *
 * @param value the literal's integer value
 * @return the literal node
 */
private BLangLiteral getIntLiteral(long value) {
    BLangLiteral intLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLit.type = symTable.intType;
    intLit.value = value;
    return intLit;
}
/**
 * Creates a float-typed literal expression holding {@code value}.
 *
 * @param value the literal's floating-point value
 * @return the literal node
 */
private BLangLiteral getFloatLiteral(double value) {
    BLangLiteral floatLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    floatLit.type = symTable.floatType;
    floatLit.value = value;
    return floatLit;
}
/**
 * Creates a decimal-typed literal expression holding the given textual value.
 *
 * @param value the literal's decimal value as a string
 * @return the literal node
 */
private BLangLiteral getDecimalLiteral(String value) {
    BLangLiteral decimalLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    decimalLit.type = symTable.decimalType;
    decimalLit.value = value;
    return decimalLit;
}
/**
 * Creates a boolean-typed literal expression holding {@code value}.
 *
 * @param value the literal's boolean value
 * @return the literal node
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLit.type = symTable.booleanType;
    boolLit.value = value;
    return boolLit;
}
/**
 * Creates a nil-typed literal expression (value left unset, matching the nil literal).
 *
 * @return the nil literal node
 */
private BLangLiteral getNullLiteral() {
    BLangLiteral nilLit = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLit.type = symTable.nilType;
    return nilLit;
}
/**
 * Checks whether the (nil-lifted) type is a mapping type that can be default-constructed
 * as an empty container: json, map or record.
 *
 * @param type the type to check
 * @return true for json/map/record types (after lifting nil)
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    return tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD;
}
    /**
     * Create, register, and rewrite an init function for the given structure type node.
     * The function is attached (receiver = the structure instance), returns nil, is
     * marked fully untainted in the taint table, and is recorded on the type symbol
     * as its initializer function.
     *
     * @param structureTypeNode the record/object type node to create the init function for
     * @param env               enclosing symbol environment
     * @param suffix            name suffix for the init function
     * @return the desugared (rewritten) init function
     */
    private BLangFunction createInitFunctionForStructureType(BLangStructureTypeNode structureTypeNode, SymbolEnv env,
                                                             Name suffix) {
        BLangFunction initFunction = ASTBuilderUtil
                .createInitFunctionWithNilReturn(structureTypeNode.pos, Names.EMPTY.value, suffix);
        // The structure instance itself becomes the receiver of the init function.
        initFunction.receiver = ASTBuilderUtil.createReceiver(structureTypeNode.pos, structureTypeNode.type);
        BVarSymbol receiverSymbol = new BVarSymbol(Flags.asMask(EnumSet.noneOf(Flag.class)),
                names.fromIdNode(initFunction.receiver.name),
                env.enclPkg.symbol.pkgID, structureTypeNode.type, null);
        initFunction.receiver.symbol = receiverSymbol;
        initFunction.type = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        initFunction.attachedFunction = true;
        initFunction.flagSet.add(Flag.ATTACHED);
        // The symbol name encodes the owning type name plus the init suffix.
        Name funcSymbolName = names.fromString(Symbols.getAttachedFuncSymbolName(
                structureTypeNode.type.tsymbol.name.value, Names.USER_DEFINED_INIT_SUFFIX.value));
        initFunction.symbol = Symbols
                .createFunctionSymbol(Flags.asMask(initFunction.flagSet), funcSymbolName, env.enclPkg.symbol.pkgID,
                        initFunction.type, structureTypeNode.symbol.scope.owner,
                        initFunction.body != null);
        // Wire the receiver into the function's own scope and symbol.
        initFunction.symbol.scope = new Scope(initFunction.symbol);
        initFunction.symbol.scope.define(receiverSymbol.name, receiverSymbol);
        initFunction.symbol.receiverSymbol = receiverSymbol;
        receiverSymbol.owner = initFunction.symbol;
        initFunction.symbol.retType = symTable.nilType;
        // Mark the function as untainted for all inputs in the taint table.
        initFunction.symbol.taintTable = new HashMap<>();
        TaintRecord taintRecord = new TaintRecord(TaintRecord.TaintedStatus.UNTAINTED, new ArrayList<>());
        initFunction.symbol.taintTable.put(TaintAnalyzer.ALL_UNTAINTED_TABLE_ENTRY_INDEX, taintRecord);
        // Record the initializer on the owning type symbol and the type node.
        BStructureTypeSymbol typeSymbol = ((BStructureTypeSymbol) structureTypeNode.type.tsymbol);
        typeSymbol.initializerFunc = new BAttachedFunction(suffix, initFunction.symbol,
                (BInvokableType) initFunction.type);
        structureTypeNode.initFunction = initFunction;
        return rewrite(initFunction, env);
    }
    /**
     * Desugar a short-circuiting logical binary expression (&amp;&amp; / ||) into an
     * if-else over a temp result variable, wrapped in a statement expression.
     */
    private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
        /*
         * Desugar (lhsExpr && rhsExpr) to following if-else:
         *
         * logical AND:
         * -------------
         * T $result$;
         * if (lhsExpr) {
         *    $result$ = rhsExpr;
         * } else {
         *    $result$ = false;
         * }
         *
         * logical OR:
         * -------------
         * T $result$;
         * if (lhsExpr) {
         *    $result$ = true;
         * } else {
         *    $result$ = rhsExpr;
         * }
         *
         */
        BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
        BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
        BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
        BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        // then-branch: evaluate rhsExpr for AND; short-circuit to true for OR.
        BLangExpression thenResult;
        if (binaryExpr.opKind == OperatorKind.AND) {
            thenResult = binaryExpr.rhsExpr;
        } else {
            thenResult = getBooleanLiteral(true);
        }
        BLangAssignment thenAssignment =
                ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
        thenBody.addStatement(thenAssignment);
        // else-branch: short-circuit to false for AND; evaluate rhsExpr for OR.
        BLangExpression elseResult;
        BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        if (binaryExpr.opKind == OperatorKind.AND) {
            elseResult = getBooleanLiteral(false);
        } else {
            elseResult = binaryExpr.rhsExpr;
        }
        BLangAssignment elseAssignment =
                ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
        elseBody.addStatement(elseAssignment);
        BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
        BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = binaryExpr.type;
        result = rewriteExpr(stmtExpr);
    }
    /**
     * Split the package init function into several smaller functions.
     *
     * @param packageNode package node
     * @param env         symbol environment
     * @return initial init function but trimmed in size
     */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
int methodSize = INIT_METHOD_SPLIT_SIZE;
if (packageNode.initFunction.body.stmts.size() < methodSize || !isJvmTarget) {
return packageNode.initFunction;
}
BLangFunction initFunction = packageNode.initFunction;
List<BLangFunction> generatedFunctions = new ArrayList<>();
List<BLangStatement> stmts = new ArrayList<>();
stmts.addAll(initFunction.body.stmts);
initFunction.body.stmts.clear();
BLangFunction newFunc = initFunction;
int varDefIndex = 0;
for (int i = 0; i < stmts.size(); i++) {
if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
break;
}
varDefIndex++;
if (i > 0 && i % methodSize == 0) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
}
newFunc.body.stmts.add(stmts.get(i));
}
List<BLangStatement> chunkStmts = new ArrayList<>();
for (int i = varDefIndex; i < stmts.size(); i++) {
BLangStatement stmt = stmts.get(i);
chunkStmts.add(stmt);
varDefIndex++;
if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
(((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
(newFunc.body.stmts.size() + chunkStmts.size() > methodSize)) {
if (newFunc.body.stmts.size() + chunkStmts.size() > methodSize) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
}
newFunc.body.stmts.addAll(chunkStmts);
chunkStmts.clear();
} else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
(((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
Flags.LISTENER)
) {
break;
}
}
newFunc.body.stmts.addAll(chunkStmts);
for (int i = varDefIndex; i < stmts.size(); i++) {
if (i > 0 && i % methodSize == 0) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
}
newFunc.body.stmts.add(stmts.get(i));
}
generatedFunctions.add(newFunc);
for (int j = 0; j < generatedFunctions.size() - 1; j++) {
BLangFunction thisFunction = generatedFunctions.get(j);
BLangCheckedExpr checkedExpr =
ASTBuilderUtil.createCheckExpr(initFunction.pos,
createInvocationNode(generatedFunctions.get(j + 1).name.value,
new ArrayList<>(), symTable.errorOrNilType),
symTable.nilType);
checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
BLangExpressionStmt expressionStmt = ASTBuilderUtil.createExpressionStmt(thisFunction.pos,
thisFunction.body);
expressionStmt.expr = checkedExpr;
expressionStmt.expr.pos = initFunction.pos;
if (j > 0) {
thisFunction = rewrite(thisFunction, env);
packageNode.functions.add(thisFunction);
packageNode.topLevelNodes.add(thisFunction);
}
}
if (generatedFunctions.size() > 1) {
BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
lastFunc = rewrite(lastFunc, env);
packageNode.functions.add(lastFunc);
packageNode.topLevelNodes.add(lastFunc);
}
return generatedFunctions.get(0);
}
    /**
     * Create an intermediate package init function.
     *
     * @param pkgNode package node
     * @param env     symbol environment of package
     * @return the newly created intermediate init function
     */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
String alias = pkgNode.symbol.pkgID.toString();
BLangFunction initFunction = ASTBuilderUtil
.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
new Name(Names.INIT_FUNCTION_SUFFIX.value
+ this.initFuncIndex++), symTable);
createInvokableSymbol(initFunction, env);
return initFunction;
}
}
|
rewriteExpr(compoundAssignment.modifiedExpr));
|
    /**
     * Desugar a function: duplicate its symbol, rewrite params, workers, body and
     * annotation attachments, and — if the function carries a transaction
     * participant annotation — replace it with the participant wrapper form.
     */
    public void visit(BLangFunction funcNode) {
        SymbolEnv fucEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
        if (!funcNode.interfaceFunction) {
            addReturnIfNotPresent(funcNode);
        }
        // Keep the original symbol around and work on a duplicate.
        funcNode.originalFuncSymbol = funcNode.symbol;
        funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
        funcNode.requiredParams = rewrite(funcNode.requiredParams, fucEnv);
        funcNode.workers = rewrite(funcNode.workers, fucEnv);
        // Collect transaction participant annotations before rewriting the body.
        List<BLangAnnotationAttachment> participantAnnotation
                = funcNode.annAttachments.stream()
                .filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value,
                        a.annotationName.value))
                .collect(Collectors.toList());
        funcNode.body = rewrite(funcNode.body, fucEnv);
        funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
        if (funcNode.returnTypeNode != null) {
            funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
        }
        if (Symbols.isNative(funcNode.symbol)) {
            funcNode.externalAnnAttachments.forEach(attachment -> rewrite(attachment, env));
        }
        if (participantAnnotation.isEmpty()) {
            // Not a transaction participant: the rewritten function is the result.
            result = funcNode;
            return;
        }
        result = desugarParticipantFunction(funcNode, participantAnnotation);
    }
    /**
     * Desugar a transaction-participant function. The original body is wrapped in
     * lambdas ($anonTrxWrapperFunc$ inside $anonTrxParticipantFunc$), onCommit /
     * onAbort handler lambdas are built from the participant annotation, and the
     * function body is replaced with a single call to the transaction module's
     * participant-begin function, passing the transaction block id and the three
     * lambdas.
     *
     * @param funcNode              the participant function being desugared
     * @param participantAnnotation the participant annotation attachments (first is used)
     * @return the same function node with its body replaced
     */
    private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
                                                     List<BLangAnnotationAttachment> participantAnnotation) {
        BLangAnnotationAttachment annotation = participantAnnotation.get(0);
        BLangBlockStmt onCommitBody = null;
        BLangBlockStmt onAbortBody = null;
        // Params (and receiver) are captured by the generated lambdas, so mark
        // their symbols as closure variables.
        funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
        if (funcNode.receiver != null) {
            funcNode.receiver.symbol.closure = true;
        }
        BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
        BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
        // Handler lambdas: both take the transaction id string and return nil.
        BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
                ASTBuilderUtil.createTypeNode(symTable.nilType));
        BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
                ASTBuilderUtil.createTypeNode(symTable.nilType));
        BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
                .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                        new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                symTable.stringType, commitFunc.function.symbol));
        BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
                .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                        new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                symTable.stringType, abortFunc.function.symbol));
        BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
        BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
        // Pull user-supplied oncommit/onabort handlers out of the annotation record.
        List<BLangRecordLiteral.BLangRecordKeyValue> valuePairs =
                ((BLangRecordLiteral) annotation.expr).getKeyValuePairs();
        for (BLangRecordLiteral.BLangRecordKeyValue keyValuePair : valuePairs) {
            String func = (String) ((BLangLiteral) keyValuePair.getKey()).value;
            switch (func) {
                case Transactions.TRX_ONCOMMIT_FUNC:
                    BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) keyValuePair.valueExpr).symbol;
                    BLangInvocation onCommit = ASTBuilderUtil
                            .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
                                    Collections.emptyList(), symResolver);
                    BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
                    onCommitBody = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(onCommitStmt));
                    break;
                case Transactions.TRX_ONABORT_FUNC:
                    BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) keyValuePair.valueExpr).symbol;
                    BLangInvocation onAbort = ASTBuilderUtil
                            .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
                                    Collections.emptyList(), symResolver);
                    BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
                    onAbortBody = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(onAbortStmt));
                    break;
            }
        }
        // When no handler was supplied, generate an empty body returning nil.
        if (onCommitBody == null) {
            onCommitBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
            BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
            returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
        }
        if (onAbortBody == null) {
            onAbortBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
            BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
            returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
        }
        // Finalize the handler lambdas' bodies, params, and types.
        commitFunc.function.body = onCommitBody;
        commitFunc.function.requiredParams.add(onCommitTrxVar);
        commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
                commitFunc.function.symbol.type.getReturnType(), null);
        commitFunc.function.symbol.type = commitFunc.type;
        commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
        abortFunc.function.body = onAbortBody;
        abortFunc.function.requiredParams.add(onAbortTrxVar);
        abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
                abortFunc.function.symbol.type.getReturnType(), null);
        abortFunc.function.symbol.type = abortFunc.type;
        abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
        // Resolve the participant-begin function from the transaction module import.
        BSymbol trxModSym = env.enclPkg.imports
                .stream()
                .filter(importPackage -> importPackage.symbol.
                        pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
                                + Names.TRANSACTION_PACKAGE.value))
                .findAny().get().symbol;
        BInvokableSymbol invokableSymbol =
                (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(trxModSym),
                        getParticipantFunctionName(funcNode), SymTag.FUNCTION);
        BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
                getTransactionBlockId());
        // Wrap the original body in a lambda; the params it closes over were
        // marked as closures above.
        BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
                Collections.emptyList(),
                funcNode.returnTypeNode,
                funcNode.body);
        funcNode.requiredParams.forEach(var -> trxMainWrapperFunc.function.closureVarSymbols
                .add(new ClosureVarSymbol(var.symbol, var.pos)));
        BLangBlockStmt trxMainBody = ASTBuilderUtil.createBlockStmt(funcNode.pos);
        BLangLambdaFunction trxMainFunc
                = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
                trxReturnNode, trxMainBody);
        trxMainWrapperFunc.cachedEnv = trxMainFunc.function.clonedEnv;
        commitFunc.cachedEnv = env.createClone();
        abortFunc.cachedEnv = env.createClone();
        // Inside the participant lambda: define the wrapper as a local function
        // pointer and return the (converted) result of invoking it.
        BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
                trxMainWrapperFunc.type, trxMainFunc.function.symbol);
        BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
                trxMainWrapperFunc.type, trxMainWrapperFunc,
                wrapperSym);
        BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
        variableDef.var = wrapperFuncVar;
        BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
                wrapperFuncVar.symbol), env);
        BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
                wrapperFuncVar.symbol,
                trxMainWrapperFunc.function.symbol.retType);
        BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
                (wrapperInvocation, trxReturnNode.type));
        trxMainWrapperFunc.function.receiver = funcNode.receiver;
        trxMainFunc.function.receiver = funcNode.receiver;
        trxMainBody.stmts.add(wrapperReturn);
        rewrite(trxMainFunc.function, env);
        // Finally replace the function body with
        // `return <convert>(beginParticipant(blockId, mainFunc, commitFunc, abortFunc))`.
        List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
        BLangInvocation participantInvocation
                = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
                Collections.emptyList(), symResolver);
        participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
        BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
                (participantInvocation, funcNode.symbol.retType));
        funcNode.body = ASTBuilderUtil.createBlockStmt(funcNode.pos, Lists.of(rewrite(stmt, env)));
        return funcNode;
    }
private Name getParticipantFunctionName(BLangFunction function) {
if (Symbols.isFlagOn((function).symbol.flags, Flags.RESOURCE)) {
return TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION;
}
return TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
public void visit(BLangForever foreverStatement) {
result = streamingCodeDesugar.desugar(foreverStatement);
result = rewrite(result, env);
((BLangBlockStmt) result).stmts.stream().filter(stmt -> stmt.getKind() == NodeKind.VARIABLE_DEF)
.map(stmt -> (BLangSimpleVariableDef) stmt).forEach(varDef ->
((BLangBlockStmt) result).scope.define(varDef.var.symbol.name, varDef.var.symbol));
}
    @Override
    public void visit(BLangResource resourceNode) {
        // Intentionally empty: no desugaring is performed for a resource node here.
    }
    public void visit(BLangAnnotation annotationNode) {
        // Rewrite the attachments on the annotation declaration itself.
        // NOTE(review): unlike most visit methods, `result` is not assigned here —
        // confirm this is intentional for annotation declarations.
        annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    public void visit(BLangAnnotationAttachment annAttachmentNode) {
        // Rewrite the attachment's value expression in the current environment.
        annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env);
        result = annAttachmentNode;
    }
@Override
public void visit(BLangSimpleVariable varNode) {
if ((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
varNode.expr = null;
result = varNode;
return;
}
BLangExpression bLangExpression = rewriteExpr(varNode.expr);
if (bLangExpression != null) {
bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type);
}
varNode.expr = bLangExpression;
varNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = varNode;
}
    @Override
    public void visit(BLangTupleVariable varNode) {
        // Tuple binding patterns are desugared at their definition site
        // (see visit(BLangTupleVariableDef)); nothing to do for the variable alone.
        result = varNode;
    }
    @Override
    public void visit(BLangRecordVariable varNode) {
        // Record binding patterns are desugared at their definition site
        // (see visit(BLangRecordVariableDef)); nothing to do for the variable alone.
        result = varNode;
    }
    @Override
    public void visit(BLangErrorVariable varNode) {
        // Error binding patterns are desugared at their definition site
        // (see visit(BLangErrorVariableDef)); nothing to do for the variable alone.
        result = varNode;
    }
@Override
public void visit(BLangBlockStmt block) {
SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
block.stmts = rewriteStmt(block.stmts, blockEnv);
result = block;
}
    @Override
    public void visit(BLangSimpleVariableDef varDefNode) {
        // Rewrite the contained variable (which rewrites its initializer as well).
        varDefNode.var = rewrite(varDefNode.var, env);
        result = varDefNode;
    }
@Override
public void visit(BLangTupleVariableDef varDefNode) {
varDefNode.var = rewrite(varDefNode.var, env);
BLangTupleVariable tupleVariable = varDefNode.var;
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
BType runTimeType = new BArrayType(symTable.anyType);
String name = "tuple";
final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
this.env.scope.owner));
tuple.expr = tupleVariable.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
variableDef.var = tuple;
createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
result = rewrite(blockStmt, env);
}
    /**
     * Generate statements binding a tuple rest pattern (`...rest`) to a new array:
     * the rest variable is initialized to an empty array literal, then a foreach
     * over the indices past the fixed members copies each remaining element into it.
     *
     * @param parentTupleVariable the tuple binding pattern being desugared
     * @param blockStmt           block to append the generated statements to
     * @param tupleVarSymbol      symbol of the temp variable holding the tuple value
     */
    private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                            BVarSymbol tupleVarSymbol) {
        final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
        boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
        DiagnosticPos pos = blockStmt.pos;
        if (arrayVar != null) {
            // rest = [] — start from an empty array literal.
            BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
            arrayExpr.type = arrayVar.type;
            arrayVar.expr = arrayExpr;
            BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
            arrayVarDef.var = arrayVar;
            BLangExpression tupleExpr = parentTupleVariable.expr;
            BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
            // The copy starts after the fixed members of the tuple pattern.
            BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
            startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                    : parentTupleVariable.memberVariables.size());
            startIndexLiteral.type = symTable.intType;
            BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
            BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                    getModifiedIntRangeEndExpr(lengthInvocation));
            // foreach $foreach$i in start...length: rest[rest.length()] = tuple[$foreach$i]
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = intRangeInvocation;
            types.setForeachTypedBindingPatternType(foreach);
            final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                    "$foreach$i", foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
            // Appending is done by indexing the rest array at its current length.
            BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                    createLengthInvocation(pos, arrayVarRef));
            indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
            createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
            foreach.body = foreachBody;
            blockStmt.addStatement(foreach);
        }
    }
@Override
public void visit(BLangRecordVariableDef varDefNode) {
BLangRecordVariable varNode = varDefNode.var;
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
runTimeType, this.env.scope.owner));
mapVariable.expr = varDefNode.var.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
variableDef.var = mapVariable;
createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorVariableDef varDefNode) {
BLangErrorVariable errorVariable = varDefNode.errorVariable;
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
symTable.errorType, null, errorVarSymbol);
error.expr = errorVariable.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
variableDef.var = error;
createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
result = rewrite(blockStmt, env);
}
    /**
     * This method iterates through each member of the tupleVar and creates the relevant var def statements. This
     * method does the check for node kind of each member and calls the related var def creation method.
     *
     * Example:
     * ((string, float), int) ((a, b), c) = (tuple)
     *
     * (a, b) is again a tuple, so it is a recursive var def creation.
     *
     * c is a simple var, so a simple var def will be created.
     *
     */
    private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
        for (int index = 0; index < memberVars.size(); index++) {
            BLangVariable variable = memberVars.get(index);
            // Each member is accessed by its integer position in the bound tuple value.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
            if (NodeKind.VARIABLE == variable.getKind()) {
                // Simple member: direct var def from tuple[index].
                createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                        parentIndexAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
                // Nested tuple: recurse with the index access as the new parent access.
                BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record pattern: recurse into the record overload.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error pattern: recurse into the error overload; for array-typed
                // sources the element type of the access comes from the array.
                BType accessedElemType = symTable.errorType;
                if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                    BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                    accessedElemType = arrayType.eType;
                }
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            }
        }
    }
/**
* Overloaded method to handle record variables.
     * This method iterates through each member of the recordVar and creates the relevant var def statements. This
     * method does the check for node kind of each member and calls the related var def creation method.
*
* Example:
* type Foo record {
* string name;
* (int, string) age;
* Address address;
* };
*
* Foo {name: a, age: (b, c), address: d} = {record literal}
*
* a is a simple var, so a simple var def will be created.
*
* (b, c) is a tuple, so it is a recursive var def creation.
*
* d is a record, so it is a recursive var def creation.
*
*/
    private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
        for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
            BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
            // Each field is accessed by its string key on the bound map value.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                    recordFieldKeyValue.key.value);
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
                // Simple field: direct var def from map[key].
                createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                        indexExpr, recordVarSymbol, parentIndexAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
                // Nested tuple pattern: recurse into the tuple overload.
                BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                        parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record pattern: recurse.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                        recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error pattern: recurse into the error overload.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            }
        }
        if (parentRecordVariable.restParam != null) {
            // Bind the rest pattern to a map holding all fields not matched above.
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
            BLangSimpleVarRef variableReference;
            if (parentIndexAccessExpr != null) {
                // Nested case: materialize the parent access into a temp map first.
                BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                        parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                                this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;
                variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top-level case: reuse the map temp defined as the block's first statement.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                        ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }
            // Filter out all explicitly-matched keys, then assign the remainder.
            List<String> keysToRemove = parentRecordVariable.variableList.stream()
                    .map(var -> var.getKey().getValue())
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                    keysToRemove, restParamType, parentBlockStmt);
            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
            BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
            BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                    parentBlockStmt);
            restParamVarDef.var = restParam;
            restParamVarDef.var.type = restParamType;
            restParam.expr = varRef;
        }
    }
/**
* This method will create the relevant var def statements for reason and details of the error variable.
* The var def statements are created by creating the reason() and detail() builtin methods.
*/
    private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
        BVarSymbol convertedErrorVarSymbol;
        if (parentIndexBasedAccess != null) {
            // The error value comes from an index access on a parent structure:
            // bind it to a fresh error-typed temp first (temporarily widening the
            // access's type to any so the conversion can be inserted).
            BType prevType = parentIndexBasedAccess.type;
            parentIndexBasedAccess.type = symTable.anyType;
            BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                    symTable.errorType,
                    addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                    parentErrorVariable.pos);
            parentIndexBasedAccess.type = prevType;
            parentBlockStmt.addStatement(errorVarDef);
            convertedErrorVarSymbol = errorVarDef.var.symbol;
        } else {
            convertedErrorVarSymbol = errorVariableSymbol;
        }
        // Bind the reason via the reason() builtin, unless the pattern ignores it (`_`).
        parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
                parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
        if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
            parentErrorVariable.reason = null;
        } else {
            BLangSimpleVariableDef reasonVariableDef =
                    ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
            reasonVariableDef.var = parentErrorVariable.reason;
        }
        // No detail or rest-detail bindings: done after the reason.
        if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
                && parentErrorVariable.restDetail == null) {
            return;
        }
        BType detailMapType;
        BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
        if (detailType.tag == TypeTags.MAP) {
            detailMapType = detailType;
        } else {
            detailMapType = symTable.detailType;
        }
        // Bind the detail mapping via the detail() builtin into a temp variable.
        parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
                parentErrorVariable.pos, detailMapType, parentBlockStmt,
                convertedErrorVarSymbol, null);
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
                parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
        detailTempVarDef.type = parentErrorVariable.detailExpr.type;
        parentBlockStmt.addStatement(detailTempVarDef);
        this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
        // One bound variable per explicit detail entry.
        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
            BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
            createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
        }
        if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
            // Bind the rest pattern to the detail entries not matched above.
            DiagnosticPos pos = parentErrorVariable.restDetail.pos;
            BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                    pos, detailTempVarDef.var.symbol);
            List<String> keysToRemove = parentErrorVariable.detail.stream()
                    .map(detail -> detail.key.getValue())
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                    parentErrorVariable.restDetail.type, parentBlockStmt);
            BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                    parentErrorVariable.restDetail.name.value,
                    filteredDetail.type,
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                    parentErrorVariable.restDetail.symbol);
            BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                    ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
            parentBlockStmt.addStatement(assignmentStmt);
        }
        rewrite(parentBlockStmt, env);
    }
/**
 * Defines a fresh temp variable of {@code targetType} initialized from the given error symbol,
 * inserting a type conversion when the target is not a record type.
 *
 * @param errorVarySymbol symbol of the already-bound error value
 * @param pos             position to attach to the synthesized nodes
 * @param targetType      type the temp variable should carry
 * @return a variable definition for the (possibly cast) temp variable
 */
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                     BType targetType) {
    BVarSymbol castTempSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVarRef sourceRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    // Record targets are used as-is; everything else goes through the conversion helper.
    BLangExpression initExpr = targetType.tag == TypeTags.RECORD
            ? sourceRef
            : addConversionExprIfRequired(sourceRef, targetType);
    BLangSimpleVariable castTempVar =
            ASTBuilderUtil.createVariable(pos, castTempSym.name.value, targetType, initExpr, castTempSym);
    return ASTBuilderUtil.createVariableDef(pos, castTempVar);
}
/**
 * Desugars a rest binding pattern into a chain of temp variables that (1) cast the source map,
 * (2) take its entries, (3) filter out the explicitly-bound keys, (4) map entries back to values,
 * and (5) rebuild a map of {@code targetType} via constructFrom.
 *
 * @param mapVarRef       reference to the source map being destructured
 * @param pos             position for all synthesized nodes
 * @param keysToRemove    keys already bound by other patterns, to be excluded from the rest map
 * @param targetType      declared type of the rest variable (a map type)
 * @param parentBlockStmt block into which the intermediate variable defs are appended, in order
 * @return the final temp variable holding the filtered rest map
 */
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
List<String> keysToRemove, BType targetType,
BLangBlockStmt parentBlockStmt) {
BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
// Shared counter keeps the synthetic names of all temps for this pattern unique and related.
int restNum = annonVarCount++;
String name = "$map$ref$" + restNum;
BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
// map.entries() yields map<[string, Constraint]> — declared explicitly below.
BLangInvocation entriesInvocation = generateMapEntriesInvocation(pos, typeCastExpr, mapVariable);
String entriesVarName = "$map$ref$entries$" + restNum;
BType entriesType = new BMapType(TypeTags.MAP,
new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
addConversionExprIfRequired(entriesInvocation, entriesType),
entriesVarName);
// Lambda returning false for keys in keysToRemove; used as the filter predicate.
BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
String filteredEntriesName = "$filtered$detail$entries" + restNum;
BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
filteredEntriesName);
String filteredVarName = "$detail$filtered" + restNum;
// Projects each surviving [key, value] entry back to its value.
BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
mapInvocation,
filteredVarName);
String filteredRestVarName = "$restVar$" + restNum;
// constructFrom stamps the value with the rest variable's declared map type.
BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
return defVariable(pos, targetType, parentBlockStmt,
addConversionExprIfRequired(constructed, targetType),
filteredRestVarName);
}
/**
 * Builds an invocation of the lang-lib {@code entries} function on the given detail map variable.
 */
private BLangInvocation generateMapEntriesInvocation(DiagnosticPos pos, BLangExpression typeCastExpr,
                                                     BLangSimpleVariable detailMap) {
    BLangInvocation entriesInvocation = createInvocationNode("entries", new ArrayList<>(), typeCastExpr.type);
    entriesInvocation.expr = ASTBuilderUtil.createVariableRef(pos, detailMap.symbol);
    entriesInvocation.symbol = symResolver.lookupLangLibMethod(typeCastExpr.type, names.fromString("entries"));
    // Lang-lib calls carry the receiver as the first required argument as well.
    entriesInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, detailMap.symbol));
    entriesInvocation.type = entriesInvocation.symbol.type.getReturnType();
    return entriesInvocation;
}
/**
 * Builds an invocation of the lang-lib {@code map} function on the filtered entries variable,
 * passing {@code backToMapLambda} as the projection function.
 */
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    BLangInvocation mapInvocation = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    mapInvocation.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    mapInvocation.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    // Receiver first, then the projection lambda.
    mapInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    mapInvocation.requiredArgs.add(backToMapLambda);
    mapInvocation.type = mapInvocation.symbol.type.getReturnType();
    return mapInvocation;
}
/**
 * Synthesizes a lambda {@code function ([string, any] $lambdaArg$0) returns any} that returns
 * the second tuple element (the entry's value). Used to project filtered map entries back into
 * a plain value map. The function is rewritten and registered on the enclosing package.
 *
 * @param pos position attached to all synthesized nodes
 * @return the lambda wrapping the synthesized function
 */
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
// Unique synthetic name per generated lambda.
String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
// Single parameter: the [key, value] tuple delivered by map's functional argument.
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
// Return type: any (the entry value).
BLangValueType anyType = new BLangValueType();
anyType.typeKind = TypeKind.ANY;
anyType.type = symTable.anyType;
function.returnTypeNode = anyType;
BLangBlockStmt functionBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
function.body = functionBlock;
// val = $lambdaArg$0[1]  — the tuple's value element.
BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 1));
BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupSecondElem);
// return val;
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
// Build the invokable symbol mirroring the synthesized signature.
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
symTable.anyType, null);
function.symbol = functionSymbol;
// Rewrite before registering so the package sees the desugared form.
rewrite(function, env);
env.enclPkg.addFunction(function);
return createLambdaFunction(function, functionSymbol);
}
/**
 * Builds an invocation of the lang-lib {@code filter} function on the given entries-map variable,
 * passing {@code filter} (a {@code [string, any] -> boolean} lambda) as the predicate.
 *
 * @param pos                  position for the synthesized nodes
 * @param entriesInvocationVar temp variable holding the entries map
 * @param filter               predicate lambda deciding which entries survive
 * @return the {@code filter} invocation node
 */
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
                                                    BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
    // Receiver first, then the predicate lambda. (Previously the node was copied into a
    // redundant alias local before adding the lambda; the alias added nothing.)
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
    invocationNode.requiredArgs.add(filter);
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    return invocationNode;
}
/**
 * Defines a new variable of the given type and initializer, appends its definition statement
 * to {@code parentBlockStmt}, and registers the symbol in the current scope.
 *
 * @return the newly created variable node
 */
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
                                        BLangExpression expression, String name) {
    Name symName = names.fromString(name);
    BVarSymbol varSymbol =
            new BVarSymbol(Flags.PUBLIC, symName, env.enclPkg.packageID, varType, env.scope.owner);
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, name, varType, expression, varSymbol);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable);
    variableDef.type = varType;
    parentBlockStmt.addStatement(variableDef);
    env.scope.define(symName, variable.symbol);
    return variable;
}
/**
 * Appends a variable-definition statement for one bound error-detail entry, dispatching on the
 * kind of the value binding pattern (simple, record, or tuple). Other kinds are left untouched,
 * matching the original behavior.
 */
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                          BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                          BLangExpression detailEntryVar) {
    switch (detailEntry.valueBindingPattern.getKind()) {
        case VARIABLE:
            BLangSimpleVariableDef errorDetailVar = createVarDef(
                    ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                    detailEntry.valueBindingPattern.type,
                    detailEntryVar,
                    detailEntry.valueBindingPattern.pos);
            parentBlockStmt.addStatement(errorDetailVar);
            break;
        case RECORD_VARIABLE:
            BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
                    detailEntry.valueBindingPattern.pos,
                    (BLangRecordVariable) detailEntry.valueBindingPattern);
            recordVariableDef.var.expr = detailEntryVar;
            recordVariableDef.type = symTable.recordType;
            parentBlockStmt.addStatement(recordVariableDef);
            break;
        case TUPLE_VARIABLE:
            // NOTE(review): unlike the other branches, the tuple branch does not wire
            // detailEntryVar into the def — preserved as-is; confirm this is intentional.
            parentBlockStmt.addStatement(ASTBuilderUtil.createTupleVariableDef(
                    detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern));
            break;
        default:
            break;
    }
}
/**
 * Creates the access expression that reads one detail entry (by its key) out of the temp
 * detail-map variable, tagging index-access results with the pure type as the original type.
 */
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression entryAccess = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    if (entryAccess.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) entryAccess).originalType = symTable.pureType;
    }
    return entryAccess;
}
/**
 * Folds the parts of a string template into a left-associated chain of {@code +} binary
 * expressions. Non-string, non-xml parts are first routed through {@code toString};
 * a concatenation involving xml yields xml, otherwise string.
 *
 * @param exprs ordered template parts; may be empty (yields {@code null})
 */
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression accumulated = null;
    for (BLangExpression expr : exprs) {
        boolean needsToString = expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML;
        BLangExpression piece = needsToString ? getToStringInvocationOnExpr(expr) : expr;
        if (accumulated == null) {
            // First part seeds the fold.
            accumulated = piece;
        } else {
            BType concatType = accumulated.type.tag == TypeTags.XML || piece.type.tag == TypeTags.XML
                    ? symTable.xmlType
                    : symTable.stringType;
            accumulated = ASTBuilderUtil.createBinaryExpr(accumulated.pos, accumulated, piece,
                    concatType, OperatorKind.ADD, null);
        }
    }
    return accumulated;
}
/**
 * Wraps the given expression in an invocation of the lang.value {@code toString} function,
 * converting the argument to the function's expected parameter type if required.
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    // Plain list construction instead of double-brace initialization: the {{...}} idiom
    // creates an anonymous ArrayList subclass that captures the enclosing instance.
    List<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}
/**
 * Builds an invocation of the error {@code detail} lang-lib function. When a parent index-based
 * access is supplied (nested error patterns) it becomes the receiver (converted to error);
 * otherwise the given error variable symbol is referenced directly.
 *
 * @param pos                   position for synthesized nodes (used only in the no-parent branch)
 * @param detailType            expected detail map type of the invocation node
 * @param parentBlockStmt       unused here; kept for signature compatibility with callers
 * @param errorVarySymbol       symbol of the error variable to read the detail from
 * @param parentIndexBasedAccess access expression yielding the error, or null
 */
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BType detailType,
BLangBlockStmt parentBlockStmt,
BVarSymbol errorVarySymbol,
BLangIndexBasedAccess parentIndexBasedAccess) {
BLangInvocation detailInvocation = createInvocationNode(
ERROR_DETAIL_FUNCTION_NAME, new ArrayList<>(), detailType);
detailInvocation.builtInMethod = BLangBuiltInMethod.getFromString(ERROR_DETAIL_FUNCTION_NAME);
if (parentIndexBasedAccess != null) {
// Nested pattern: the receiver is the parent access expression, coerced to error.
detailInvocation.expr = addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType);
detailInvocation.symbol = symResolver.lookupLangLibMethod(parentIndexBasedAccess.type,
names.fromString(ERROR_DETAIL_FUNCTION_NAME));
detailInvocation.requiredArgs = Lists.of(parentIndexBasedAccess);
} else {
// Top-level pattern: reference the bound error variable directly.
detailInvocation.expr = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
detailInvocation.symbol = symResolver.lookupLangLibMethod(errorVarySymbol.type,
names.fromString(ERROR_DETAIL_FUNCTION_NAME));
detailInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, errorVarySymbol));
}
detailInvocation.type = detailInvocation.symbol.type.getReturnType();
return detailInvocation;
}
/**
 * Builds an invocation of the error {@code reason} lang-lib function; mirrors
 * {@link #generateErrorDetailBuiltinFunction}. With a parent index-based access the parent
 * expression (converted to error) is the receiver; otherwise the error variable symbol is used.
 *
 * @param pos                    position for synthesized nodes (no-parent branch only)
 * @param reasonType             expected reason type of the invocation node
 * @param errorVarSymbol         symbol of the error variable to read the reason from
 * @param parentIndexBasedAccess access expression yielding the error, or null
 */
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
BVarSymbol errorVarSymbol,
BLangIndexBasedAccess parentIndexBasedAccess) {
BLangInvocation reasonInvocation = createInvocationNode(ERROR_REASON_FUNCTION_NAME,
new ArrayList<>(), reasonType);
reasonInvocation.builtInMethod = BLangBuiltInMethod.getFromString(ERROR_REASON_FUNCTION_NAME);
if (parentIndexBasedAccess != null) {
// Nested pattern: receiver is the parent access, coerced to error.
reasonInvocation.expr = addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType);
reasonInvocation.symbol = symResolver.lookupLangLibMethod(parentIndexBasedAccess.type,
names.fromString(ERROR_REASON_FUNCTION_NAME));
reasonInvocation.requiredArgs = Lists.of(parentIndexBasedAccess);
} else {
// Top-level pattern: reference the bound error variable directly.
reasonInvocation.expr = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
reasonInvocation.symbol = symResolver.lookupLangLibMethod(errorVarSymbol.type,
names.fromString(ERROR_REASON_FUNCTION_NAME));
reasonInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, errorVarSymbol));
}
reasonInvocation.type = reasonInvocation.symbol.type.getReturnType();
return reasonInvocation;
}
/**
 * Builds a {@code constructFrom} invocation that converts the value held by {@code source}
 * into {@code targetType}; the resulting node is typed {@code targetType|error}.
 */
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
                                                        BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    // Receiver is a typedesc literal for the target type.
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    BLangInvocation constructFrom = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
    constructFrom.expr = typedescExpr;
    constructFrom.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
    constructFrom.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
    constructFrom.type = BUnionType.create(null, targetType, symTable.errorType);
    return constructFrom;
}
/**
 * Creates the rest-field filtering lambda for a record variable reference by delegating to the
 * {@code List<String>} overload, exactly as the {@code BLangRecordVariable} overload already
 * does — the previous implementation duplicated the whole lambda-construction logic inline.
 *
 * NOTE(review): delegation means the synthesized function now uses the
 * "$anonRestParamFilterFunc$" name prefix instead of "$anonFunc$"; these synthetic names are
 * internal to the generated code — confirm nothing keys off the old prefix.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVarRef recordVarRef, DiagnosticPos pos) {
    List<String> boundFieldNames = recordVarRef.recordRefFields.stream()
            .map(field -> field.variableName.getValue())
            .collect(Collectors.toList());
    return createFuncToFilterOutRestParam(boundFieldNames, pos);
}
/**
 * Synthesizes a lambda {@code function ([string, any] entry) returns boolean} that returns
 * false for entries whose key appears in {@code toRemoveList} and true otherwise. Used as the
 * predicate when filtering bound keys out of a rest map.
 *
 * @param toRemoveList keys that must be excluded (each adds one early-return-false if-check)
 * @param pos          position attached to all synthesized nodes
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
// Unique synthetic name per generated filter lambda.
String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
// Single [string, any] tuple parameter: the map entry under test.
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangBlockStmt functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
// key = $lambdaArg$0[0]  — the entry's key.
BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 0));
BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupFirstElem);
// One "if (key == <name>) { return false; }" per excluded key.
for (String toRemoveItem : toRemoveList) {
createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
}
// Fall-through: return true (entry survives the filter).
BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
return createLambdaFunction(function, functionSymbol);
}
/**
 * Creates the rest-field filtering lambda for a record variable by collecting the names of
 * its explicitly bound fields and delegating to the {@code List<String>} overload.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
    return createFuncToFilterOutRestParam(
            recordVariable.variableList.stream()
                    .map(keyValue -> keyValue.getKey().getValue())
                    .collect(Collectors.toList()),
            pos);
}
/**
 * Appends {@code if (<param as string> == key) { return false; }} to the given block.
 * Used by the rest-filter lambdas to reject entries whose key is already bound.
 *
 * @param pos              position attached to the synthesized nodes
 * @param inputParamSymbol symbol holding the entry key (typed any; converted to string here)
 * @param blockStmt        block to append the if-statement to
 * @param key              field name to compare against
 */
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockStmt blockStmt, String key) {
BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
// The tuple element is typed any; the equality check needs a string operand.
BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
// Then-branch: return false — the entry is filtered out.
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
ifStmt.body = ifBlock;
// Condition: (<converted> == "<key>"), wrapped in a group expression.
BLangGroupExpr groupExpr = new BLangGroupExpr();
groupExpr.type = symTable.booleanType;
BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
symTable.booleanType, OperatorKind.EQUAL, null);
binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
groupExpr.expression = binaryExpr;
ifStmt.expr = groupExpr;
}
/**
 * Wraps an already-built function and its symbol in a lambda node, copying the symbol's
 * invokable type onto the lambda.
 */
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambda.function = function;
    lambda.type = functionSymbol.type;
    return lambda;
}
/**
 * Appends a trailing {@code return true;} to the synthesized filter function's body, builds its
 * invokable symbol ({@code [string, any] -> boolean}), rewrites the function, and registers it
 * on the enclosing package.
 *
 * @return the function's invokable symbol, for wrapping into a lambda by the caller
 */
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
BLangBlockStmt functionBlock) {
// Fall-through result of the filter predicate: keep the entry.
BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
// Build the invokable symbol mirroring the synthesized signature.
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
symTable.booleanType, null);
function.symbol = functionSymbol;
// Rewrite before registering so the package sees the desugared form.
rewrite(function, env);
env.enclPkg.addFunction(function);
return functionSymbol;
}
/**
 * Wires up the common skeleton of a synthesized filter function: a single [string, any] tuple
 * parameter bound to {@code keyValSymbol}, a boolean return type, and an empty body block.
 *
 * @return the (empty) body block, for the caller to populate
 */
private BLangBlockStmt createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                    BVarSymbol keyValSymbol) {
    function.requiredParams.add(
            ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(), null, keyValSymbol));
    BLangValueType booleanReturnType = new BLangValueType();
    booleanReturnType.typeKind = TypeKind.BOOLEAN;
    booleanReturnType.type = symTable.booleanType;
    function.returnTypeNode = booleanReturnType;
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
    function.body = body;
    return body;
}
/**
 * Returns a fresh {@code [string, any]} tuple type, used as the parameter type of the
 * synthesized map-entry lambdas.
 */
private BTupleType getStringAnyTupleType() {
    // Plain list construction instead of double-brace initialization: the {{...}} idiom
    // creates an anonymous ArrayList subclass that captures the enclosing instance.
    List<BType> memberTypes = new ArrayList<>();
    memberTypes.add(symTable.stringType);
    memberTypes.add(symTable.anyType);
    return new BTupleType(memberTypes);
}
/**
* This method creates a simple variable def and assigns an array expression based on the given indexExpr.
*
* case 1: when there is no parent array access expression, but with the indexExpr : 1
* string s = x[1];
*
* case 2: when there is a parent array expression : x[2] and indexExpr : 3
* string s = x[2][3];
*
* case 3: when there is no parent array access expression, but with the indexExpr : name
* string s = x[name];
*
* case 4: when there is a parent map expression : x[name] and indexExpr : fName
* string s = x[name][fName];
*
* case 5: when there is a parent map expression : x[name] and indexExpr : 1
* string s = x[name][1];
*/
/**
 * Appends a variable-definition statement for one simple binding, initializing it from an
 * index-based access on the tuple/map temp. The '_' wildcard produces no statement.
 */
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    // Nothing to bind for the ignore ('_') pattern.
    if (names.fromIdNode(simpleVariable.name) == Names.IGNORE) {
        return;
    }
    BLangSimpleVariableDef varDefStmt =
            ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt);
    varDefStmt.var = simpleVariable;
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
/**
 * Desugars an assignment. Safe-navigation LHS expressions are expanded by the safe-navigation
 * rewriter; otherwise both sides are rewritten and a conversion is added on the RHS if needed.
 */
@Override
public void visit(BLangAssignment assignNode) {
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    // Rewrite the RHS exactly once. The previous code assigned rewriteExpr(expr) and then
    // called rewriteExpr again on the already-rewritten result before the conversion check,
    // which is redundant and relies on rewriteExpr being idempotent.
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}
/**
 * Desugars a tuple destructuring statement into a block: the RHS is stored in an any[] temp
 * named "tuple", each member of the var-ref generates an assignment from an index access, and
 * a trailing loop assigns the rest binding (if any).
 */
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
// Runtime carrier for the RHS: an any[] temp the member assignments index into.
BType runTimeType = new BArrayType(symTable.anyType);
String name = "tuple";
final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
this.env.scope.owner));
tuple.expr = tupleDestructure.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos,
blockStmt);
variableDef.var = tuple;
// Member bindings first, then the rest binding (reads indices past the bound members).
createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
result = rewrite(blockStmt, env);
}
/**
 * Generates the statements that populate a tuple rest binding ({@code ...rest}): the rest
 * variable is assigned a fresh empty array, then a foreach over the index range
 * [boundMemberCount, length(rhs)) copies each remaining element by appending at
 * {@code rest[rest.length()]}. No-op when the var-ref has no rest param.
 *
 * @param tupleDestructure the destructure statement providing the var-ref and RHS
 * @param blockStmt        block the assignment and foreach are appended to
 * @param tupleVarSymbol   symbol of the any[] temp holding the RHS
 */
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
BVarSymbol tupleVarSymbol) {
BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
DiagnosticPos pos = blockStmt.pos;
if (tupleVarRef.restParam != null) {
BLangExpression tupleExpr = tupleDestructure.expr;
// rest = [];  — start from an empty array of the rest param's type.
BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
BArrayType restParamType = (BArrayType) restParam.type;
BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
arrayExpr.type = restParamType;
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = arrayExpr;
// Iteration starts after the explicitly bound members.
BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
startIndexLiteral.value = (long) tupleVarRef.expressions.size();
startIndexLiteral.type = symTable.intType;
BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
getModifiedIntRangeEndExpr(lengthInvocation));
// foreach $foreach$i in start...(length-adjusted end) { rest[rest.length()] = tuple[$foreach$i]; }
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
"$foreach$i", foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
// Append target: rest[rest.length()] — grows the array by one each iteration.
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
createLengthInvocation(pos, restParam));
indexAccessExpr.type = restParamType.eType;
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
}
}
/**
 * Builds an invocation of the lang-lib {@code length} function on the given collection
 * expression, typed with the resolved symbol's return type.
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSymbol,
            Lists.of(collection), symResolver);
    // Keep argExprs in sync with requiredArgs for downstream passes.
    invocation.argExprs = invocation.requiredArgs;
    invocation.type = lengthSymbol.type.getReturnType();
    return invocation;
}
/**
* This method iterates through each member of the tupleVarRef and creates the relevant var ref assignment
* statements. This method does the check for node kind of each member and calls the related var ref creation method.
*
* Example:
* ((a, b), c)) = (tuple)
*
* (a, b) is again a tuple, so it is a recursive var ref creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
/**
 * Walks each member expression of the tuple var-ref and emits the matching assignment
 * statements: leaf references get a direct indexed assignment; nested tuple, record, and error
 * var-refs recurse with a new index-based access chained onto the parent access (if any).
 * Member kinds outside these cases produce no statements.
 *
 * @param parentTupleVariable  the tuple var-ref whose members are being assigned
 * @param parentBlockStmt      block the assignments are appended to
 * @param tupleVarSymbol       symbol of the any[] temp holding the RHS
 * @param parentIndexAccessExpr access expression for the enclosing member, or null at top level
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangExpression> expressions = parentTupleVariable.expressions;
for (int index = 0; index < expressions.size(); index++) {
BLangExpression expression = expressions.get(index);
// Leaf references: assign tuple[index] (through the parent chain) directly.
if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
tupleVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested tuple pattern: recurse with tuple[index] as the new parent access.
if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
continue;
}
// Nested record pattern: recurse, viewing tuple[index] as a map.
if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
continue;
}
// Nested error pattern: recurse, keeping the member's own static type on the access.
if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
}
}
}
/**
* This method creates an assignment statement and assigns an array expression based on the given indexExpr.
*
*/
/**
 * Appends an assignment of the indexed element (through the optional parent access chain, with
 * a conversion to the target's type if required) to the given leaf variable reference.
 * A simple var-ref named '_' produces no statement.
 *
 * @param simpleVarRef         LHS of the generated assignment
 * @param parentBlockStmt      block the assignment is appended to
 * @param indexExpr            index/key to read from the temp
 * @param tupleVarSymbol       symbol of the temp holding the RHS container
 * @param parentArrayAccessExpr access expression for the enclosing member, or null
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
BLangIndexBasedAccess parentArrayAccessExpr) {
// Nothing to assign for the ignore ('_') pattern.
if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
if (varName == Names.IGNORE) {
return;
}
}
BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
indexExpr, tupleVarSymbol, parentArrayAccessExpr);
assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
parentBlockStmt);
assignmentStmt.varRef = simpleVarRef;
assignmentStmt.expr = assignmentExpr;
}
/**
 * Creates an any-typed index access on the temp container (chained onto {@code parentExpr}
 * when present). Value types are additionally wrapped in an unbox conversion so the result
 * carries {@code varType}.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess accessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    accessExpr.originalType = varType;
    if (parentExpr != null) {
        accessExpr.expr = parentExpr;
    }
    if (!types.isValueType(varType)) {
        // Reference types need no unboxing.
        return accessExpr;
    }
    BLangTypeConversionExpr unboxExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    unboxExpr.expr = accessExpr;
    unboxExpr.conversionSymbol = Symbols.createUnboxValueTypeOpSymbol(symTable.anyType, varType);
    unboxExpr.type = varType;
    return unboxExpr;
}
/**
 * Desugars a record destructuring statement into a block: the RHS is stored in a
 * {@code map<any>} temp and each field of the var-ref is assigned from a keyed access.
 */
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    BType mapAnyType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    String tempName = "$map$0";
    BVarSymbol mapVarSym = new BVarSymbol(0, names.fromString(tempName), this.env.scope.owner.pkgID,
            mapAnyType, this.env.scope.owner);
    final BLangSimpleVariable mapVariable =
            ASTBuilderUtil.createVariable(recordDestructure.pos, tempName, mapAnyType, null, mapVarSym);
    mapVariable.expr = recordDestructure.expr;
    final BLangSimpleVariableDef mapVarDef =
            ASTBuilderUtil.createVariableDefStmt(recordDestructure.pos, blockStmt);
    mapVarDef.var = mapVariable;
    createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * Desugars an error destructuring statement into a block: the RHS is stored in an error-typed
 * temp and the reason/detail members of the var-ref are assigned from it.
 */
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String tempName = "$error$";
    BVarSymbol errorVarSym = new BVarSymbol(0, names.fromString(tempName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, tempName,
            symTable.errorType, null, errorVarSym);
    errorVar.expr = errorDestructure.expr;
    final BLangSimpleVariableDef errorVarDef =
            ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, blockStmt);
    errorVarDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * Generates the per-field assignment statements for a record destructuring.
 * For each field reference, reads the field value out of the temp map variable
 * (via a string-index access) and either assigns it directly (simple refs) or
 * recurses for nested record/tuple/error references. Finally, if a rest param
 * ({@code ...rest}) is present, builds a filtered copy of the map that excludes
 * the explicitly destructured keys and assigns it to the rest variable.
 *
 * @param parentRecordVarRef   the record var-ref being destructured
 * @param parentBlockStmt      block receiving the generated statements
 * @param recordVarSymbol      symbol of the temp variable holding the RHS map
 * @param parentIndexAccessExpr index access of the enclosing container, or null at top level
 */
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
BLangExpression variableReference = varRefKeyValue.variableReference;
// Key of this field inside the runtime map.
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
varRefKeyValue.variableName.getValue());
// Leaf references (simple var, field/index access, xml attribute) get a direct assignment.
if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
indexExpr, recordVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested record reference: recurse with this field's index access as the new parent.
if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested tuple reference: same pattern with a tuple-typed index access.
if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
symTable.tupleType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested error reference: recurse with an error-typed index access.
if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
symTable.errorType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
arrayAccessExpr);
}
}
if (parentRecordVarRef.restParam != null) {
DiagnosticPos pos = parentBlockStmt.pos;
BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
BLangSimpleVarRef variableReference;
if (parentIndexAccessExpr != null) {
// Nested level: materialize the sub-map into its own temp ($map$1) first.
BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
restParamType, this.env.scope.owner));
mapVariable.expr = parentIndexAccessExpr;
BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
variableDef.var = mapVariable;
variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
} else {
// Top level: reuse the temp map defined as the first statement of the block.
variableReference = ASTBuilderUtil.createVariableRef(pos,
((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
}
BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
// Keys already bound to named fields must not appear in the rest map.
List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
.map(field -> field.variableName.value)
.collect(Collectors.toList());
BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
keysToRemove, restParamType, parentBlockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = varRef;
}
}
/**
 * Generates the assignment statements for an error destructuring: assigns the
 * reason string (unless it is the ignore var {@code _}), extracts the detail map
 * into a temp variable, assigns each named detail entry, and finally — if a rest
 * var is present — assigns a filtered copy of the detail map with the already
 * extracted keys removed.
 *
 * @param parentErrorVarRef     the error var-ref being destructured
 * @param parentBlockStmt       block receiving the generated statements
 * @param errorVarySymbol       symbol of the temp variable holding the RHS error
 * @param parentIndexAccessExpr index access of the enclosing container, or null at top level
 */
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
// Only assign the reason when it is not the ignored var "_".
if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
BLangAssignment reasonAssignment = ASTBuilderUtil
.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
reasonAssignment.varRef = parentErrorVarRef.reason;
}
// Nothing else to do when there are no detail bindings and the rest var is absent/ignored.
if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
return;
}
// Pull the detail record out of the error value into a fresh temp variable.
BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
((BErrorType) parentErrorVarRef.type).detailType, parentBlockStmt, errorVarySymbol,
parentIndexAccessExpr);
BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
symTable.detailType, errorDetailBuiltinFunction,
parentErrorVarRef.pos);
detailTempVarDef.type = symTable.detailType;
parentBlockStmt.addStatement(detailTempVarDef);
this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
// Assign each named detail entry; remember its key so the rest filter can exclude it.
List<String> extractedKeys = new ArrayList<>();
for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
extractedKeys.add(detail.name.value);
BLangVariableReference ref = (BLangVariableReference) detail.expr;
BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
createStringLiteral(detail.name.pos, detail.name.value),
detailTempVarDef.var.symbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
detailAssignment.varRef = ref;
detailAssignment.expr = detailEntryVar;
}
// Rest var gets the detail map minus the keys that were explicitly bound above.
if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
detailTempVarDef.var.symbol);
BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
extractedKeys,
parentErrorVarRef.restVar.type, parentBlockStmt);
BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
parentBlockStmt);
restAssignment.varRef = parentErrorVarRef.restVar;
restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
filteredDetail.symbol);
}
// Record detail types need an initializer function defined for later codegen.
BErrorType errorType = (BErrorType) parentErrorVarRef.type;
if (errorType.detailType.getKind() == TypeKind.RECORD) {
BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
tsymbol.initializerFunc = createRecordInitFunc();
tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
}
}
/**
 * Reports whether the rest variable of an error var-ref is absent or explicitly
 * ignored via {@code _}, i.e. no rest assignment needs to be generated for it.
 */
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    // A missing rest var is treated the same as an ignored one.
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    // Only a plain var ref named "_" counts as explicitly ignored.
    return parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && IGNORE.value.equals(((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value);
}
/**
 * Desugars {@code abort;} into {@code return -1;} — the transaction body is compiled
 * to an int-returning function where -1 signals an aborted transaction.
 */
@Override
public void visit(BLangAbort abortNode) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
result = rewrite(returnStmt, env);
}
/**
 * Desugars {@code retry;} into {@code return 1;} — the transaction body is compiled
 * to an int-returning function where 1 signals that the transaction should retry.
 */
@Override
public void visit(BLangRetry retryNode) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
result = rewrite(returnStmt, env);
}
/** {@code continue} needs no desugaring; pass the node through unchanged. */
@Override
public void visit(BLangContinue nextNode) {
result = nextNode;
}
/** {@code break} needs no desugaring; pass the node through unchanged. */
@Override
public void visit(BLangBreak breakNode) {
result = breakNode;
}
/** Desugars the returned expression (if any); the return statement itself is kept. */
@Override
public void visit(BLangReturn returnNode) {
if (returnNode.expr != null) {
returnNode.expr = rewriteExpr(returnNode.expr);
}
result = returnNode;
}
/** Desugars the panicked expression; the panic statement itself is kept. */
@Override
public void visit(BLangPanic panicNode) {
panicNode.expr = rewriteExpr(panicNode.expr);
result = panicNode;
}
/** Desugars the wrapped xmlns declaration; the statement wrapper is kept. */
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
result = xmlnsStmtNode;
}
/**
 * Splits an xmlns declaration into a local or package-level variant depending on
 * where it is declared: inside an invokable/service it becomes a {@code BLangLocalXMLNS},
 * otherwise a {@code BLangPackageXMLNS}. URI/prefix/symbol are carried over unchanged.
 */
@Override
public void visit(BLangXMLNS xmlnsNode) {
BLangXMLNS generatedXMLNSNode;
xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
BSymbol ownerSymbol = xmlnsNode.symbol.owner;
// Owner decides the storage scope of the generated namespace declaration.
if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
generatedXMLNSNode = new BLangLocalXMLNS();
} else {
generatedXMLNSNode = new BLangPackageXMLNS();
}
generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
generatedXMLNSNode.prefix = xmlnsNode.prefix;
generatedXMLNSNode.symbol = xmlnsNode.symbol;
result = generatedXMLNSNode;
}
/**
 * Desugars a compound assignment ({@code a += b} etc.) into a plain assignment whose
 * RHS is the equivalent binary expression ({@code a = a + b}). For index-based LHS
 * expressions (possibly nested, e.g. {@code a[i][j] += b}) each index expression is
 * first evaluated once into a temp variable so it is not evaluated twice — once for
 * the read and once for the write.
 */
public void visit(BLangCompoundAssignment compoundAssignment) {
BLangVariableReference varRef = compoundAssignment.varRef;
if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
// Simple LHS: rebuild the ref (fresh node for the write) and emit a = a <op> b.
if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
varRef.lhsVar = true;
}
result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
rewriteExpr(compoundAssignment.modifiedExpr));
return;
}
// Index-based LHS: peel off the (possibly nested) index accesses from the
// outside in, capturing every index expression into a temp var def.
List<BLangStatement> statements = new ArrayList<>();
List<BLangSimpleVarRef> varRefs = new ArrayList<>();
List<BType> types = new ArrayList<>();
do {
BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
compoundAssignment.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
tempIndexVarDef.var.symbol);
// Prepend so the lists end up ordered outermost-first.
statements.add(0, tempIndexVarDef);
varRefs.add(0, tempVarRef);
types.add(0, varRef.type);
varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
} while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
// Rebuild the access chain using the temp index vars instead of the original exprs.
BLangVariableReference var = varRef;
for (int ref = 0; ref < varRefs.size(); ref++) {
var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
var.type = types.get(ref);
}
var.type = compoundAssignment.varRef.type;
// RHS: <rebuilt-access> <op> <expr>, then assign back into the same access.
BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
rhsExpression.type = compoundAssignment.modifiedExpr.type;
BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
rhsExpression);
statements.add(assignStmt);
BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
result = rewrite(bLangBlockStmt, env);
}
/** Desugars the wrapped expression; the expression statement itself is kept. */
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
result = exprStmtNode;
}
/** Desugars the condition, then-body and (possibly null) else branch in place. */
@Override
public void visit(BLangIf ifNode) {
ifNode.expr = rewriteExpr(ifNode.expr);
ifNode.body = rewrite(ifNode.body, env);
ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
result = ifNode;
}
/**
 * Desugars a match statement into a block: the matched expression is evaluated once
 * into a generated temp variable, and the pattern clauses become an if-else chain
 * produced by {@code generateIfElseStmt}.
 */
@Override
public void visit(BLangMatch matchStmt) {
BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
matchBlockStmt.pos = matchStmt.pos;
// Evaluate the matched expression once and bind it to a generated variable.
String matchExprVarName = GEN_VAR_PREFIX.value;
BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0,
names.fromString(matchExprVarName),
this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
matchBlockStmt.stmts.add(matchExprVarDef);
matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
rewrite(matchBlockStmt, this.env);
result = matchBlockStmt;
}
/**
 * Desugars a foreach statement into a while loop driven by the collection's iterator.
 * The collection is evaluated once into a temp variable ($data$); for built-in
 * iterable types an iterator variable is additionally introduced, while objects are
 * assumed to be iterator-like themselves.
 */
@Override
public void visit(BLangForeach foreach) {
BLangBlockStmt blockNode;
// Evaluate the collection expression exactly once.
BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
foreach.collection.type, this.env.scope.owner);
BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
foreach.collection.type, foreach.collection, dataSymbol);
BLangSimpleVariableDef dataVariableDefinition = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
BVarSymbol collectionSymbol = dataVariable.symbol;
switch (foreach.collection.type.tag) {
case TypeTags.STRING:
case TypeTags.ARRAY:
case TypeTags.TUPLE:
case TypeTags.XML:
case TypeTags.MAP:
case TypeTags.TABLE:
case TypeTags.RECORD:
// Built-in iterables: obtain an iterator from $data$, then loop on it.
BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach, collectionSymbol);
blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
blockNode.stmts.add(0, dataVariableDefinition);
break;
case TypeTags.OBJECT:
// Objects act as their own iterator (must expose a next() method).
blockNode = desugarForeachToWhile(foreach, dataVariableDefinition);
break;
default:
blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.stmts.add(0, dataVariableDefinition);
break;
}
rewrite(blockNode, this.env);
result = blockNode;
}
/**
 * Builds the while-loop form of a foreach:
 * <pre>
 *   &lt;iterator def&gt;
 *   $result$ = iterator.next();
 *   while ($result$ is &lt;record {value} type&gt;) {
 *       &lt;loop var&gt; = $result$.value;
 *       ... original body ...
 *       $result$ = iterator.next();
 *   }
 * </pre>
 *
 * @param foreach the original foreach node (supplies body, types and positions)
 * @param varDef  definition of the iterator (or iterator-like object) variable
 * @return the block statement containing iterator def, first next() call and the loop
 */
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
// $result$ holds the (record|()) value returned by each next() call.
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner);
BLangSimpleVariableDef resultVariableDefinition =
getIteratorNextVariableDefinition(foreach, iteratorSymbol, resultSymbol);
// Loop while $result$ is still the non-nil result record type.
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, resultReferenceInWhile, getUserDefineTypeNode(foreach.resultType));
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
// Bind the loop variable(s) from $result$.value at the top of the body.
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach, resultSymbol);
valueAccessExpr.expr =
addConversionExprIfRequired(valueAccessExpr.expr, types.getSafeType(valueAccessExpr.expr.type,
true, false));
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
// Advance the iterator as the second statement so `continue` still advances... 
// NOTE(review): advance happens before the original body statements; confirm intended order.
BLangAssignment resultAssignment =
getIteratorNextAssignment(foreach, iteratorSymbol, resultSymbol);
whileNode.body.stmts.add(1, resultAssignment);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(resultVariableDefinition);
blockNode.addStatement(whileNode);
return blockNode;
}
/**
 * Builds a synthetic user-defined type node (empty package alias and type name)
 * carrying the given resolved type; consumers only read the attached {@code BType}.
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType(
            ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    userDefinedType.type = type;
    return userDefinedType;
}
/**
 * Looks up the attached {@code next} function of an iterator object type.
 *
 * @param iteratorType the object type acting as an iterator
 * @return the first attached function named "next", or null if none exists
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : iteratorSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangBuiltInMethod.NEXT.getName())) {
            return attachedFunc;
        }
    }
    return null;
}
/** Desugars the loop condition and body in place. */
@Override
public void visit(BLangWhile whileNode) {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
/**
 * Desugars a lock statement into:
 * <pre>
 *   lock;                                  // acquire
 *   (error|()) $errorResult = trap { body; () };
 *   unlock;                                // always release, even on error
 *   if ($errorResult is error) { panic $errorResult; }  // re-throw after release
 * </pre>
 * The body is trapped so the lock is guaranteed to be released before any panic
 * from the body is re-raised.
 */
@Override
public void visit(BLangLock lockNode) {
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
// Track this lock so var refs inside the body can register the variables they touch.
enclLocks.push(lockStmt);
// Wrap the body in a statement expression yielding () so it can be trapped.
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = ASTBuilderUtil
.createStatementExpression(lockNode.body, nilLiteral);
statementExpression.type = symTable.nilType;
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.type = nillableError;
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr, nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
// Release before inspecting the trapped result.
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
blockStmt.addStatement(unLockStmt);
// If the body panicked, re-raise the error now that the lock is released.
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.type = symTable.booleanType;
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
// Field-level locks are redundant when their whole variable is already locked.
if (!lockStmt.lockVariables.isEmpty()) {
lockStmt.fieldVariables.entrySet().removeIf(entry -> lockStmt.lockVariables.contains(entry.getKey()));
}
}
/** Generated lock statements need no further desugaring. */
@Override
public void visit(BLangLockStmt lockStmt) {
result = lockStmt;
}
/** Generated unlock statements need no further desugaring. */
@Override
public void visit(BLangUnLockStmt unLockStmt) {
result = unLockStmt;
}
/**
 * Desugars a transaction block into a call to the transaction package's
 * initiator-begin function. The transaction body and the onRetry/committed/aborted
 * blocks are each wrapped in a lambda; the main body lambda returns an int status:
 * 0 = successful, 1 = retry, -1 = abort.
 */
@Override
public void visit(BLangTransaction transactionNode) {
DiagnosticPos pos = transactionNode.pos;
BType trxReturnType = symTable.intType;
BType otherReturnType = symTable.nilType;
BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
DiagnosticPos invPos = transactionNode.pos;
// The transaction body itself returns nothing, so append "return 0" (success)
// unless the last statement is an abort or a retry — those statements are
// desugared into their own status-returning return statements.
DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
BLangStatement statement = null;
if (!transactionNode.transactionBody.stmts.isEmpty()) {
statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
}
// BUGFIX: the second condition previously re-checked ABORT instead of RETRY,
// so a trailing retry statement got a spurious "return 0" appended after it.
if (statement == null
|| (statement.getKind() != NodeKind.ABORT && statement.getKind() != NodeKind.RETRY)) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
transactionNode.transactionBody.addStatement(returnStmt);
}
// Ensure the optional blocks exist so empty lambdas can always be generated,
// and default the retry count to 3 when not specified.
if (transactionNode.abortedBody == null) {
transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.committedBody == null) {
transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.onRetryBody == null) {
transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.retryCount == null) {
transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
}
// Wrap each participating block in its own parameterless lambda.
BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$",
Collections.emptyList(),
trxReturnNode, transactionNode.transactionBody);
BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$",
Collections.emptyList(),
otherReturnNode, transactionNode.onRetryBody);
BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
Collections.emptyList(),
otherReturnNode, transactionNode.committedBody);
BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$",
Collections.emptyList(),
otherReturnNode, transactionNode.abortedBody);
trxMainFunc.cachedEnv = env.createClone();
trxOnRetryFunc.cachedEnv = env.createClone();
trxCommittedFunc.cachedEnv = env.createClone();
trxAbortedFunc.cachedEnv = env.createClone();
// Resolve the begin function from the (guaranteed imported) transaction module.
BSymbol trxModSym = env.enclPkg.imports
.stream()
.filter(importPackage ->
importPackage.symbol.pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names
.ORG_NAME_SEPARATOR.value + Names.TRANSACTION_PACKAGE.value))
.findAny().get().symbol;
BInvokableSymbol invokableSymbol =
(BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(trxModSym),
TRX_INITIATOR_BEGIN_FUNCTION,
SymTag.FUNCTION);
BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
getTransactionBlockId());
List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
trxOnRetryFunc,
trxCommittedFunc, trxAbortedFunc);
BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
requiredArgs,
Collections.emptyList(),
symResolver);
BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
stmt.expr = trxInvocation;
result = rewrite(stmt, env);
}
/**
 * Produces a unique id of the form {@code <org>$<pkgName>$<index>} for a generated
 * transaction block; the index increments once per generated block.
 */
private String getTransactionBlockId() {
    StringBuilder blockId = new StringBuilder();
    blockId.append(env.enclPkg.packageID.orgName).append('$')
            .append(env.enclPkg.packageID.name).append('$')
            .append(transactionIndex++);
    return blockId.toString();
}
/**
 * Creates and defines a lambda function with the given parameters, return type and
 * body. The function is registered in the enclosing package (so it is compiled as a
 * top-level function) and a lambda node referencing it is returned.
 *
 * @param pos                    source position for the generated nodes
 * @param functionNamePrefix     prefix for the generated function name (a counter is appended)
 * @param lambdaFunctionVariable required parameters of the lambda
 * @param returnType             return type node of the lambda
 * @param lambdaBody             body block of the lambda
 * @return the lambda function node wrapping the newly defined function
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType,
BLangBlockStmt lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
// defineFunction may replace the param list; use the symbols it actually defined.
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
// Mark as not-yet-desugared so the new function goes through desugaring itself.
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null);
return lambdaFunction;
}
/**
 * Creates and defines a parameterless, bodyless lambda function; the caller is
 * expected to attach a body afterwards. The function is registered in the enclosing
 * package so it is compiled as a top-level function.
 *
 * @param pos                source position for the generated nodes
 * @param functionNamePrefix prefix for the generated function name (a counter is appended)
 * @param returnType         return type node of the lambda
 * @return the lambda function node wrapping the newly defined function
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
TypeNode returnType) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
lambdaFunction.function = func;
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
// Mark as not-yet-desugared so the new function goes through desugaring itself.
func.desugared = false;
lambdaFunction.pos = pos;
return lambdaFunction;
}
/**
 * Registers a generated function in the target package: defines its symbol in the
 * package scope and adds it to the package's function and top-level node lists so
 * later phases (including code generation) pick it up.
 */
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    final SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
/** Fork-join needs no desugaring at this point; pass the node through unchanged. */
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
/**
 * Literals pass through unchanged, except byte-array (blob) literals, which are
 * expanded into an array literal of byte values.
 */
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
/**
 * Expands a blob literal ({@code base16 `...`} / {@code base64 `...`}) into an
 * array literal whose elements are the decoded byte values.
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
// result[0] = encoding tag, result[1] = payload between the backticks.
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
// Anything other than base64 is treated as base16 (hex).
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.type = literalExpr.type;
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
/**
 * Splits a blob literal's source text, e.g. {@code base16 `aa bb`}, into
 * {@code [encodingTag, payload]}. Spaces are insignificant and removed first;
 * the payload is the text between the first and last backtick.
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String compactText = blobLiteralNodeText.replaceAll(" ", "");
    int openingBacktick = compactText.indexOf('`');
    int closingBacktick = compactText.lastIndexOf('`');
    return new String[]{
            compactText.substring(0, openingBacktick),
            compactText.substring(openingBacktick + 1, closingBacktick)};
}
/**
 * Decodes a hex (base16) string into its byte values. Assumes an even-length
 * string of valid hex digits; each output byte is built from two adjacent digits.
 */
private static byte[] hexStringToByteArray(String str) {
    byte[] data = new byte[str.length() / 2];
    for (int byteIndex = 0; byteIndex < data.length; byteIndex++) {
        int hi = Character.digit(str.charAt(2 * byteIndex), 16);
        int lo = Character.digit(str.charAt(2 * byteIndex + 1), 16);
        data[byteIndex] = (byte) ((hi << 4) + lo);
    }
    return data;
}
/**
 * Desugars a list constructor expression into the concrete literal node matching its
 * inferred type: tuple, JSON array, typedesc, or a plain array literal.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
if (listConstructor.type.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.JSON) {
// json-typed list: wrap as a JSON array of json[].
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type));
result = rewriteExpr(expr);
} else if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.TYPEDESC) {
// The constructor was actually used as a type descriptor value.
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
}
}
/**
 * Desugars an array literal: members are rewritten first, then json-typed arrays
 * (either directly json or with a json element type) are converted to JSON array
 * literal nodes; all other arrays pass through unchanged.
 */
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    if (arrayLiteral.type.tag == TypeTags.JSON) {
        // json-typed literal becomes a JSON array of json[].
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type));
    } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) {
        // Array whose element type resolves to json keeps its declared type.
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type);
    } else {
        result = arrayLiteral;
    }
}
/**
 * Desugars a tuple literal. When the literal was actually used as a type descriptor
 * value, it is replaced by a typedesc expression; otherwise each member gets an
 * implicit cast to {@code any} (tuple members are stored boxed) and is rewritten.
 */
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
return;
}
tupleLiteral.exprs.forEach(expr -> {
// Respect an already-added implicit conversion when computing the source type.
BType expType = expr.impConversionExpr == null ? expr.type : expr.impConversionExpr.type;
types.setImplicitCastExpr(expr, expType, symTable.anyType);
});
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
/**
 * Desugars a parenthesized expression: when it was used as a type descriptor value
 * it becomes a typedesc expression; otherwise the grouping is dropped and the inner
 * expression is rewritten directly.
 */
@Override
public void visit(BLangGroupExpr groupExpr) {
if (groupExpr.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = groupExpr.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
} else {
result = rewriteExpr(groupExpr.expression);
}
}
/**
 * Desugars a mapping constructor into the concrete literal node for its type
 * (record / map / json). Non-computed identifier keys are turned into string
 * literals; computed keys are sorted last so they are evaluated after literal keys.
 */
@Override
public void visit(BLangRecordLiteral recordLiteral) {
// Stable sort: literal keys first, computed keys after.
recordLiteral.keyValuePairs.sort((v1, v2) -> Boolean.compare(v1.key.computedKey, v2.key.computedKey));
recordLiteral.keyValuePairs.forEach(keyValue -> {
BLangExpression keyExpr = keyValue.key.expr;
if (!keyValue.key.computedKey && keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
// {a: ...} — the identifier key is really the string "a", not a var read.
BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
keyValue.key.expr = createStringLiteral(varRef.pos, varRef.variableName.value);
} else {
keyValue.key.expr = rewriteExpr(keyValue.key.expr);
}
keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
});
BLangExpression expr;
if (recordLiteral.type.tag == TypeTags.RECORD) {
expr = new BLangStructLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
} else if (recordLiteral.type.tag == TypeTags.MAP) {
expr = new BLangMapLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
} else {
expr = new BLangJSONLiteral(recordLiteral.pos, recordLiteral.keyValuePairs, recordLiteral.type);
}
result = rewriteExpr(expr);
}
/**
 * Desugars a table literal: the row expressions are rewritten, and the key/index
 * column name arrays expected by the runtime table constructor are materialized.
 * The two previously duplicated column-collection stanzas are factored into a
 * single helper parameterized by the column flag.
 */
@Override
public void visit(BLangTableLiteral tableLiteral) {
tableLiteral.tableDataRows = rewriteExprs(tableLiteral.tableDataRows);
tableLiteral.keyColumnsArrayLiteral = createColumnNameArrayLiteral(tableLiteral, TableColumnFlag.PRIMARYKEY);
tableLiteral.indexColumnsArrayLiteral = createColumnNameArrayLiteral(tableLiteral, TableColumnFlag.INDEX);
result = tableLiteral;
}

/**
 * Builds a {@code string[]} array literal holding the names of all table columns
 * that carry the given flag, preserving declaration order.
 *
 * @param tableLiteral the table literal whose columns are inspected
 * @param flag         the column flag to filter by (e.g. PRIMARYKEY or INDEX)
 * @return the populated string array literal
 */
private BLangArrayLiteral createColumnNameArrayLiteral(BLangTableLiteral tableLiteral, TableColumnFlag flag) {
List<String> columnNames = new ArrayList<>();
for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
if (column.flagSet.contains(flag)) {
columnNames.add(column.columnName);
}
}
BLangArrayLiteral columnsArrayLiteral = createArrayLiteralExprNode();
columnsArrayLiteral.exprs = columnNames.stream()
.map(columnName -> ASTBuilderUtil.createLiteral(tableLiteral.pos, symTable.stringType, columnName))
.collect(Collectors.toList());
columnsArrayLiteral.type = new BArrayType(symTable.stringType);
return columnsArrayLiteral;
}
/**
 * Appends a single array-literal argument containing the desugared forms of the
 * given variable references (closure/local refs passed to a generated function).
 */
private void addReferenceVariablesToArgs(List<BLangExpression> args, List<BLangExpression> varRefs) {
BLangArrayLiteral localRefs = createArrayLiteralExprNode();
varRefs.forEach(varRef -> localRefs.exprs.add(rewrite(varRef, env)));
args.add(localRefs);
}
/**
 * Appends a single array-literal argument containing the desugared function
 * pointers of each streaming query's action lambda.
 */
private void addFunctionPointersToArgs(List<BLangExpression> args, List<StreamingQueryStatementNode>
        streamingStmts) {
    BLangArrayLiteral funcPointers = createArrayLiteralExprNode();
    streamingStmts.forEach(stmt -> funcPointers.exprs.add(
            rewrite((BLangExpression) stmt.getStreamingAction().getInvokableBody(), env)));
    args.add(funcPointers);
}
/**
 * Desugars a simple variable reference into the concrete ref node matching what the
 * symbol actually denotes: XML namespace QName, function pointer, type load, local
 * var, struct field, or package-level var. Compile-time constants of simple types
 * are inlined as literals. Finally an implicit conversion to the expression's
 * expected type is added for RHS (non-lhs) references.
 */
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
// A ref whose package symbol is an xmlns is really a QName constant string.
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.type = symTable.stringType;
result = qnameExpr;
return;
}
// Unresolved ref (e.g. "_"): nothing to desugar.
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
// Prefer the original symbol when this one is a clone made during type narrowing.
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BSymbol ownerSymbol = varRefExpr.symbol.owner;
// Dispatch on what the symbol denotes / where it is owned.
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
// Simple-typed constants are inlined as literals instead of var loads.
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
// Package-level vars touched inside a lock must be registered on that lock.
if (!enclLocks.isEmpty()) {
enclLocks.peek().addLockVariable((BVarSymbol) varRefExpr.symbol);
}
}
genVarRefExpr.type = varRefExpr.type;
genVarRefExpr.pos = varRefExpr.pos;
// LHS refs (and "_") use the symbol's declared type directly — no conversion.
if ((varRefExpr.lhsVar)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
genVarRefExpr.type = varRefExpr.symbol.type;
result = genVarRefExpr;
return;
}
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
// RHS refs: load with the declared type, then convert to the expected type.
BType targetType = genVarRefExpr.type;
genVarRefExpr.type = genVarRefExpr.symbol.type;
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Safe-navigation accesses (x?.y) are desugared separately into match-based checks.
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    // Rewrite the receiver, then restore its original static type if the rewrite changed it.
    BType varRefType = fieldAccessExpr.expr.type;
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    // The field name becomes a string-literal key for the desugared access node.
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
    int varRefTypeTag = varRefType.tag;
    // NOTE(review): for union receivers only the FIRST member type decides the access kind —
    // presumably upstream analysis guarantees the members agree; confirm before relying on this.
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached method as a value.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
            // Field accesses on `self` inside a lock statement must be registered for locking.
            addToLocks((BLangStructFieldAccessExpr) targetVarRef);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (types.isLax(varRefType)) {
        // Lax-typed receivers (e.g. json) go through the json access expression.
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
        targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (varRefTypeTag == TypeTags.XML) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    // Carry lvalue-ness, result type and optional-access flag over to the desugared node.
    targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
    targetVarRef.type = fieldAccessExpr.type;
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
// Registers a `self.field` access with the innermost enclosing lock so the lock can
// acquire the matching field lock. Only literal field names on a local `self` are tracked.
private void addToLocks(BLangStructFieldAccessExpr targetVarRef) {
    if (enclLocks.isEmpty()) {
        return;
    }
    // Skip receivers that are not simple var refs, are package-owned, or are not `self`.
    // NOTE(review): the BLangLocalVarRef casts rely on the receiver already having been
    // rewritten to a local var ref by this pass — confirm this invariant holds for all callers.
    if (targetVarRef.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF
            || ((BLangSimpleVarRef) targetVarRef.expr).symbol.owner.getKind() == SymbolKind.PACKAGE
            || !Names.SELF.equals(((BLangLocalVarRef) targetVarRef.expr).symbol.name)) {
        return;
    }
    // Only constant field names can be locked; computed keys are ignored.
    if (targetVarRef.indexExpr.getKind() == NodeKind.LITERAL) {
        String field = (String) ((BLangLiteral) targetVarRef.indexExpr).value;
        enclLocks.peek().addFieldVariable((BVarSymbol) ((BLangLocalVarRef) targetVarRef.expr).varSymbol, field);
    }
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Safe-navigation accesses are desugared separately into match-based checks.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    // Rewrite the receiver, then restore its original static type if the rewrite changed it.
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    // Dispatch on the receiver type to the specialized access node kind.
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like receivers (possibly nil-lifted) use struct field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String member access: widen the receiver to string first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.XML) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    // Carry lvalue-ness and result type over to the desugared node.
    targetVarRef.lhsVar = indexAccessExpr.lhsVar;
    targetVarRef.type = indexAccessExpr.type;
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    BLangInvocation genIExpr = iExpr;
    // error(...) constructor calls get their args normalized in place first.
    // NOTE(review): execution deliberately falls through after rewriteErrorConstructor —
    // the same (mutated) iExpr continues through the generic invocation path; confirm.
    if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
        result = rewriteErrorConstructor(iExpr);
    }
    // Named/defaultable args are reordered into positional form before rewriting.
    reorderArguments(iExpr);
    iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
    iExpr.restArgs = rewriteExprs(iExpr.restArgs);
    if (iExpr.functionPointerInvocation) {
        visitFunctionPointerInvocation(iExpr);
        return;
    }
    iExpr.expr = rewriteExpr(iExpr.expr);
    if (iExpr.builtinMethodInvocation) {
        visitBuiltInMethodInvocation(iExpr);
        return;
    }
    result = genIExpr;
    if (iExpr.expr == null) {
        // No receiver: plain function call, possibly with an implicit expr symbol to attach.
        fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
        if (iExpr.exprSymbol == null) {
            return;
        }
        iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
        iExpr.expr = rewriteExpr(iExpr.expr);
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!iExpr.langLibInvocation) {
                // Attached method call: the receiver becomes the first argument.
                List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
                argExprs.add(0, iExpr.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
                                iExpr.type, iExpr.expr, iExpr.async);
                attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
                attachedFunctionInvocation.name = iExpr.name;
                result = genIExpr = attachedFunctionInvocation;
            }
            break;
    }
    // Type-parameterized returns may need an explicit cast back to the inferred type.
    fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
// For langlib calls and invocations whose declared return type contains a type parameter,
// resets the invocation's type to the symbol's declared return type and wraps the call in a
// cast back to the originally inferred type, so downstream code generation sees both types.
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
        BType originalInvType = genIExpr.type;
        genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
        BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
        // If a conversion was actually inserted, that conversion is the new result.
        if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
            this.result = expr;
            return;
        }
        // Otherwise force an explicit (NOP) cast node so the original inferred type is preserved.
        BOperatorSymbol conversionSymbol = Symbols
                .createCastOperatorSymbol(genIExpr.type, originalInvType, symTable.errorType, false, true,
                        InstructionCodes.NOP, null, null);
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = genIExpr;
        conversionExpr.targetType = originalInvType;
        conversionExpr.conversionSymbol = conversionSymbol;
        conversionExpr.type = originalInvType;
        conversionExpr.pos = genIExpr.pos;
        this.result = conversionExpr;
    }
}
// Normalizes an error(...) constructor call in place: the reason argument is coerced to
// string and moved to the end, and all named args are folded into a single frozen detail
// record that becomes the last required argument. Returns the same (mutated) invocation.
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
    BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
    // Drop an implicit conversion that would take the reason away from string.
    if (reasonExpr.impConversionExpr != null &&
            reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
        reasonExpr.impConversionExpr = null;
    }
    reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
    reasonExpr = rewriteExpr(reasonExpr);
    // Move the reason from the front to the back of the required args.
    iExpr.requiredArgs.remove(0);
    iExpr.requiredArgs.add(reasonExpr);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
            ((BErrorType) iExpr.symbol.type).detailType);
    List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
            .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .collect(Collectors.toList());
    if (namedArgs.isEmpty()) {
        // No detail fields: freeze an empty detail record.
        errorDetail = visitUtilMethodInvocation(iExpr.pos,
                BLangBuiltInMethod.FREEZE, Lists.of(rewriteExpr(recordLiteral)));
    } else {
        // Each named arg becomes a key-value pair of the detail record and is removed
        // from the invocation's argument list.
        for (BLangExpression arg : namedArgs) {
            BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
            BLangRecordLiteral.BLangRecordKeyValue member = new BLangRecordLiteral.BLangRecordKeyValue();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (recordLiteral.type.tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
            }
            recordLiteral.keyValuePairs.add(member);
            iExpr.requiredArgs.remove(arg);
        }
        recordLiteral = rewriteExpr(recordLiteral);
        // Clone then freeze so the stored detail is an immutable copy.
        BLangExpression cloned = visitCloneInvocation(recordLiteral, ((BErrorType) iExpr.symbol.type).detailType);
        errorDetail = visitUtilMethodInvocation(iExpr.pos, BLangBuiltInMethod.FREEZE, Lists.of(cloned));
    }
    iExpr.requiredArgs.add(errorDetail);
    return iExpr;
}
/**
 * Desugars a type-init expression ({@code new T(...)}).
 * Stream construction becomes a plain stream literal; object construction is expanded by
 * {@link #desugarObjectTypeInit} into object creation plus an init-function call.
 */
@Override
public void visit(BLangTypeInit typeInitExpr) {
    switch (typeInitExpr.type.tag) {
        case TypeTags.STREAM:
            // Streams have no init function to invoke.
            result = new BLangStreamLiteral(typeInitExpr.pos, typeInitExpr.type);
            break;
        default:
            // Fall back to the generated default initializer when no user-defined init resolved.
            if (typeInitExpr.type.tag == TypeTags.OBJECT && typeInitExpr.initInvocation.symbol == null) {
                typeInitExpr.initInvocation.symbol =
                        ((BObjectTypeSymbol) typeInitExpr.type.tsymbol).initializerFunc.symbol;
            }
            result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
// Expands `new T(args)` into a statement expression roughly equivalent to:
//     T $obj$ = <create object>;
//     var $temp$ = $obj$.__init(args);       // only when init can return error
//     T|error $result$ = ($temp$ is error) ? $temp$ : $obj$;
//     => $result$
// When the init function returns nil the error check is skipped and the object itself
// is the value of the statement expression.
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    // $obj$ holds the newly created object instance.
    BType objType = getObjectType(typeInitExpr.type);
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    // Nil-returning init: just invoke it for its side effects and yield the object.
    if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = Names.USER_DEFINED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, objVarRef);
        stmtExpr.type = objVarRef.symbol.type;
        return stmtExpr;
    }
    // $temp$ captures the init call's return value (object or error).
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
            typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    // $result$ is the overall value of the statement expression.
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = resultVarRef.symbol.type;
    return stmtExpr;
}
// Creates a variable definition for a desugar-generated temporary, reusing an existing
// symbol with the given name if one is already in scope.
// NOTE(review): the symbol lookup uses `name` as-is, but the variable is created with
// "$" + name + "$" — callers already pass dollar-wrapped names (e.g. "$obj$"), which
// yields "$$obj$$". Looks intentional for uniqueness, but confirm the asymmetry.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
    BSymbol objSym = symResolver.lookupSymbol(env, names.fromString(name), SymTag.VARIABLE);
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
            (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
    objVarDef.var = objVar;
    objVarDef.type = objVar.type;
    return objVarDef;
}
/**
 * Extracts the object type from the given type: the type itself when it is an object,
 * or the first object member of a union (e.g. {@code T|error} from an init that can fail).
 *
 * @param type an object type or a union containing one
 * @return the object member type, or {@code symTable.noType} if the union has none
 * @throws IllegalStateException if {@code type} is neither an object nor a union
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    } else if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().stream()
                .filter(t -> t.tag == TypeTags.OBJECT)
                .findFirst()
                .orElse(symTable.noType);
    }
    // Fixed typos in the original message ("None object ... conext").
    throw new IllegalStateException("Non-object type '" + type.toString() + "' found in object init context");
}
/**
 * Creates a fresh error type node whose resolved type is the built-in {@code error} type,
 * for use in generated {@code is error} type tests.
 */
private BLangErrorType getErrorTypeNode() {
    final BLangErrorType errorNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorNode.type = symTable.errorType;
    return errorNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * Desugar `cond ? thenExpr : elseExpr` into an if-else captured by a temp var:
     *
     * T $ternary_result$;
     * if (cond) {
     *     $ternary_result$ = thenExpr;
     * } else {
     *     $ternary_result$ = elseExpr;
     * }
     * => $ternary_result$
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    // then branch: $result$ = thenExpr
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    // else branch: $result$ = elseExpr
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    // Wrap everything into a statement expression whose value is the temp variable.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = ternaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. A wait on alternatives ({@code wait a | b | ...}) is
 * flattened into a list of the individual future expressions; a plain wait keeps a
 * single-element list.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    if (waitExpr.getExpression().getKind() != NodeKind.BINARY_EXPR) {
        // Single future: rewrite it and wrap in a one-element list.
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
    } else {
        // `|`-combined futures: flatten the binary tree into a flat alternative list.
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
    }
    result = waitExpr;
}
// Flattens a binary wait-alternative tree into `exprs`, preserving textual order
// (left operand before right operand), and returns the same list for chaining.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    for (BLangExpression operand : new BLangExpression[]{binaryExpr.lhsExpr, binaryExpr.rhsExpr}) {
        visitBinaryExprOfWait(operand, exprs);
    }
    return exprs;
}
// Helper for wait-alternative flattening: nested binary expressions recurse,
// leaf expressions are rewritten and appended to the collection list.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
        return;
    }
    exprs.add(rewriteExpr(expr));
}
/**
 * Desugars {@code wait {a: f1, b: f2}}. Each key-value's expression is rewritten;
 * a bare key (no value expression) doubles as its own future reference. The whole
 * expression is then replaced by a wait literal node.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    }
    BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
    result = rewriteExpr(expr);
}
/**
 * Desugars a trap expression: the trapped expression is rewritten, and non-nil results
 * are widened to the trap's overall {@code T|error} type when required.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    BLangExpression rewritten = rewriteExpr(trapExpr.expr);
    if (rewritten.type.tag != TypeTags.NIL) {
        rewritten = addConversionExprIfRequired(rewritten, trapExpr.type);
    }
    trapExpr.expr = rewritten;
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Range operators (a ... b / a ..< b) become createIntRange(...) invocations.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    // && / || need short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    // Arithmetic/bitwise ops with byte operands may need widening to int first.
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
    // Equality between int and byte: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
                    symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
                    symTable.intType);
            return;
        }
    }
    // Matching operand types need no coercion.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        return;
    }
    // string + x: convert x to string, except xml, where the string side becomes xml text.
    if (lhsExprTypeTag == TypeTags.STRING && binaryExpr.opKind == OperatorKind.ADD) {
        if (rhsExprTypeTag == TypeTags.XML) {
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
                binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.STRING && binaryExpr.opKind == OperatorKind.ADD) {
        if (lhsExprTypeTag == TypeTags.XML) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
                binaryExpr.rhsExpr.type);
        return;
    }
    // Numeric widening: decimal takes precedence over float; checked in that order.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
                binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
                binaryExpr.rhsExpr.type);
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.rhsExpr.type,
                binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.lhsExpr.type,
                binaryExpr.rhsExpr.type);
    }
}
/**
 * Builds an invocation of the internal {@code createIntRange} function that replaces a
 * range expression ({@code start ... end}).
 */
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
    BInvokableSymbol rangeFuncSym = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> args = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(pos, rangeFuncSym, args, symResolver);
    rangeInvocation.type = symTable.intRangeType;
    return rangeInvocation;
}
// When a binary expression with byte operand(s) produces an int result, widens each
// byte operand to int so both sides match the operator's int signature. Expressions
// without a typed parent context are left untouched.
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.parent == null || binaryExpr.parent.type == null) {
        return;
    }

    int lhsTag = binaryExpr.lhsExpr.type.tag;
    int rhsTag = binaryExpr.rhsExpr.type.tag;
    if (lhsTag != TypeTags.BYTE && rhsTag != TypeTags.BYTE) {
        return;
    }
    if (binaryExpr.type.tag != TypeTags.INT) {
        return;
    }

    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression is a bitwise shift operation
 * ({@code <<}, {@code >>} or {@code >>>}). When it is, callers are expected to
 * handle both lhs and rhs as {@code int} values.
 * <p>
 * Examples where this returns true (expected type {@code int}):
 * {@code a >> b}, {@code a << b}, {@code a >> i}, {@code a << i},
 * {@code i >> j}, {@code i << j}.
 *
 * @param binaryExpr the binary expression to inspect
 * @return true if the operator is one of the three shift operators
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into a match expression over
 * {@code lhs} whose nil pattern clause yields the (pre-rewritten) {@code rhs}.
 */
@Override
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    // The nil pattern clause supplies the fallback value.
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.type = elvisExpr.type;
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
/**
 * Desugars a unary expression. Bitwise complement has no direct opcode and is rewritten
 * into an equivalent xor; every other unary operator only needs its operand rewritten.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }

    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Desugars a bitwise complement unary expression into an equivalent xor:
 * {@code ~a} becomes {@code a ^ -1} for int operands and {@code a ^ 0xff} for byte
 * operands (e.g. {@code ~11110011 -> 11110011 ^ 11111111 -> 00001100}).
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    DiagnosticPos pos = unaryExpr.pos;
    boolean isByte = TypeTags.BYTE == unaryExpr.type.tag;
    // The xor is typed like the operand; the mask is all-ones at the operand's width.
    BType operandType = isByte ? symTable.byteType : symTable.intType;

    BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    xorExpr.type = operandType;
    xorExpr.rhsExpr = isByte
            ? ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL)
            : ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
/**
 * Desugars a type conversion expression. A conversion that carries only annotation
 * attachments and no target type node is a no-op wrapper and is unwrapped entirely.
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    boolean annotationOnly = conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty();
    if (annotationOnly) {
        result = rewriteExpr(conversionExpr.expr);
    } else {
        conversionExpr.expr = rewriteExpr(conversionExpr.expr);
        result = conversionExpr;
    }
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda at package level so code generation emits it as a top-level function.
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    result = bLangLambdaFunction;
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // An arrow function `(p) => expr` is desugared into a full lambda function whose
    // body is a single return of the expression, then defined like any other function.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // The return type is the (already inferred) type of the arrow body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.type = bLangArrowFunction.expression.type;
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.type = bLangArrowFunction.funcType;
    // Create and define a function symbol for the generated function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Re-home each parameter symbol into the new function's scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.retType = funcNode.returnTypeNode.type;
    // Rebuild the invokable type from the parameter symbols.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.type = new BInvokableType(paramTypes, funcNode.returnTypeNode.type, null);
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
// Attaches the function symbol to the invokable node and gives it a fresh scope that
// is shared with the invokable's symbol environment.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    Scope funcScope = new Scope(funcSymbol);
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = funcScope;
    invokableEnv.scope = funcScope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Both the attribute name (a QName) and its quoted-string value may contain
    // interpolated expressions that need rewriting.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Rewrite tag names, children and attributes first.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Namespace-declaration attributes (xmlns:...) are pulled out of the attribute list
    // and converted into inline namespace nodes; Iterator.remove keeps the scan safe.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-scope elements get package-level namespace nodes, others local ones.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
        attributesItr.remove();
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Interpolated text fragments are folded into a single string-concat expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Comment content fragments are folded into a single string-concat expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Rewrite the PI target, then fold its data fragments into one concat expression.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}

@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Quoted attribute values are folded into a single string-concat expression.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}

@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is replaced entirely by the equivalent concat expression.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * Desugars an async worker send: the sent value is cloned so the receiving worker
 * observes an independent copy, and any channel key expression is rewritten too.
 */
@Override
public void visit(BLangWorkerSend workerSendNode) {
    BLangExpression rewrittenExpr = rewriteExpr(workerSendNode.expr);
    workerSendNode.expr = visitCloneInvocation(rewrittenExpr, workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Like async send, the transferred value is cloned to keep workers isolated.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
    result = syncSendExpr;
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Only the optional channel key needs rewriting on the receive side.
    if (workerReceiveNode.keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
    }
    result = workerReceiveNode;
}

@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Flush targets are the distinct workers of the cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    // Mark QName indices so namespace resolution treats them as XML-context names.
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    xmlAttributeAccessExpr.desugared = true;
    // LHS accesses and keyed accesses stay as-is; a bare `x@` read is rewritten once more
    // (the `desugared` flag above prevents infinite recursion on the second pass).
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
// ---------------------------------------------------------------------------------
// The visit methods below handle nodes that are themselves products of this desugar
// pass (or are already in final lowered form); they are passed through unchanged.
// ---------------------------------------------------------------------------------

@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangJSONLiteral jsonLiteral) {
    result = jsonLiteral;
}

@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
// Stream literals are produced by this desugar itself (see visit(BLangTypeInit));
// they are already in final form and pass through unchanged.
@Override
public void visit(BLangStreamLiteral streamLiteral) {
    result = streamLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
result = waitLiteral;
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr accessExpr) {
result = accessExpr;
}
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Normalize exclusive bounds to inclusive ones: the helpers replace the bound
    // expression with an adjusted one before both bounds are rewritten.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-args (...) expression desugars to its wrapped expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument desugars to its value expression; the name wrapper is dropped.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
/**
 * Desugars a table query expression: the in-memory query builder processes it
 * first, then the whole expression is replaced with an invocation of the
 * native {@code queryTable} function.
 */
@Override // added for consistency: every sibling visit(...) override is annotated
public void visit(BLangTableQueryExpression tableQueryExpression) {
    inMemoryTableQueryBuilder.visit(tableQueryExpression);
    /*replace the table expression with a function invocation,
        so that we manually call a native function "queryTable". */
    result = createInvocationFromTableExpr(tableQueryExpression);
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Desugars a match *expression* into a block statement that declares a temp
    // result variable and runs an equivalent match *statement* whose pattern
    // bodies assign into that variable; the block is then wrapped in a
    // statement-expression yielding the temp variable.
    addMatchExprDefaultCase(bLangMatchExpression);
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            matchTempResultVarName, bLangMatchExpression.type, null,
            new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                    bLangMatchExpression.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;
    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);
        // Each pattern body becomes: tempResult = <converted pattern expr>;
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
            patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = ASTBuilderUtil.createStatementExpression(stmts, tempResultVarRef);
    statementExpr.type = bLangMatchExpression.type;
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // 'check' expression: errors propagate to the caller (no panic).
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // 'checkpanic' expression: errors cause a panic instead of propagating.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Shared desugaring for {@code check} and {@code checkpanic} expressions.
 * Generates a temp variable plus a match statement: the success pattern binds
 * the value to the temp variable, and the error pattern either propagates the
 * error or panics, depending on {@code isCheckPanic}. The whole construct is
 * exposed as a statement-expression yielding the temp variable.
 *
 * @param checkedExpr  the check/checkpanic expression being desugared
 * @param isCheckPanic true for 'checkpanic' (panic on error), false for 'check'
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
            checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
                    names.fromString(checkedExprVarName),
                    this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;
    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                    checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);
    // Plain ArrayLists instead of double-brace initialization: the double-brace
    // idiom creates an anonymous inner class per call site and captures 'this'.
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    patternClauses.add(patternSuccessCase);
    patternClauses.add(patternErrorCase);
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, patternClauses);
    List<BLangStatement> generatedStmts = new ArrayList<>();
    generatedStmts.add(checkedExprVarDef);
    generatedStmts.add(matchStmt);
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, generatedStmts);
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = ASTBuilderUtil.createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.type = checkedExpr.type;
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor desugars to an object type-init of the service's
    // type definition; annotation attachments are rewritten as a side effect.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
/**
 * Desugars a type-test ('is') expression by rewriting its tested expression.
 * Value-typed operands are boxed to 'any' before the test.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // FIX: the original discarded the return value of
        // addConversionExprIfRequired, making the boxing a no-op.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    result = typeTestExpr;
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access desugars to a binary expression with the special
    // ANNOT_ACCESS operator: LHS is the annotated value, RHS is the annotation's
    // BVM alias as a string literal.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.type = annotAccessExpr.type;
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.type,
                    binaryExpr.rhsExpr.type),
                    annotAccessExpr.type, null), null,
            InstructionCodes.ANNOT_ACCESS);
    result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the tested expression needs rewriting.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Rewrite both the yielded expression and the embedded statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Rewrite each element expression of the JSON array literal.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    // Simple literal constants (tags up to BOOLEAN, plus NIL) are replaced by a
    // literal node built from the symbol's resolved value; other constants keep
    // their original (rewritten) initializer expression.
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must carry a resolved value by this phase.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Ignore expressions need no desugaring.
    result = ignoreExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // Constant references need no desugaring.
    result = constantRef;
}
/**
 * Builds the variable definition {@code $iterator$ = <collection>.iterate()}
 * used when desugaring a foreach statement.
 *
 * @param foreach          the foreach node (used for positions)
 * @param collectionSymbol symbol of the iterated collection
 * @return a variable definition for the generated iterator variable
 */
private BLangSimpleVariableDef getIteratorVariableDefinition(BLangForeach foreach, BVarSymbol collectionSymbol) {
    BLangIdentifier iterateIdentifier =
            ASTBuilderUtil.createIdentifier(foreach.pos, BLangBuiltInMethod.ITERATE.getName());
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(foreach.pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = foreach.pos;
    iteratorInvocation.name = iterateIdentifier;
    iteratorInvocation.expr = dataReference;
    // 'iterate' is resolved as a lang-lib method on the collection's type.
    BInvokableSymbol langLibMethodSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromIdNode(iterateIdentifier));
    iteratorInvocation.symbol = langLibMethodSymbol;
    iteratorInvocation.type = langLibMethodSymbol.retType;
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = true;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            langLibMethodSymbol.retType, this.env.scope.owner);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(foreach.pos, "$iterator$",
            langLibMethodSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(foreach.pos, iteratorVariable);
}
/**
 * Builds the variable definition {@code $result$ = <iterator>.next()} used in
 * foreach desugaring.
 */
private BLangSimpleVariableDef getIteratorNextVariableDefinition(BLangForeach foreach,
                                                                 BVarSymbol iteratorSymbol,
                                                                 BVarSymbol resultSymbol) {
    BLangInvocation nextCall = createIteratorNextInvocation(foreach, iteratorSymbol);
    BLangSimpleVariable resultVar = ASTBuilderUtil.createVariable(foreach.pos, "$result$",
            foreach.nillableResultType, nextCall, resultSymbol);
    return ASTBuilderUtil.createVariableDef(foreach.pos, resultVar);
}
/**
 * Builds the assignment {@code $result$ = <iterator>.next()} executed at the
 * end of each loop iteration in foreach desugaring.
 */
private BLangAssignment getIteratorNextAssignment(BLangForeach foreach,
                                                  BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(foreach, iteratorSymbol);
    // Narrow the receiver's type to its non-error/non-nil "safe" variant for the call.
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(foreach.pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds the invocation node {@code <iterator>.next()} for foreach desugaring,
 * resolving the 'next' method from the iterator's object type.
 */
private BLangInvocation createIteratorNextInvocation(BLangForeach foreach, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier =
            ASTBuilderUtil.createIdentifier(foreach.pos, BLangBuiltInMethod.NEXT.getName());
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(foreach.pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = foreach.pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(foreach.pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}
/**
 * Wraps the given condition and body into an if statement positioned at the
 * foreach node.
 * NOTE(review): resultSymbol is currently unused; kept for signature stability.
 */
private BLangIf getIfStatement(BLangForeach foreach, BVarSymbol resultSymbol,
                               BLangTypeTestExpr typeTestExpressionNode, BLangBlockStmt ifStatementBody) {
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = foreach.pos;
    ifNode.type = symTable.booleanType;
    ifNode.expr = typeTestExpressionNode;
    ifNode.body = ifStatementBody;
    return ifNode;
}
/**
 * Builds the boolean expression {@code $result$ is ()} used to detect iterator
 * exhaustion in foreach desugaring.
 */
private BLangTypeTestExpr getTypeTestExpression(BLangForeach foreach, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.pos = foreach.pos;
    nilTypeNode.type = symTable.nilType;
    nilTypeNode.typeKind = TypeKind.NIL;
    BLangTypeTestExpr typeTest = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTest.pos = foreach.pos;
    typeTest.expr = resultRef;
    typeTest.typeNode = nilTypeNode;
    typeTest.type = symTable.booleanType;
    return typeTest;
}
/**
 * Builds the field access {@code $result$.value}, typed as the foreach
 * variable's type.
 */
private BLangFieldBasedAccess getValueAccessExpression(BLangForeach foreach, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangIdentifier valueField = ASTBuilderUtil.createIdentifier(foreach.pos, "value");
    BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(resultRef, valueField);
    valueAccess.pos = foreach.pos;
    valueAccess.type = foreach.varType;
    valueAccess.originalType = valueAccess.type;
    return valueAccess;
}
/**
 * Wraps an arrow function's single expression into a block containing
 * {@code return <expr>;}.
 */
private BlockNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.expression.pos;
    returnStmt.setExpression(bLangArrowFunction.expression);
    BlockNode bodyBlock = TreeBuilder.createBlockNode();
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
/**
 * Builds the native function invocation that replaces a table query
 * expression: queryTableWithoutJoinClause(sql, fromTable, params, retType) or,
 * when a join is present, queryTableWithJoinClause(sql, fromTable, joinTable,
 * params, retType).
 */
private BLangInvocation createInvocationFromTableExpr(BLangTableQueryExpression tableQueryExpression) {
    List<BLangExpression> args = new ArrayList<>();
    String functionName = QUERY_TABLE_WITHOUT_JOIN_CLAUSE;
    args.add(getSQLPreparedStatement(tableQueryExpression));
    args.add(getFromTableVarRef(tableQueryExpression));
    BType retType = tableQueryExpression.type;
    BLangExpression joinTable = getJoinTableVarRef(tableQueryExpression);
    if (joinTable != null) {
        // Join present: switch to the with-join variant and pass the join table.
        args.add(joinTable);
        functionName = QUERY_TABLE_WITH_JOIN_CLAUSE;
    }
    args.add(getSQLStatementParameters(tableQueryExpression));
    args.add(getReturnType(tableQueryExpression));
    BInvokableSymbol symbol =
            (BInvokableSymbol) symTable.langTableModuleSymbol.scope.lookup(names.fromString(functionName)).symbol;
    BLangInvocation invocation =
            ASTBuilderUtil.createInvocationExprForMethod(tableQueryExpression.pos, symbol, args, symResolver);
    invocation.argExprs = args;
    invocation.type = retType;
    return invocation;
}
/**
 * Builds an invocation of a root-scope function identified by name.
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocation.type = retType;
    invocation.requiredArgs = args;
    return invocation;
}
/**
 * Wraps the query's SQL string into a string literal node.
 */
private BLangLiteral getSQLPreparedStatement(BLangTableQueryExpression
                                                     tableQueryExpression) {
    BLangLiteral queryLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    queryLiteral.value = tableQueryExpression.getSqlQuery();
    queryLiteral.type = symTable.stringType;
    return queryLiteral;
}
/**
 * Conveys the query's result type as an empty struct literal of the table's
 * constraint type.
 */
private BLangStructLiteral getReturnType(BLangTableQueryExpression
                                                 tableQueryExpression) {
    BTableType tableType = (BTableType) tableQueryExpression.type;
    BStructureType constraintType = (BStructureType) tableType.constraint;
    return new BLangStructLiteral(tableQueryExpression.pos, new ArrayList<>(), constraintType);
}
/**
 * Collects the query's literal parameters into an any[] literal, tagging each
 * literal with its runtime type and adding an implicit cast to 'any'.
 */
private BLangArrayLiteral getSQLStatementParameters(BLangTableQueryExpression tableQueryExpression) {
    BLangArrayLiteral paramArray = createArrayLiteralExprNode();
    for (BLangExpression param : tableQueryExpression.getParams()) {
        BLangLiteral literal = (BLangLiteral) param;
        Object value = literal.getValue();
        int typeTag = TypeTags.STRING;
        if (value instanceof Integer || value instanceof Long) {
            typeTag = TypeTags.INT;
        } else if (value instanceof Double || value instanceof Float) {
            typeTag = TypeTags.FLOAT;
        } else if (value instanceof Boolean) {
            typeTag = TypeTags.BOOLEAN;
        } else if (value instanceof Object[]) {
            typeTag = TypeTags.ARRAY;
        }
        literal.type = symTable.getTypeFromTag(typeTag);
        // setImplicitCastExpr populates literal.impConversionExpr; that cast node
        // is what actually goes into the argument array.
        types.setImplicitCastExpr(literal, new BType(typeTag, null), symTable.anyType);
        paramArray.exprs.add(literal.impConversionExpr);
    }
    return paramArray;
}
/**
 * Creates a fresh, empty array literal node typed as any[].
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.exprs = new ArrayList<>();
    arrayLiteral.type = new BArrayType(symTable.anyType);
    return arrayLiteral;
}
/**
 * Returns the rewritten joined-table reference of the query, or null when the
 * query has no join clause.
 */
private BLangExpression getJoinTableVarRef(BLangTableQueryExpression tableQueryExpression) {
    JoinStreamingInput joinStreamingInput = tableQueryExpression.getTableQuery().getJoinStreamingInput();
    if (joinStreamingInput == null) {
        return null;
    }
    BLangExpression joinTableRef = (BLangExpression) joinStreamingInput.getStreamingInput().getStreamReference();
    return rewrite(joinTableRef, env);
}
/**
 * Returns the rewritten reference to the query's source table.
 */
private BLangExpression getFromTableVarRef(BLangTableQueryExpression tableQueryExpression) {
    BLangExpression sourceTableRef = (BLangExpression) tableQueryExpression.getTableQuery()
            .getStreamingInput().getStreamReference();
    return rewrite(sourceTableRef, env);
}
/**
 * Desugars an invocation through a function pointer: the pointer itself is
 * materialized as a variable reference (or field access when invoked on a
 * receiver) and wrapped into a BFunctionPointerInvocation.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        // Plain 'fp(...)' call: the pointer is a simple variable reference.
        expr = new BLangSimpleVarRef();
    } else {
        // 'receiver.fp(...)' call: the pointer is a field of the receiver.
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Desugars invocations of compiler built-in methods (isNaN, isFinite,
 * isInfinite, clone, length, freeze/isFrozen, stamp, convert, detail,
 * reason, iterate, call, next) into equivalent expressions or util-function
 * invocations.
 *
 * @throws IllegalStateException for an unrecognized built-in method
 */
private void visitBuiltInMethodInvocation(BLangInvocation iExpr) {
    switch (iExpr.builtInMethod) {
        case IS_NAN:
            if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                // float: NaN is the only value for which (x != x) holds.
                BOperatorSymbol notEqSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.NOT_EQUAL, symTable.floatType, symTable.floatType);
                BLangBinaryExpr binaryExprNaN = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                        symTable.booleanType,
                        OperatorKind.NOT_EQUAL,
                        notEqSymbol);
                result = rewriteExpr(binaryExprNaN);
            } else {
                // decimal: NaN is detected as !(x >= 0 || x < 0).
                BOperatorSymbol greaterEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.GREATER_EQUAL, symTable.decimalType, symTable.decimalType);
                BOperatorSymbol lessThanSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.LESS_THAN, symTable.decimalType, symTable.decimalType);
                BOperatorSymbol orSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.OR, symTable.booleanType, symTable.booleanType);
                BOperatorSymbol notSymbol = (BOperatorSymbol) symResolver.resolveUnaryOperator(
                        iExpr.pos, OperatorKind.NOT, symTable.booleanType);
                BLangLiteral literalZero = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "0");
                BLangBinaryExpr binaryExprLHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, literalZero,
                        symTable.booleanType,
                        OperatorKind.GREATER_EQUAL,
                        greaterEqualSymbol);
                BLangBinaryExpr binaryExprRHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, literalZero,
                        symTable.booleanType,
                        OperatorKind.LESS_THAN,
                        lessThanSymbol);
                BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprLHS,
                        binaryExprRHS,
                        symTable.booleanType,
                        OperatorKind.OR, orSymbol);
                BLangUnaryExpr finalExprNaN = ASTBuilderUtil.createUnaryExpr(iExpr.pos, binaryExpr,
                        symTable.booleanType,
                        OperatorKind.NOT, notSymbol);
                result = rewriteExpr(finalExprNaN);
            }
            break;
        case IS_FINITE:
            if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                // float: finite iff (x == x) && (+Inf != x) && (-Inf != x).
                BOperatorSymbol equalSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.EQUAL, symTable.floatType, symTable.floatType);
                BOperatorSymbol notEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.NOT_EQUAL, symTable.floatType, symTable.floatType);
                BOperatorSymbol andEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.AND, symTable.booleanType, symTable.booleanType);
                BLangBinaryExpr binaryExprLHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                        symTable.booleanType,
                        OperatorKind.EQUAL, equalSymbol);
                BLangLiteral posInfLiteral = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                        Double.POSITIVE_INFINITY);
                BLangBinaryExpr nestedLHSExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, posInfLiteral,
                        iExpr.expr, symTable.booleanType,
                        OperatorKind.NOT_EQUAL,
                        notEqualSymbol);
                BLangLiteral negInfLiteral = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                        Double.NEGATIVE_INFINITY);
                BLangBinaryExpr nestedRHSExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, negInfLiteral,
                        iExpr.expr, symTable.booleanType,
                        OperatorKind.NOT_EQUAL,
                        notEqualSymbol);
                BLangBinaryExpr binaryExprRHS = ASTBuilderUtil.createBinaryExpr(iExpr.pos, nestedLHSExpr,
                        nestedRHSExpr,
                        symTable.booleanType,
                        OperatorKind.AND, andEqualSymbol);
                BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprLHS,
                        binaryExprRHS, symTable.booleanType,
                        OperatorKind.AND, andEqualSymbol);
                result = rewriteExpr(binaryExpr);
            } else {
                // decimal: finite iff (x == x) — NaN fails, and decimal equality
                // with itself distinguishes the remaining cases.
                BOperatorSymbol isEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.EQUAL, symTable.decimalType, symTable.decimalType);
                BLangBinaryExpr finalExprFinite = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr, iExpr.expr,
                        symTable.booleanType,
                        OperatorKind.EQUAL, isEqualSymbol);
                result = rewriteExpr(finalExprFinite);
            }
            break;
        case IS_INFINITE:
            if (iExpr.expr.type.tag == TypeTags.FLOAT) {
                // float: infinite iff (x == +Inf) || (x == -Inf).
                BOperatorSymbol eqSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.EQUAL, symTable.floatType, symTable.floatType);
                BOperatorSymbol orSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.OR, symTable.booleanType, symTable.booleanType);
                BLangLiteral posInflitExpr = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                        Double.POSITIVE_INFINITY);
                BLangBinaryExpr binaryExprPosInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr,
                        posInflitExpr, symTable.booleanType,
                        OperatorKind.EQUAL, eqSymbol);
                BLangLiteral negInflitExpr = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.floatType,
                        Double.NEGATIVE_INFINITY);
                BLangBinaryExpr binaryExprNegInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, iExpr.expr,
                        negInflitExpr, symTable.booleanType,
                        OperatorKind.EQUAL, eqSymbol);
                BLangBinaryExpr binaryExprInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, binaryExprPosInf,
                        binaryExprNegInf,
                        symTable.booleanType,
                        OperatorKind.OR, orSymbol);
                result = rewriteExpr(binaryExprInf);
            } else {
                // decimal: infinite iff (1 / x == 0).
                BLangLiteral literalZero = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "0");
                BLangLiteral literalOne = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.decimalType, "1");
                BOperatorSymbol isEqualSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.EQUAL, symTable.decimalType, symTable.decimalType);
                BOperatorSymbol divideSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
                        OperatorKind.DIV, symTable.decimalType, symTable.decimalType);
                BLangBinaryExpr divideExpr = ASTBuilderUtil.createBinaryExpr(iExpr.pos, literalOne, iExpr.expr,
                        symTable.decimalType,
                        OperatorKind.DIV, divideSymbol);
                BLangBinaryExpr finalExprInf = ASTBuilderUtil.createBinaryExpr(iExpr.pos, divideExpr, literalZero,
                        symTable.booleanType,
                        OperatorKind.EQUAL, isEqualSymbol);
                result = rewriteExpr(finalExprInf);
            }
            break;
        case CLONE:
            result = visitCloneInvocation(iExpr.expr, iExpr.type);
            break;
        case LENGTH:
            result = visitLengthInvocation(iExpr);
            break;
        case FREEZE:
        case IS_FROZEN:
            visitFreezeBuiltInMethodInvocation(iExpr);
            break;
        case STAMP:
            result = visitTypeConversionInvocation(iExpr.expr.pos, iExpr.builtInMethod, iExpr.expr,
                    iExpr.requiredArgs.get(0), iExpr.type);
            break;
        case CONVERT:
            result = visitConvertInvocation(iExpr);
            break;
        case DETAIL:
            result = visitDetailInvocation(iExpr);
            break;
        case REASON:
        case ITERATE:
            result = visitUtilMethodInvocation(iExpr.expr.pos, iExpr.builtInMethod, Lists.of(iExpr.expr));
            break;
        case CALL:
            visitCallBuiltInMethodInvocation(iExpr);
            break;
        case NEXT:
            // 'next' maps to a util invocation on the JVM backend; the BVM backend
            // keeps a dedicated built-in method invocation node.
            if (isJvmTarget) {
                result = visitNextBuiltInMethodInvocation(iExpr);
            } else {
                result = new BLangBuiltInMethodInvocation(iExpr, iExpr.builtInMethod);
            }
            break;
        default:
            throw new IllegalStateException();
    }
}
/**
 * Builds (and rewrites) an invocation of a function from the utils package
 * matching the given built-in method name. Arguments are converted in place to
 * the resolved function's parameter types where required.
 *
 * @param pos           position for the generated invocation
 * @param builtInMethod built-in method whose name identifies the util function
 * @param requiredArgs  argument list; mutated to insert conversions
 * @return the rewritten invocation node
 */
BLangInvocation visitUtilMethodInvocation(DiagnosticPos pos, BLangBuiltInMethod builtInMethod,
                                          List<BLangExpression> requiredArgs) {
    BInvokableSymbol invokableSymbol
            = (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(symTable.utilsPackageSymbol),
            names.fromString(builtInMethod.getName()),
            SymTag.FUNCTION);
    for (int i = 0; i < invokableSymbol.params.size(); i++) {
        requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), invokableSymbol.params.get(i).type));
    }
    BLangInvocation invocationExprMethod = ASTBuilderUtil
            .createInvocationExprMethod(pos, invokableSymbol, requiredArgs,
                    new ArrayList<>(), symResolver);
    return rewrite(invocationExprMethod, env);
}
/**
 * Desugars an iterator 'next' invocation (JVM backend) into a call to the
 * utils-package function of the same name, converting the result back to the
 * invocation's expected type.
 */
private BLangExpression visitNextBuiltInMethodInvocation(BLangInvocation iExpr) {
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbol(symTable.pkgEnvMap.get(symTable.utilsPackageSymbol),
                    names.fromString(iExpr.builtInMethod.getName()), SymTag.FUNCTION);
    List<BLangExpression> requiredArgs = Lists.of(iExpr.expr);
    BLangExpression invocationExprMethod = ASTBuilderUtil.createInvocationExprMethod(iExpr.pos, invokableSymbol,
            requiredArgs, new ArrayList<>(), symResolver);
    invocationExprMethod = addConversionExprIfRequired(invocationExprMethod, iExpr.type);
    return rewriteExpr(invocationExprMethod);
}
/**
 * Desugars clone(): value types are returned as-is (no cloning needed);
 * reference types go through the util clone function, with a conversion to the
 * LHS type when required.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    BLangInvocation cloneCall = visitUtilMethodInvocation(expr.pos, BLangBuiltInMethod.CLONE, Lists.of(expr));
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Desugars a clone-then-stamp sequence: value types pass through unchanged;
 * reference types are cloned via the util function, stamped with the LHS type,
 * and converted to it when required.
 */
private BLangExpression visitCloneAndStampInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    BLangInvocation clonedExpr = visitUtilMethodInvocation(expr.pos, BLangBuiltInMethod.CLONE, Lists.of(expr));
    BLangInvocation stampedExpr = visitStampInvocation(clonedExpr, lhsType, expr.pos);
    return addConversionExprIfRequired(stampedExpr, lhsType);
}
/**
 * Builds a stamp(typedesc, value) util invocation; the typedesc argument
 * carries the type to stamp onto the value.
 */
private BLangInvocation visitStampInvocation(BLangExpression expression, BType typeToStamp, DiagnosticPos pos) {
    BLangTypedescExpr typeDescriptor = new BLangTypedescExpr();
    typeDescriptor.resolvedType = typeToStamp;
    typeDescriptor.type = symTable.typeDesc;
    return visitUtilMethodInvocation(pos, BLangBuiltInMethod.STAMP, Lists.of(typeDescriptor, expression));
}
/**
 * Desugars convert(): picks the simple-value or general conversion util
 * function based on the target type, boxing value-typed input to anydata
 * first.
 */
private BLangExpression visitConvertInvocation(BLangInvocation iExpr) {
    // When the receiver is a typedesc literal, it names the conversion target.
    BType targetType = iExpr.type;
    if (iExpr.expr instanceof BLangTypedescExpr) {
        targetType = ((BLangTypedescExpr) iExpr.expr).resolvedType;
    }
    BLangExpression inputExpr = iExpr.requiredArgs.get(0);
    if (types.isValueType(inputExpr.type)) {
        // Box value-typed input to anydata before handing it to the util function.
        inputExpr = createTypeCastExpr(inputExpr, inputExpr.type, symTable.anydataType);
    }
    BLangBuiltInMethod convertMethod = types.isValueType(targetType)
            ? BLangBuiltInMethod.SIMPLE_VALUE_CONVERT
            : BLangBuiltInMethod.CONVERT;
    return visitTypeConversionInvocation(iExpr.expr.pos, convertMethod, iExpr.expr, inputExpr, iExpr.type);
}
/**
 * Desugars detail() to the util function of the same name, preserving the
 * statically-known detail type on the generated invocation.
 */
private BLangExpression visitDetailInvocation(BLangInvocation iExpr) {
    BLangInvocation detailCall = visitUtilMethodInvocation(iExpr.expr.pos, iExpr.builtInMethod,
            Lists.of(iExpr.expr));
    detailCall.type = iExpr.type;
    return detailCall;
}
/**
 * Builds a util-function invocation taking (typedesc, value) and coerces its
 * result to the LHS type when required.
 */
private BLangExpression visitTypeConversionInvocation(DiagnosticPos pos, BLangBuiltInMethod builtInMethod,
                                                      BLangExpression typeDesc, BLangExpression valExpr,
                                                      BType lhType) {
    BLangInvocation conversionCall = visitUtilMethodInvocation(pos, builtInMethod, Lists.of(typeDesc, valExpr));
    return addConversionExprIfRequired(conversionCall, lhType);
}
/**
 * Desugars length() to the util function of the same name.
 */
private BLangExpression visitLengthInvocation(BLangInvocation iExpr) {
    return visitUtilMethodInvocation(iExpr.pos, BLangBuiltInMethod.LENGTH, Lists.of(iExpr.expr));
}
/**
 * Desugars freeze()/isFrozen(). Value types are inherently immutable, so
 * freeze() is the identity and isFrozen() is the literal true; reference types
 * delegate to the matching util function.
 */
private void visitFreezeBuiltInMethodInvocation(BLangInvocation iExpr) {
    if (types.isValueType(iExpr.expr.type)) {
        if (iExpr.builtInMethod == BLangBuiltInMethod.FREEZE) {
            result = iExpr.expr;
        } else {
            // isFrozen() on a value type is always true.
            result = ASTBuilderUtil.createLiteral(iExpr.pos, symTable.booleanType, true);
        }
        return;
    }
    result = addConversionExprIfRequired(visitUtilMethodInvocation(iExpr.pos, iExpr.builtInMethod,
            Lists.of(iExpr.expr)), iExpr.type);
}
/**
 * Desugars fp.call(...) into a direct function-pointer invocation. The
 * invocation node is mutated in place: its symbol/name are redirected to the
 * underlying function pointer and the receiver is cleared (or unwrapped for
 * access expressions) before wrapping into a BFunctionPointerInvocation.
 */
private void visitCallBuiltInMethodInvocation(BLangInvocation iExpr) {
    BLangExpression expr = iExpr.expr;
    if (iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        iExpr.symbol = ((BLangVariableReference) iExpr.expr).symbol;
        iExpr.expr = null;
    } else if (iExpr.expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
        // Peel off the conversion to reach the underlying variable reference.
        iExpr.symbol = ((BLangVariableReference) ((BLangTypeConversionExpr) iExpr.expr).expr).symbol;
        iExpr.expr = null;
    } else {
        // Access expression: keep the container expression as the receiver.
        iExpr.expr = ((BLangAccessExpression) iExpr.expr).expr;
    }
    Name funcPointerName = iExpr.symbol.name;
    iExpr.name = ASTBuilderUtil.createIdentifier(iExpr.pos, funcPointerName.value);
    iExpr.builtinMethodInvocation = false;
    iExpr.functionPointerInvocation = true;
    result = new BFunctionPointerInvocation(iExpr, expr);
}
/**
 * Rewrites a single AST node in the given environment and returns the
 * desugared replacement. Idempotent: nodes already marked desugared are
 * returned untouched. The current environment is saved and restored around the
 * visit, and the result is taken from the visitor's 'result' field.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    // Clear 'result' so stale values can never leak into a later rewrite.
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Rewrites an expression node and returns the desugared replacement.
 * If the node carries an implicit conversion wrapper, the wrapper is rewritten
 * instead (and detached from the node to avoid double application). Idempotent
 * for nodes already marked desugared.
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Rewrites a statement, maintaining the doubly-linked chain of statement links
 * (used elsewhere for statement navigation): a link for this statement is
 * pushed before the rewrite and popped after, and the rewritten statement is
 * wired into it.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    // Wire the rewritten statement into the link chain, then restore the parent.
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
/**
 * Rewrites every statement of the list in place and returns the same list.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int idx = 0; idx < nodeList.size(); idx++) {
        E rewrittenStmt = rewrite(nodeList.get(idx), env);
        nodeList.set(idx, rewrittenStmt);
    }
    return nodeList;
}
/**
 * Rewrites every node of the list in place and returns the same list.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int idx = 0; idx < nodeList.size(); idx++) {
        E rewrittenNode = rewrite(nodeList.get(idx), env);
        nodeList.set(idx, rewrittenNode);
    }
    return nodeList;
}
/**
 * Rewrites every expression of the list in place and returns the same list.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int idx = 0; idx < nodeList.size(); idx++) {
        E rewrittenExpr = rewriteExpr(nodeList.get(idx));
        nodeList.set(idx, rewrittenExpr);
    }
    return nodeList;
}
/**
 * Creates a string literal node at the given position.
 */
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Creates a byte literal node; the byte is widened via its unsigned int value.
 */
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
/**
 * Creates a type-cast expression, resolving the conversion operator symbol for
 * the given source/target type pair first.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType sourceType, BType targetType) {
    BOperatorSymbol conversionSymbol =
            (BOperatorSymbol) symResolver.resolveConversionOperator(sourceType, targetType);
    return createTypeCastExpr(expr, targetType, conversionSymbol);
}
/**
 * Wraps the given expression in a type-conversion node targeting targetType,
 * using the supplied conversion operator symbol.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType,
                                           BOperatorSymbol symbol) {
    BLangTypeConversionExpr castNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castNode.pos = expr.pos;
    castNode.expr = expr;
    castNode.type = targetType;
    castNode.targetType = targetType;
    castNode.conversionSymbol = symbol;
    return castNode;
}
private BType getElementType(BType type) {
if (type.tag != TypeTags.ARRAY) {
return type;
}
return getElementType(((BArrayType) type).getElementType());
}
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
if (Symbols.isNative(invokableNode.symbol)) {
return;
}
BLangBlockStmt blockStmt = invokableNode.body;
if (invokableNode.workers.size() == 0 &&
invokableNode.symbol.type.getReturnType().isNullable()
&& (blockStmt.stmts.size() < 1 ||
blockStmt.stmts.get(blockStmt.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
DiagnosticPos invPos = invokableNode.pos;
DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
blockStmt.addStatement(returnStmt);
}
}
/**
* Reorder the invocation arguments to match the original function signature.
*
* @param iExpr Function invocation expressions to reorder the arguments
*/
private void reorderArguments(BLangInvocation iExpr) {
BSymbol symbol = iExpr.symbol;
if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
return;
}
BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
if (!invokableSymbol.params.isEmpty()) {
reorderNamedArgs(iExpr, invokableSymbol);
}
if (invokableSymbol.restParam == null) {
return;
}
if (iExpr.restArgs.size() == 1 && iExpr.restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
return;
}
BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteral.exprs = iExpr.restArgs;
arrayLiteral.type = invokableSymbol.restParam.type;
iExpr.restArgs = new ArrayList<>();
iExpr.restArgs.add(arrayLiteral);
}
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol) {
List<BLangExpression> args = new ArrayList<>();
Map<String, BLangExpression> namedArgs = new HashMap<>();
iExpr.requiredArgs.stream()
.filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
.forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
List<BVarSymbol> params = invokableSymbol.params;
int i = 0;
for (; i < params.size(); i++) {
BVarSymbol param = params.get(i);
if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
args.add(iExpr.requiredArgs.get(i));
} else if (namedArgs.containsKey(param.name.value)) {
args.add(namedArgs.get(param.name.value));
} else {
BLangExpression expr = new BLangIgnoreExpr();
expr.type = param.type;
args.add(expr);
}
}
iExpr.requiredArgs = args;
}
    /**
     * Creates the error-handling pattern clause used when desugaring checked / safe-assign
     * expressions. If every possible error type can be propagated through the enclosing
     * function's return type (and this is not a `checkpanic`), the error is returned;
     * otherwise the generated clause panics with the error.
     *
     * @param pos position to attach to generated nodes
     * @param invokableSymbol symbol of the enclosing invokable; its return type decides return-vs-panic
     * @param equivalentErrorTypes error types the checked expression may produce
     * @param isCheckPanicExpr true for `checkpanic`, which always panics on error
     * @return the typed binding pattern clause handling the error case
     */
    private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
            DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
        // Expand the enclosing function's return type into its member type set.
        BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
        Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
                ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
                new LinkedHashSet<BType>() {{
                    add(enclosingFuncReturnType);
                }};
        // The error can be returned only if every error type is assignable to some member
        // of the enclosing function's return type.
        boolean returnOnError = equivalentErrorTypes.stream()
                .allMatch(errorType -> returnTypeSet.stream()
                        .anyMatch(retType -> types.isAssignable(errorType, retType)));
        // Pattern variable that binds the matched error value.
        String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
        BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
                patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                        names.fromString(patternFailureCaseVarName),
                        this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
        BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
                patternFailureCaseVar.symbol);
        BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
        patternBlockFailureCase.pos = pos;
        if (!isCheckPanicExpr && returnOnError) {
            // error e => return e;
            BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
            returnStmt.pos = pos;
            returnStmt.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(returnStmt);
        } else {
            // error e => panic e;
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.pos = pos;
            panicNode.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(panicNode);
        }
        return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
    }
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
names.fromString(patternSuccessCaseVarName),
this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
BLangExpression varRefExpr;
if (isVarDef) {
varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
} else {
varRefExpr = lhsExpr;
}
BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
patternSuccessCaseVar.symbol);
BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
varRefExpr, patternSuccessCaseVarRef, false);
BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
new ArrayList<BLangStatement>() {{
add(assignmentStmtSuccessCase);
}});
return ASTBuilderUtil.createMatchStatementPattern(pos,
patternSuccessCaseVar, patternBlockSuccessCase);
}
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
BLangIf currentIfNode = parentIfNode;
for (int i = 1; i < patterns.size(); i++) {
BLangMatchBindingPatternClause patternClause = patterns.get(i);
if (i == patterns.size() - 1 && patternClause.isLastPattern) {
currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
} else {
currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
currentIfNode = (BLangIf) currentIfNode.elseStmt;
}
}
return parentIfNode;
}
    /**
     * Generate an if-else statement from the given match pattern clause. The clause's
     * condition becomes the if condition; structured patterns additionally prepend the
     * cast and destructuring statements (or fold them into the condition when a type
     * guard is present).
     *
     * @param pattern match pattern statement node
     * @param matchExprVar variable node holding the matched expression's value
     * @return if else statement node
     */
    private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
        BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
        if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
            // Typed patterns need no destructuring; guard the body with the type test only.
            BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
            return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
        }
        // For structured patterns, compute the type implied by the binding pattern shape.
        BType expectedType = matchExprVar.type;
        if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
            BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
            expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
        }
        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) pattern;
            // Cast the matched value to the pattern's type, then destructure it with a
            // tuple/record/error/simple variable definition, depending on the pattern kind.
            BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
            BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
            structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
            BLangStatement varDefStmt;
            if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                        (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                        (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                        (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            } else {
                varDefStmt = ASTBuilderUtil
                        .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            }
            if (structuredPattern.typeGuardExpr != null) {
                // With a type guard, destructuring must happen before the guard is evaluated:
                // wrap cast + destructure in a statement expression yielding the guard, and
                // AND it onto the pattern's type-test condition.
                BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
                blockStmt.addStatement(varDef);
                blockStmt.addStatement(varDefStmt);
                BLangStatementExpression stmtExpr = ASTBuilderUtil
                        .createStatementExpression(blockStmt, structuredPattern.typeGuardExpr);
                stmtExpr.type = symTable.booleanType;
                ifCondition = ASTBuilderUtil
                        .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                                (BOperatorSymbol) symResolver
                                        .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                                symTable.booleanType));
            } else {
                // No guard: prepend cast + destructure to the pattern's body.
                structuredPattern.body.stmts.add(0, varDef);
                structuredPattern.body.stmts.add(1, varDefStmt);
            }
        }
        BLangIf ifNode = ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
        return ifNode;
    }
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
BLangSimpleVariable matchExprVar) {
BLangBlockStmt body;
BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
return patternClause.body;
}
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
matchExprVar.symbol);
BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type);
BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "",
patternClause.variable.type, patternVarExpr, patternClause.variable.symbol);
BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar);
patternClause.body.stmts.add(0, patternVarDef);
body = patternClause.body;
return body;
}
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
BLangSimpleVariable matchExprVar) {
BLangBlockStmt body = pattern.body;
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
structuredPattern.body.stmts.add(0, varDefStmt);
body = structuredPattern.body;
}
return body;
}
    /**
     * Wraps {@code expr} in a type conversion to {@code lhsType} when one is needed.
     * Returns the expression unchanged when the types already match, when an implicit
     * cast was attached by the type checker, or for a handful of combinations that need
     * no explicit conversion node.
     *
     * @param expr expression to convert
     * @param lhsType target type of the conversion
     * @return the (possibly wrapped) expression
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }
        BType rhsType = expr.type;
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }
        // Prefer an implicit cast when the type checker allows one.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            return expr;
        }
        // Combinations below never need an explicit conversion node.
        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }
        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }
        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }
        // Select the conversion operator: unbox for value types, NOP casts for
        // union/map/table combinations, otherwise resolve a regular cast operator.
        BOperatorSymbol conversionSymbol;
        if (types.isValueType(lhsType)) {
            conversionSymbol = Symbols.createUnboxValueTypeOpSymbol(rhsType, lhsType);
        } else if (lhsType.tag == TypeTags.UNION && types.isSubTypeOfBaseType(lhsType, TypeTags.ERROR)) {
            // Error-only unions are narrowed to the plain error type.
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, symTable.errorType, symTable.errorType, false,
                    true, InstructionCodes.NOP, null, null);
            lhsType = symTable.errorType;
        } else if (lhsType.tag == TypeTags.UNION || rhsType.tag == TypeTags.UNION) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else if (lhsType.tag == TypeTags.MAP || rhsType.tag == TypeTags.MAP) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else if (lhsType.tag == TypeTags.TABLE || rhsType.tag == TypeTags.TABLE) {
            conversionSymbol = Symbols.createCastOperatorSymbol(rhsType, lhsType, symTable.errorType, false, true,
                    InstructionCodes.NOP, null, null);
        } else {
            conversionSymbol = (BOperatorSymbol) symResolver.resolveCastOperator(expr, rhsType, lhsType);
        }
        // Build the conversion node; its types were validated here, so skip re-checking.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.conversionSymbol = conversionSymbol;
        conversionExpr.type = lhsType;
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        return conversionExpr;
    }
    /**
     * Builds the boolean condition deciding whether the matched value satisfies the given
     * pattern clause. Union pattern types are expanded into an OR-chain of per-member tests.
     *
     * @param patternClause pattern clause to build the condition for
     * @param varSymbol symbol of the temporary variable holding the matched value
     * @return the condition expression
     */
    private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol) {
        BType patternType;
        // Resolve the type the pattern matches against, based on the clause kind.
        switch (patternClause.getKind()) {
            case MATCH_STATIC_PATTERN_CLAUSE:
                BLangMatchStaticBindingPatternClause staticPattern =
                        (BLangMatchStaticBindingPatternClause) patternClause;
                patternType = staticPattern.literal.type;
                break;
            case MATCH_STRUCTURED_PATTERN_CLAUSE:
                BLangMatchStructuredBindingPatternClause structuredPattern =
                        (BLangMatchStructuredBindingPatternClause) patternClause;
                patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
                break;
            default:
                BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
                patternType = simplePattern.variable.type;
                break;
        }
        BLangExpression binaryExpr;
        BType[] memberTypes;
        // A union pattern type contributes one test per member type.
        if (patternType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) patternType;
            memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
        } else {
            memberTypes = new BType[1];
            memberTypes[0] = patternType;
        }
        if (memberTypes.length == 1) {
            binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        } else {
            // Fold member tests into an OR-chain: (tN || (... || (t1 || t0))).
            BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
            BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
            for (int i = 2; i < memberTypes.length; i++) {
                lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
                rhsExpr = binaryExpr;
                binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                        symTable.booleanType, OperatorKind.OR,
                        (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                                lhsExpr.type, rhsExpr.type));
            }
        }
        return binaryExpr;
    }
    /**
     * Computes the static type implied by a structured binding pattern variable. Tuple
     * patterns yield a tuple type built recursively; record and error patterns synthesize
     * (and register, as a side effect) anonymous type definitions mirroring the pattern's
     * shape; simple variables return their declared type.
     *
     * @param bindingPatternVariable the binding pattern variable
     * @return the resolved (possibly freshly synthesized) type
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // The rest pattern's array type contributes its element type as the rest type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }
        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
            // Synthesize an anonymous record type whose fields mirror the pattern entries.
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                            env.enclPkg.symbol.pkgID, null, env.scope.owner);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);
            List<BField> fields = new ArrayList<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                // Field types are resolved recursively from the value binding patterns.
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                        env.enclPkg.symbol.pkgID, fieldType, recordSymbol);
                fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }
            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;
            // Rest field type comes from the rest param's map constraint; otherwise the
            // synthesized record stays open over anydata.
            recordVarType.restFieldType = recordVariable.restParam != null ?
                    ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                    symTable.anydataType;
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(typeDefFields, recordVarType);
            recordTypeNode.pos = bindingPatternVariable.pos;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;
            recordTypeNode.symbol = recordSymbol;
            recordTypeNode.initFunction = createInitFunctionForStructureType(recordTypeNode, env,
                    Names.INIT_FUNCTION_SUFFIX);
            recordSymbol.scope.define(recordSymbol.initializerFunc.symbol.name, recordSymbol.initializerFunc.symbol);
            // Register the synthesized type with the enclosing package.
            createTypeDefinition(recordVarType, recordSymbol, recordTypeNode);
            return recordVarType;
        }
        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            // Synthesize an anonymous error type with a detail record matching the pattern.
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR,
                    Flags.PUBLIC,
                    names.fromString("$anonErrorType$" + errorCount++),
                    env.enclPkg.symbol.pkgID,
                    null, null);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest detail binding: fall back to the generic detail type.
                detailType = symTable.detailType;
            } else {
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);
                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                createTypeDefinition(detailType, detailType.tsymbol, recordTypeNode);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol,
                    ((BErrorType) errorVariable.type).reasonType,
                    detailType);
            errorTypeSymbol.type = errorType;
            createTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType));
            return errorType;
        }
        // Simple variable: the declared type is the pattern type.
        return bindingPatternVariable.type;
    }
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
List<BLangSimpleVariable> fieldList = new ArrayList<>();
for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
BVarSymbol symbol = field.valueBindingPattern.symbol;
if (symbol == null) {
symbol = new BVarSymbol(
Flags.PUBLIC,
names.fromString(field.key.value + "$"),
this.env.enclPkg.packageID,
symTable.pureType,
null);
}
BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
field.valueBindingPattern.pos,
symbol.name.value,
field.valueBindingPattern.type,
field.valueBindingPattern.expr,
symbol);
fieldList.add(fieldVar);
}
return createRecordTypeNode(fieldList, detailType);
}
    /**
     * Builds the synthesized record type for an error binding pattern's detail mapping.
     * The record is sealed when there is no rest detail binding.
     *
     * @param detail detail entries of the error pattern
     * @param restDetail rest binding variable, or null when absent
     * @param errorNo running counter used to name the anonymous detail type
     * @return the synthesized detail record type
     */
    private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                                   BLangSimpleVariable restDetail, int errorNo) {
        BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
                SymTag.RECORD,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorNo + "$detailType"),
                env.enclPkg.symbol.pkgID, null, null);
        detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
        detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
        detailRecordTypeSymbol.scope.define(
                names.fromString(detailRecordTypeSymbol.name.value + "." +
                        detailRecordTypeSymbol.initializerFunc.funcName.value),
                detailRecordTypeSymbol.initializerFunc.symbol);
        BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
        detailRecordType.restFieldType = symTable.anydataType;
        // No rest binding means the pattern names every field, so seal the record.
        if (restDetail == null) {
            detailRecordType.sealed = true;
        }
        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
            Name fieldName = names.fromIdNode(detailEntry.key);
            // Field types are resolved recursively from the value binding patterns.
            BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
            BVarSymbol fieldSym = new BVarSymbol(
                    Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
            detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
            detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
        }
        return detailRecordType;
    }
private BAttachedFunction createRecordInitFunc() {
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false);
initFuncSymbol.retType = symTable.nilType;
return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType);
}
private BLangRecordTypeNode createRecordTypeNode(List<BLangSimpleVariable> typeDefFields,
BRecordType recordVarType) {
BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
recordTypeNode.type = recordVarType;
recordTypeNode.fields = typeDefFields;
return recordTypeNode;
}
private BLangErrorType createErrorTypeNode(BErrorType errorType) {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.type = errorType;
return errorTypeNode;
}
private void createTypeDefinition(BType type, BTypeSymbol symbol, BLangType typeNode) {
BLangTypeDefinition typeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
env.enclPkg.addTypeDefinition(typeDefinition);
typeDefinition.typeNode = typeNode;
typeDefinition.type = type;
typeDefinition.symbol = symbol;
}
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
BVarSymbol varSymbol, BType patternType) {
DiagnosticPos pos = patternClause.pos;
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
return createBinaryExpression(pos, varRef, pattern.literal);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
}
if (patternType == symTable.nilType) {
BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
symTable.anyType, symTable.nilType));
} else {
return createIsAssignableExpression(pos, varSymbol, patternType);
}
}
    /**
     * Builds the comparison expression for a static match pattern: unwraps grouped
     * patterns, expands OR-ed alternatives recursively, treats `_` as an any-type test,
     * and otherwise emits an equality check between the matched value and the pattern
     * expression.
     */
    private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {
        BLangBinaryExpr binaryExpr;
        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            // Unwrap parenthesized patterns.
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }
        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            // OR-ed pattern alternatives: match if either side matches.
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver
                            .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // `_` matches any value: emit a `varRef is any` type test.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.type = symTable.anyType;
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            // Plain literal/const pattern: `varRef == expression`. When no direct equality
            // operator resolves, fall back to the anydata type-set equality.
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
            if (opSymbol == symTable.notFoundSymbol) {
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                                binaryExpr);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }
        return binaryExpr;
    }
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
BVarSymbol varSymbol,
BType patternType) {
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names);
}
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = variable.pos;
varRef.variableName = variable.name;
varRef.symbol = variable.symbol;
varRef.type = variable.type;
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(varRef);
return assignmentStmt;
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable) {
BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(variable.pos, function.receiver.symbol);
BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, variable.name);
fieldAccess.symbol = variable.symbol;
fieldAccess.type = variable.type;
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(fieldAccess);
SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
return rewrite(assignmentStmt, initFuncEnv);
}
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes;
List<BType> unmatchedTypes = new ArrayList<>();
if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
exprTypes = new ArrayList<>(unionType.getMemberTypes());
} else {
exprTypes = Lists.of(bLangMatchExpression.type);
}
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
if (this.types.isAssignable(type, pattern.variable.type)) {
assignable = true;
break;
}
}
if (!assignable) {
unmatchedTypes.add(type);
}
}
if (unmatchedTypes.isEmpty()) {
return;
}
BType defaultPatternType;
if (unmatchedTypes.size() == 1) {
defaultPatternType = unmatchedTypes.get(0);
} else {
defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
}
String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));
BLangMatchExprPatternClause defaultPattern =
(BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
defaultPattern.variable = patternMatchCaseVar;
defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
defaultPattern.pos = bLangMatchExpression.pos;
bLangMatchExpression.patternClauses.add(defaultPattern);
}
private boolean safeNavigate(BLangAccessExpression accessExpr) {
if (accessExpr.lhsVar || accessExpr.expr == null) {
return false;
}
if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
return true;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return safeNavigate((BLangAccessExpression) accessExpr.expr);
}
return false;
}
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.type;
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);
BLangMatch matcEXpr = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
BLangStatementExpression stmtExpression = ASTBuilderUtil.createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.type = originalExprType;
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successPattern = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
    /**
     * Recursively desugars a safe-navigation access chain into nested match statements.
     * The receiver chain is processed bottom-up; each safe access becomes a match on its
     * receiver with nil/error clauses assigning to {@code tempResultVar} and a success
     * clause that carries on the chain. Generated match statements are pushed onto
     * {@code matchStmtStack}; {@code successPattern} and {@code safeNavigationAssignment}
     * thread state between recursion levels.
     *
     * @param accessExpr the access expression being processed
     * @param type static type assigned to the generated match statements
     * @param tempResultVar variable receiving the overall result of the chain
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }
        // Desugar the receiver chain first (bottom-up).
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
                kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
                kind == NodeKind.INVOCATION) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }
        if (!accessExpr.errorSafeNavigation && !accessExpr.nilSafeNavigation) {
            // Not a safe access: restore the original type and, if an enclosing safe access
            // produced an assignment, feed this expression into it (with conversion).
            accessExpr.type = accessExpr.originalType;
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
            }
            return;
        }
        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {                    <--- parent match expr
         *   error e => e,
         *   T t => t.y but {         <--- current expr
         *           error e => e,
         *           R r => r.z
         *         }
         * }
         */
        BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
        // Nil clause only for nil-safe (`?.`) accesses.
        if (accessExpr.nilSafeNavigation) {
            matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
        }
        // Error clause only for error-safe accesses.
        if (accessExpr.errorSafeNavigation) {
            matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
            matchStmt.pos = accessExpr.pos;
        }
        // Success clause continues with the remainder of the access chain.
        BLangMatchTypedBindingPatternClause successPattern =
                getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        this.matchStmtStack.push(matchStmt);
        if (this.successPattern != null) {
            // Nest this match inside the enclosing access's success clause.
            this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
        }
        this.successPattern = successPattern;
    }
/**
 * Builds the error-handling clause of a safe-navigation match statement,
 * i.e. the equivalent of {@code error $t_match_error$ => $result$ = $t_match_error$}.
 *
 * @param expr          expression whose position the generated nodes use
 * @param tempResultVar temp variable receiving the bound error value
 * @return a typed binding pattern clause that assigns the matched error to the result var
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    // Pattern variable of type error that binds the matched error value.
    BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName,
            symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
    // Clause body: $result$ = <bound error>;
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil
            .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
    return errorPattern;
}
/**
 * Builds a match-expression clause that matches nil and yields the given expression,
 * i.e. the equivalent of {@code _ (of type ()) => expr}.
 *
 * @param pos  position to attach to the generated nodes
 * @param expr expression produced when the matched value is nil
 * @return a match-expression pattern clause for the nil case
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    // The bound variable is ignored (named "_"); only the type test matters.
    String nullPatternVarName = IGNORE.toString();
    BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType,
            null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                    this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));
    BLangMatchExprPatternClause nullPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullPattern.variable = errorPatternVar;
    nullPattern.expr = expr;
    nullPattern.pos = pos;
    return nullPattern;
}
/**
 * Builds the nil-handling clause of a safe-navigation match statement,
 * i.e. the equivalent of {@code () $t_match_null$ => $result$ = $t_match_null$}.
 *
 * @param expr          expression whose position the generated nodes use
 * @param tempResultVar temp variable receiving the bound nil value
 * @return a typed binding pattern clause that assigns the matched nil to the result var
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    // Pattern variable of type () that binds the matched nil value.
    BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName,
            symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                    this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));
    // Clause body: $result$ = <bound nil>;
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil
            .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
    return nullPattern;
}
/**
 * Builds the success clause of a safe-navigation match statement: binds the non-error
 * (and, if {@code liftError}, non-nil-lifted) value to a fresh pattern variable, rewires
 * {@code accessExpr} to navigate from that variable, and assigns the result into
 * {@code tempResultVar}. NOTE: mutates {@code accessExpr} in place (expr, flags, type)
 * and records the generated assignment in {@code this.safeNavigationAssignment} so an
 * outer chain step can later replace its RHS.
 *
 * @param accessExpr    access expression being desugared; modified in place
 * @param tempResultVar temp variable receiving the accessed value
 * @param liftError     whether error is lifted out of the matched type
 * @return the success-case typed binding pattern clause
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar, boolean liftError) {
    // Type of the matched value with nil (and optionally error) lifted away.
    BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    // Function-typed values need an invokable symbol so calls through the pattern var resolve.
    BVarSymbol successPatternSymbol;
    if (type.tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    // Re-root the access on the pattern variable; the value is now known to be safe,
    // so the safe-navigation flags are cleared and the original type restored.
    accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.type = accessExpr.originalType;
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    // Remember the assignment so an enclosing chain link can swap in its own RHS.
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
/**
 * Checks whether an LHS access chain needs safe-navigation handling, i.e. whether
 * any container expression along a field/index access chain has a nullable type.
 *
 * @param expr LHS expression to inspect
 * @return true if some container in the chain is nullable, false otherwise
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    // Walk the chain iteratively from the outermost access toward its root.
    BLangExpression current = expr;
    while (current.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            || current.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression container = ((BLangAccessExpression) current).expr;
        if (container.type.isNullable()) {
            return true;
        }
        current = container;
    }
    return false;
}
/**
 * Rewrites an assignment whose LHS is a (possibly nullable) access chain into a block:
 * first a sequence of nil-checks/initializations for each link of the chain
 * (via {@link #createLHSSafeNavigation}), then the actual assignment on a cloned,
 * safe-typed copy of the access expression.
 *
 * @param accessExpr     LHS access expression of the assignment
 * @param rhsExpr        RHS expression being assigned
 * @param safeAssignment unused here; kept for caller compatibility
 * @return a block statement containing the guards followed by the assignment
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    // Fresh stack: createLHSSafeNavigation threads rewritten sub-expressions through it.
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    createLHSSafeNavigation(stmts, accessExpr.expr);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
            cloneExpression(accessExpr), rhsExpr);
    stmts.add(assignment);
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
/**
 * Recursively emits guard statements for each link of an LHS access chain.
 * For every nullable link it emits an if-statement that, when the value is nil,
 * either initializes it with an empty mapping (for defaultable mapping types that
 * are not the chain root) or panics with a NullReferenceException. Invocations in
 * the chain are hoisted into intermediate variables so they run exactly once.
 * The rewritten sub-expression for each link is communicated to the caller via
 * {@code accessExprStack} (pushed at the end, popped by the recursive caller).
 *
 * @param stmts statement list the guards are appended to
 * @param expr  current link of the access chain (processed root-first via recursion)
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
    NodeKind kind = expr.getKind();
    boolean root = false;
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
        // Guard the container first, then re-root this access on the rewritten container.
        createLHSSafeNavigation(stmts, accessExpr.expr);
        accessExpr.expr = accessExprStack.pop();
    } else {
        // Chain root (e.g. a simple variable reference).
        root = true;
    }
    // Hoist invocation results into a temp so the call is evaluated only once.
    if (expr.getKind() == NodeKind.INVOCATION) {
        BLangInvocation invocation = (BLangInvocation) expr;
        BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
                + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
        BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
        BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
                intermediateVariable);
        stmts.add(intermediateVariableDefinition);
        expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
    }
    if (expr.type.isNullable()) {
        // if (<expr> is ()) { <init-or-panic> }
        BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
        isNillTest.type = symTable.booleanType;
        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
        // From here on, work with a clone whose type has nil lifted away.
        expr = cloneExpression(expr);
        expr.type = types.getSafeType(expr.type, true, false);
        if (isDefaultableMappingType(expr.type) && !root) {
            // Defaultable intermediate link: initialize with an empty mapping literal.
            BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            jsonLiteral.type = expr.type;
            jsonLiteral.pos = expr.pos;
            BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                    expr, jsonLiteral);
            thenStmt.addStatement(assignment);
        } else {
            // Non-defaultable (or chain root): panic with a NullReferenceException error.
            BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
            literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
            literal.type = symTable.stringType;
            BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
            errorCtorInvocation.pos = expr.pos;
            errorCtorInvocation.argExprs.add(literal);
            errorCtorInvocation.requiredArgs.add(literal);
            errorCtorInvocation.type = symTable.errorType;
            errorCtorInvocation.symbol = symTable.errorConstructor;
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.expr = errorCtorInvocation;
            panicNode.pos = expr.pos;
            thenStmt.addStatement(panicNode);
        }
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
        stmts.add(ifelse);
    }
    // Hand the (possibly rewritten) expression for this link back to the caller.
    accessExprStack.push(expr);
}
/**
 * Creates a fresh type node representing the nil type {@code ()}.
 *
 * @return a value-type node whose kind and resolved type are nil
 */
private BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.type = symTable.nilType;
    nilTypeNode.typeKind = TypeKind.NIL;
    return nilTypeNode;
}
/**
 * Clones a variable-reference-like expression: simple variable references are
 * re-created from their symbol, access expressions are cloned recursively.
 *
 * @param expr expression to clone
 * @return the cloned variable reference
 * @throws IllegalStateException for unsupported expression kinds
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind exprKind = expr.getKind();
    if (exprKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (exprKind == NodeKind.FIELD_BASED_ACCESS_EXPR || exprKind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || exprKind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Clones a field/index access expression chain. The container expression is cloned
 * recursively and its type is narrowed to the nil-lifted safe type; safe-navigation
 * flags are cleared on the clone and its original (pre-safe-navigation) type restored.
 *
 * @param originalAccessExpr access expression to clone
 * @return the cloned access expression (or the original if it has no container)
 * @throws IllegalStateException for expression kinds that cannot be cloned
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    // Clone the container first.
    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression((BLangVariableReference) originalAccessExpr.expr);
    }
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);
    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // BUG FIX: the previous code set accessExpr = null here and then
            // dereferenced it below, which was a guaranteed NullPointerException.
            // Fail fast with a meaningful error instead.
            throw new IllegalStateException("cloning invocation expressions is not supported");
        default:
            throw new IllegalStateException();
    }
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    // The clone navigates an already-safe value: clear the flags, restore the type.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Builds the expression {@code expr + 1}, used to exclude the start of an int range.
 *
 * @param expr original range-start expression
 * @return a binary ADD expression adding the int literal 1
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BOperatorSymbol addOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
            symTable.intType, symTable.intType);
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOpSymbol);
}
/**
 * Builds the expression {@code expr - 1}, used to exclude the end of an int range.
 *
 * @param expr original range-end expression
 * @return a binary SUB expression subtracting the int literal 1
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BOperatorSymbol subOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
            symTable.intType, symTable.intType);
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOpSymbol);
}
/**
 * Builds the empty default value used to initialize a nil intermediate link of a
 * safe-navigation LHS: an empty JSON array/object, map, or record literal, chosen
 * by the (nil-lifted) type of the container being accessed.
 *
 * @param accessExpr access expression whose container needs a default value
 * @return an empty literal expression of the appropriate mapping/array type
 * @throws IllegalStateException if the container type is not a defaultable mapping type
 */
private BLangExpression getDefaultValueExpr(BLangAccessExpression accessExpr) {
    BType fieldType = accessExpr.originalType;
    // Container type with nil lifted away.
    BType type = types.getSafeType(accessExpr.expr.type, true, false);
    switch (type.tag) {
        case TypeTags.JSON:
            // Integer index into JSON implies an array; otherwise an object literal.
            if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR &&
                    ((BLangIndexBasedAccess) accessExpr).indexExpr.type.tag == TypeTags.INT) {
                return new BLangJSONArrayLiteral(new ArrayList<>(), new BArrayType(fieldType));
            }
            return new BLangJSONLiteral(accessExpr.pos, new ArrayList<>(), fieldType);
        case TypeTags.MAP:
            return new BLangMapLiteral(accessExpr.pos, new ArrayList<>(), type);
        case TypeTags.RECORD:
            return new BLangRecordLiteral(accessExpr.pos, type);
        default:
            throw new IllegalStateException();
    }
}
/**
 * Converts a recorded parameter default value into a literal expression, coercing the
 * stored Long/String/Boolean representation to match the declared parameter type.
 *
 * @param defaultValue recorded default value (may be null, meaning nil)
 * @param paramTypeTag type tag of the parameter the default belongs to
 * @return a literal expression of the appropriate type
 * @throws IllegalStateException if the stored value has an unsupported runtime class
 */
private BLangExpression getDefaultValueLiteral(DefaultValueLiteral defaultValue, int paramTypeTag) {
    if (defaultValue == null || defaultValue.getValue() == null) {
        return getNullLiteral();
    }
    Object value = defaultValue.getValue();
    int literalTypeTag = defaultValue.getLiteralTypeTag();
    // Integer-valued defaults may still target float/decimal parameters.
    if (value instanceof Long) {
        switch (paramTypeTag) {
            case TypeTags.FLOAT:
                return getFloatLiteral(((Long) value).doubleValue());
            case TypeTags.DECIMAL:
                return getDecimalLiteral(String.valueOf(value));
            default:
                return getIntLiteral((Long) value);
        }
    }
    // String-valued defaults may encode float/decimal values, or be genuine strings.
    if (value instanceof String) {
        switch (paramTypeTag) {
            case TypeTags.FLOAT:
                return getFloatLiteral(Double.parseDouble((String) value));
            case TypeTags.DECIMAL:
                return getDecimalLiteral(String.valueOf(value));
            case TypeTags.FINITE:
            case TypeTags.UNION:
                // Disambiguate using the recorded literal type tag.
                if (literalTypeTag == TypeTags.FLOAT) {
                    return getFloatLiteral(Double.parseDouble((String) value));
                }
                return getStringLiteral((String) value);
            default:
                return getStringLiteral((String) value);
        }
    }
    if (value instanceof Boolean) {
        return getBooleanLiteral((Boolean) value);
    }
    throw new IllegalStateException("Unsupported default value type " + paramTypeTag);
}
/**
 * Returns the implicit default (zero) value literal for a type, selected by type tag:
 * empty string, false, 0.0, 0, "0.0" decimal — everything else defaults to nil.
 *
 * @param paramTypeTag type tag of the parameter
 * @return a literal expression holding the type's implicit default value
 */
private BLangExpression getDefaultValue(int paramTypeTag) {
    if (paramTypeTag == TypeTags.STRING) {
        return getStringLiteral("");
    }
    if (paramTypeTag == TypeTags.BOOLEAN) {
        return getBooleanLiteral(false);
    }
    if (paramTypeTag == TypeTags.FLOAT) {
        return getFloatLiteral(0.0);
    }
    if (paramTypeTag == TypeTags.BYTE || paramTypeTag == TypeTags.INT) {
        return getIntLiteral(0);
    }
    if (paramTypeTag == TypeTags.DECIMAL) {
        return getDecimalLiteral("0.0");
    }
    // FINITE, RECORD, OBJECT, UNION and everything else default to nil.
    return getNullLiteral();
}
/**
 * Creates a string literal node for the given value.
 *
 * @param value literal string value
 * @return a literal node of string type
 */
private BLangLiteral getStringLiteral(String value) {
    BLangLiteral stringLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    stringLiteral.type = symTable.stringType;
    stringLiteral.value = value;
    return stringLiteral;
}
/**
 * Creates an int literal node for the given value.
 *
 * @param value literal long value
 * @return a literal node of int type
 */
private BLangLiteral getIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.type = symTable.intType;
    intLiteral.value = value;
    return intLiteral;
}
/**
 * Creates a float literal node for the given value.
 *
 * @param value literal double value
 * @return a literal node of float type
 */
private BLangLiteral getFloatLiteral(double value) {
    BLangLiteral floatLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    floatLiteral.type = symTable.floatType;
    floatLiteral.value = value;
    return floatLiteral;
}
/**
 * Creates a decimal literal node; the value is kept in its string form.
 *
 * @param value literal decimal value as a string
 * @return a literal node of decimal type
 */
private BLangLiteral getDecimalLiteral(String value) {
    BLangLiteral decimalLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    decimalLiteral.type = symTable.decimalType;
    decimalLiteral.value = value;
    return decimalLiteral;
}
/**
 * Creates a boolean literal node for the given value.
 *
 * @param value literal boolean value
 * @return a literal node of boolean type
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral booleanLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    booleanLiteral.type = symTable.booleanType;
    booleanLiteral.value = value;
    return booleanLiteral;
}
/**
 * Creates a nil literal node (value left unset, type set to nil).
 *
 * @return a literal node of nil type
 */
private BLangLiteral getNullLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.type = symTable.nilType;
    return nilLiteral;
}
/**
 * Checks whether the (nil-lifted) type is a mapping type that can be initialized
 * with an empty literal: json, map, or record.
 *
 * @param type type to check (nil is lifted before checking)
 * @return true for json/map/record, false otherwise
 */
private boolean isDefaultableMappingType(BType type) {
    int safeTypeTag = types.getSafeType(type, true, false).tag;
    return safeTypeTag == TypeTags.JSON
            || safeTypeTag == TypeTags.MAP
            || safeTypeTag == TypeTags.RECORD;
}
/**
 * Creates and registers the implicit init function for an object/record type node:
 * builds an attached function with a receiver of the structure's type, wires up its
 * symbol, scope, and taint table, records it as the type symbol's initializer, and
 * returns the rewritten (desugared) function.
 *
 * @param structureTypeNode object or record type node needing an init function
 * @param env               symbol environment of the enclosing package
 * @param suffix            name suffix for the generated init function
 * @return the desugared init function
 */
private BLangFunction createInitFunctionForStructureType(BLangStructureTypeNode structureTypeNode, SymbolEnv env,
                                                         Name suffix) {
    BLangFunction initFunction = ASTBuilderUtil
            .createInitFunctionWithNilReturn(structureTypeNode.pos, Names.EMPTY.value, suffix);
    // Set the receiver to the structure type, making this an attached function.
    initFunction.receiver = ASTBuilderUtil.createReceiver(structureTypeNode.pos, structureTypeNode.type);
    BVarSymbol receiverSymbol = new BVarSymbol(Flags.asMask(EnumSet.noneOf(Flag.class)),
            names.fromIdNode(initFunction.receiver.name),
            env.enclPkg.symbol.pkgID, structureTypeNode.type, null);
    initFunction.receiver.symbol = receiverSymbol;
    initFunction.type = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    initFunction.attachedFunction = true;
    initFunction.flagSet.add(Flag.ATTACHED);
    // Create the function symbol under the attached-function naming scheme.
    Name funcSymbolName = names.fromString(Symbols.getAttachedFuncSymbolName(
            structureTypeNode.type.tsymbol.name.value, Names.USER_DEFINED_INIT_SUFFIX.value));
    initFunction.symbol = Symbols
            .createFunctionSymbol(Flags.asMask(initFunction.flagSet), funcSymbolName, env.enclPkg.symbol.pkgID,
                    initFunction.type, structureTypeNode.symbol.scope.owner,
                    initFunction.body != null);
    initFunction.symbol.scope = new Scope(initFunction.symbol);
    initFunction.symbol.scope.define(receiverSymbol.name, receiverSymbol);
    initFunction.symbol.receiverSymbol = receiverSymbol;
    receiverSymbol.owner = initFunction.symbol;
    initFunction.symbol.retType = symTable.nilType;
    // Mark the init function as untainted for taint analysis.
    initFunction.symbol.taintTable = new HashMap<>();
    TaintRecord taintRecord = new TaintRecord(TaintRecord.TaintedStatus.UNTAINTED, new ArrayList<>());
    initFunction.symbol.taintTable.put(TaintAnalyzer.ALL_UNTAINTED_TABLE_ENTRY_INDEX, taintRecord);
    // Register the initializer on the type symbol and the type node.
    BStructureTypeSymbol typeSymbol = ((BStructureTypeSymbol) structureTypeNode.type.tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(suffix, initFunction.symbol,
            (BInvokableType) initFunction.type);
    structureTypeNode.initFunction = initFunction;
    return rewrite(initFunction, env);
}
/**
 * Desugars a logical AND/OR binary expression into an if-else over a temp result
 * variable, preserving short-circuit evaluation, and rewrites the resulting
 * statement expression into {@code result}.
 *
 * @param binaryExpr logical AND or OR expression to desugar
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = rhsExpr;
     * } else {
     *    $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = true;
     * } else {
     *    $result$ = rhsExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    // then-branch: AND evaluates the RHS, OR short-circuits to true.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangExpression thenResult;
    if (binaryExpr.opKind == OperatorKind.AND) {
        thenResult = binaryExpr.rhsExpr;
    } else {
        thenResult = getBooleanLiteral(true);
    }
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);
    // else-branch: AND short-circuits to false, OR evaluates the RHS.
    BLangExpression elseResult;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    if (binaryExpr.opKind == OperatorKind.AND) {
        elseResult = getBooleanLiteral(false);
    } else {
        elseResult = binaryExpr.rhsExpr;
    }
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);
    // Wrap everything into a statement expression whose value is $result$.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = binaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Split package init function into several smaller functions.
 * Statements are distributed into chains of intermediate init functions, each of
 * which check-invokes the next; the split only applies on the JVM target when the
 * original body exceeds {@code INIT_METHOD_SPLIT_SIZE} statements.
 *
 * @param packageNode package node
 * @param env         symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    // Small bodies, or non-JVM targets, are left untouched.
    if (packageNode.initFunction.body.stmts.size() < methodSize || !isJvmTarget) {
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;

    List<BLangFunction> generatedFunctions = new ArrayList<>();
    List<BLangStatement> stmts = new ArrayList<>();
    stmts.addAll(initFunction.body.stmts);
    initFunction.body.stmts.clear();
    BLangFunction newFunc = initFunction;

    // Phase 1: distribute the leading non-variable-definition statements,
    // starting a new intermediate function every methodSize statements.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
        }
        newFunc.body.stmts.add(stmts.get(i));
    }

    // Phase 2: group the remaining statements into chunks, splitting only at
    // service-constructor assignments (so a service's setup stays together), and
    // stopping once a LISTENER package-variable assignment is reached.
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFunc.body.stmts.size() + chunkStmts.size() > methodSize)) {
            // NOTE: the size check is part of the enclosing condition; the previously
            // duplicated inner if was redundant and has been removed.
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
            newFunc.body.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            break;
        }
    }
    newFunc.body.stmts.addAll(chunkStmts);

    // Phase 3: distribute whatever remains after the listener cutoff.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            symTable.rootScope.define(names.fromIdNode(newFunc.name) , newFunc.symbol);
        }
        newFunc.body.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);

    // Chain the functions: each one check-invokes its successor.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);
        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                        createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                new ArrayList<>(), symTable.errorOrNilType),
                        symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
        BLangExpressionStmt expressionStmt = ASTBuilderUtil.createExpressionStmt(thisFunction.pos,
                thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;
        // The first function (index 0) is the original init function and is
        // rewritten/registered by the caller; only register the intermediates here.
        if (j > 0) {
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }

    if (generatedFunctions.size() > 1) {
        // Register the last intermediate function (it never got a successor invocation).
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }

    return generatedFunctions.get(0);
}
/**
 * Creates an intermediate package init function with a monotonically increasing
 * name suffix and registers its invokable symbol.
 *
 * @param pkgNode package node the function belongs to
 * @param env     symbol environment of the package
 * @return the newly created intermediate init function
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    // Each intermediate function gets a unique, increasing suffix.
    Name funcName = new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++);
    BLangFunction initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(
            pkgNode.pos, pkgNode.symbol.pkgID.toString(), funcName, symTable);
    createInvokableSymbol(initFunction, env);
    return initFunction;
}
}
|
class Desugar extends BLangNodeVisitor {
// Context key under which the singleton Desugar instance is registered.
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
        new CompilerContext.Key<>();
// Names of runtime/builtin helper functions invoked by generated code.
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
// Error reason used when a safe-navigation guard panics on nil.
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
// Compiler services resolved from the CompilerContext in the constructor.
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private StreamingCodeDesugar streamingCodeDesugar;
private AnnotationDesugar annotationDesugar;
private InMemoryTableQueryBuilder inMemoryTableQueryBuilder;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
// Output slot for the visitor: each visit(...) assigns its rewritten node here.
private BLangNode result;
private BLangStatementLink currentLink;
public Stack<BLangLockStmt> enclLocks = new Stack<>();
// Current symbol environment while walking the tree.
private SymbolEnv env;
// Counters used to generate unique names for synthesized constructs.
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprNumber = 0;
// Mutable state threaded through the safe-navigation desugaring routines.
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
// Set to true by the constructor; gates JVM-specific desugaring (e.g. init splitting).
static boolean isJvmTarget = false;
/**
 * Returns the Desugar instance registered in the compiler context, creating and
 * registering one on first use (the constructor stores itself in the context).
 *
 * @param context compiler context
 * @return the per-context Desugar singleton
 */
public static Desugar getInstance(CompilerContext context) {
    Desugar instance = context.get(DESUGAR_KEY);
    return instance != null ? instance : new Desugar(context);
}
/**
 * Creates the Desugar pass and registers it in the compiler context. Registration
 * happens before resolving the dependent services, so getInstance() calls made
 * while those services initialize see this instance.
 *
 * @param context compiler context to resolve dependencies from
 */
private Desugar(CompilerContext context) {
    isJvmTarget = true;
    context.put(DESUGAR_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.closureDesugar = ClosureDesugar.getInstance(context);
    this.streamingCodeDesugar = StreamingCodeDesugar.getInstance(context);
    this.annotationDesugar = AnnotationDesugar.getInstance(context);
    this.inMemoryTableQueryBuilder = InMemoryTableQueryBuilder.getInstance(context);
    this.types = Types.getInstance(context);
    // BUG FIX: the assignment to this.names was duplicated; the redundant
    // second Names.getInstance(context) call has been removed.
    this.names = Names.getInstance(context);
    this.serviceDesugar = ServiceDesugar.getInstance(context);
}
/**
 * Entry point of the desugar phase: prepares the package's annotation map and
 * rewrites the whole package tree.
 *
 * @param pkgNode package to desugar
 * @return the desugared package node
 */
public BLangPackage perform(BLangPackage pkgNode) {
    annotationDesugar.initializeAnnotationMap(pkgNode);
    return rewrite(pkgNode, env);
}
/**
 * Hoists attached functions of object/record type definitions to the package level
 * so they are compiled as top-level functions. Non-abstract objects additionally get
 * an init function created (if absent) and registered on their type symbol.
 *
 * @param pkgNode package whose type definitions are processed
 * @param env     symbol environment of the package
 */
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
    for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
        // Type aliases have no attached functions of their own.
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.tag == SymTag.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            // Hoist member functions not already registered at package level.
            objectTypeNode.functions.forEach(f -> {
                if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                    pkgNode.functions.add(f);
                    pkgNode.topLevelNodes.add(f);
                }
            });
            // Abstract objects cannot be instantiated, so no init function is needed.
            if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                continue;
            }
            if (objectTypeNode.initFunction == null) {
                objectTypeNode.initFunction = createInitFunctionForStructureType(objectTypeNode, env,
                        Names.USER_DEFINED_INIT_SUFFIX);
            }
            // Register the initializer as an attached function and hoist it too.
            BObjectTypeSymbol objectSymbol = ((BObjectTypeSymbol) objectTypeNode.type.tsymbol);
            objectSymbol.attachedFuncs.add(objectSymbol.initializerFunc);
            pkgNode.functions.add(objectTypeNode.initFunction);
            pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
        } else if (typeDef.symbol.tag == SymTag.RECORD) {
            // Records always have an init function by this point; just hoist it.
            BLangRecordTypeNode recordTypeNod = (BLangRecordTypeNode) typeDef.typeNode;
            pkgNode.functions.add(recordTypeNod.initFunction);
            pkgNode.topLevelNodes.add(recordTypeNod.initFunction);
        }
    }
}
/**
 * Create package init functions: the {@code <init>} function (returning error?),
 * the {@code <start>} function (returning error?), and the {@code <stop>} function
 * (returning nil). XML namespace declarations of the package are prepended to the
 * init function body, and invokable symbols are created for all three.
 *
 * @param pkgNode package node
 * @param env     symbol environment of package
 */
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
            Names.INIT_FUNCTION_SUFFIX,
            symTable);
    // Package-level xmlns declarations are set up at the start of <init>.
    pkgNode.xmlnsList.forEach(xmlns -> {
        pkgNode.initFunction.body.addStatement(createNamespaceDeclrStatement(xmlns));
    });
    pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
            Names.START_FUNCTION_SUFFIX,
            symTable);
    pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias,
            Names.STOP_FUNCTION_SUFFIX);
    createInvokableSymbol(pkgNode.initFunction, env);
    createInvokableSymbol(pkgNode.startFunction, env);
    createInvokableSymbol(pkgNode.stopFunction, env);
}
/**
 * Terminates the generated module {@code <init>} function: if the user defined an
 * {@code init} function, appends {@code return init();} so its result propagates;
 * otherwise appends a plain nil return.
 *
 * @param pkgNode package whose init function is finalized
 */
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
    // Look for a top-level (non-attached) user-defined init function.
    Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
            .filter(bLangFunction -> !bLangFunction.attachedFunction &&
                    bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
            .findFirst();
    if (!userDefInitOptional.isPresent()) {
        // No user-defined init: the module <init> simply returns nil.
        addNilReturnStatement(pkgNode.initFunction.body);
        return;
    }
    // Build the invocation node for the user-defined init function.
    BLangFunction userDefInit = userDefInitOptional.get();
    BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    userDefInitInvocation.pos = pkgNode.initFunction.pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(userDefInit.name.value);
    userDefInitInvocation.name = name;
    userDefInitInvocation.symbol = userDefInit.symbol;
    BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    pkgAlias.setLiteral(false);
    pkgAlias.setValue(pkgNode.packageID.name.value);
    userDefInitInvocation.pkgAlias = pkgAlias;
    userDefInitInvocation.type = userDefInit.returnTypeNode.type;
    userDefInitInvocation.requiredArgs = Collections.emptyList();
    // Append "return init();" to the module <init> body.
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = pkgNode.initFunction.pos;
    returnStmt.expr = userDefInitInvocation;
    pkgNode.initFunction.body.stmts.add(returnStmt);
}
/**
 * Create invokable symbol for function: resolves the return type if needed, builds
 * the invokable type and symbol, registers the required parameters on the symbol,
 * gives it a fresh scope, and attaches it to the function node.
 *
 * @param bLangFunction function node
 * @param env           Symbol environment
 */
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
    // Resolve the return type node lazily if it has not been resolved yet.
    BType returnType = bLangFunction.returnTypeNode.type == null ?
            symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type;
    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), returnType, null);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
            new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true);
    functionSymbol.retType = returnType;
    // Define function receiver if any.
    for (BLangVariable param : bLangFunction.requiredParams) {
        functionSymbol.params.add(param.symbol);
    }
    functionSymbol.scope = new Scope(functionSymbol);
    bLangFunction.symbol = functionSymbol;
}
/**
 * Appends a {@code return ();} statement to the given block.
 *
 * @param bLangBlockStmt block statement node to terminate with a nil return
 */
private void addNilReturnStatement(BLangBlockStmt bLangBlockStmt) {
    bLangBlockStmt.addStatement(
            ASTBuilderUtil.createNilReturnStmt(bLangBlockStmt.pos, symTable.nilType));
}
/**
 * Wraps an XMLNS declaration node into a namespace declaration statement.
 *
 * @param xmlns XMLNS node to wrap
 * @return the namespace declaration statement carrying the given node
 */
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
    BLangXMLNSStatement namespaceStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    namespaceStmt.pos = xmlns.pos;
    namespaceStmt.xmlnsDecl = xmlns;
    return namespaceStmt;
}
/**
 * Desugars a whole package: creates the init/start/stop functions, hoists attached
 * functions, seeds the init function with constant/global-var initializations and the
 * user-defined init invocation, rewrites every top-level construct, splits the init
 * function for the JVM target, and finally runs closure desugaring. Phase order here
 * is significant — later steps consume nodes produced by earlier ones.
 */
@Override
public void visit(BLangPackage pkgNode) {
    // Desugar each package at most once.
    if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
        result = pkgNode;
        return;
    }
    SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    createPackageInitFunctions(pkgNode, env);
    addAttachedFunctionsToPackageLevel(pkgNode, env);
    // Literal constants get their associated type definitions registered.
    pkgNode.constants.stream()
            .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                    constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
            .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
    BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
    // Map-typed constants are initialized (frozen) inside the init function.
    pkgNode.constants.stream()
            .filter(constant -> constant.symbol.type.tag == TypeTags.MAP)
            .forEach(constant -> {
                BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
                constant.expr = rewriteExpr(constant.expr);
                BLangInvocation frozenConstValExpr =
                        visitUtilMethodInvocation(constant.pos, BLangBuiltInMethod.FREEZE, Lists.of(constant.expr));
                BLangAssignment constInit =
                        ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
                pkgNode.initFunction.body.stmts.add(constInit);
            });
    // Global variable initializers move into the init function body.
    pkgNode.globalVars.forEach(globalVar -> {
        BLangAssignment assignment = createAssignmentStmt(globalVar);
        if (assignment.expr != null) {
            pkgNode.initFunction.body.stmts.add(assignment);
        }
    });
    pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
    annotationDesugar.rewritePackageAnnotations(pkgNode, env);
    // Must be the last statement added to the init function body.
    addUserDefinedModuleInitInvocationAndReturn(pkgNode);
    // Rewrite type defs in precedence order so dependencies are desugared first.
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
    pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
    pkgNode.constants = rewrite(pkgNode.constants, env);
    pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
    pkgNode.functions = rewrite(pkgNode.functions, env);
    serviceDesugar.rewriteListeners(pkgNode.globalVars, env);
    serviceDesugar.rewriteServiceAttachments(serviceAttachments, env);
    addNilReturnStatement(pkgNode.startFunction.body);
    addNilReturnStatement(pkgNode.stopFunction.body);
    // Split the (possibly huge) init function before rewriting it.
    pkgNode.initFunction = splitInitFunction(pkgNode, env);
    pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
    pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
    pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
    closureDesugar.visit(pkgNode);
    // Testable sub-packages are desugared the same way.
    pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
    pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
    initFuncIndex = 0;
    result = pkgNode;
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Desugar the imported package inside its own symbol environment; the
    // import node itself needs no transformation.
    SymbolEnv importedPkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(importedPkgEnv.node, importedPkgEnv);
    result = importPkgNode;
}
@Override
public void visit(BLangTypeDefinition typeDef) {
    // Only object and record type nodes carry bodies that need desugaring.
    NodeKind typeNodeKind = typeDef.typeNode.getKind();
    if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    // Annotation attachments are rewritten in place.
    typeDef.annAttachments.forEach(annAttachment -> rewrite(annAttachment, env));
    result = typeDef;
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Fold fields inherited via type references into the object's own field list.
    objectTypeNode.fields.addAll(objectTypeNode.referencedFields);

    // Abstract objects have no generated init function to populate.
    if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
        result = objectTypeNode;
        return;
    }

    // Collect default-value assignments for fields that do not already have an
    // explicit init statement registered.
    Map<BSymbol, BLangStatement> initFunctionStmts = objectTypeNode.initFunction.initFunctionStmts;
    objectTypeNode.fields.stream()
            .filter(field -> field.expr != null && !initFunctionStmts.containsKey(field.symbol))
            .forEachOrdered(field -> initFunctionStmts.put(field.symbol,
                    createStructFieldUpdate(objectTypeNode.initFunction, field)));

    // Prepend the collected assignments to the init function body, keeping
    // their relative order.
    BLangStatement[] initStmts = initFunctionStmts.values().toArray(new BLangStatement[0]);
    for (int idx = 0; idx < initStmts.length; idx++) {
        objectTypeNode.initFunction.body.stmts.add(idx, initStmts[idx]);
    }
    result = objectTypeNode;
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Fold fields inherited via type references into the record's own field list.
    recordTypeNode.fields.addAll(recordTypeNode.referencedFields);

    // Register default-value assignments for non-optional fields that do not
    // already have an init statement.
    Map<BSymbol, BLangStatement> initFnStmts = recordTypeNode.initFunction.initFunctionStmts;
    recordTypeNode.fields.stream()
            .filter(field -> field.expr != null
                    && !Symbols.isOptional(field.symbol)
                    && !initFnStmts.containsKey(field.symbol))
            .forEachOrdered(field -> initFnStmts.put(field.symbol,
                    createStructFieldUpdate(recordTypeNode.initFunction, field)));

    // Prepend the collected assignments to the init function body in order.
    BLangStatement[] defaultStmts = initFnStmts.values().toArray(new BLangStatement[0]);
    for (int idx = 0; idx < defaultStmts.length; idx++) {
        recordTypeNode.initFunction.body.stmts.add(idx, defaultStmts[idx]);
    }
    result = recordTypeNode;
}
@Override
|
/**
 * AST desugaring phase: rewrites the checked BLang AST into a simpler,
 * lower-level form consumed by code generation.
 */
class Desugar extends BLangNodeVisitor {
// Per-compiler-context singleton key.
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
new CompilerContext.Key<>();
// Names of runtime helper functions invoked by desugared code.
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
// Collaborating compiler phases/utilities, resolved from the CompilerContext.
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private StreamingCodeDesugar streamingCodeDesugar;
private AnnotationDesugar annotationDesugar;
private InMemoryTableQueryBuilder inMemoryTableQueryBuilder;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
// Result of the most recent rewrite of a node (visitor output slot).
private BLangNode result;
private BLangStatementLink currentLink;
// Lock statements enclosing the node currently being rewritten.
public Stack<BLangLockStmt> enclLocks = new Stack<>();
private SymbolEnv env;
// Counters used to generate unique names for synthesized constructs.
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprCount = 0;
// State used while desugaring match statements and safe-navigation access.
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
static boolean isJvmTarget = false;
/** Returns the per-compiler-context singleton, creating it on first use. */
public static Desugar getInstance(CompilerContext context) {
    // The constructor registers itself in the context, so a newly created
    // instance is returned on subsequent calls as well.
    Desugar desugar = context.get(DESUGAR_KEY);
    return desugar != null ? desugar : new Desugar(context);
}
/**
 * Creates the desugar phase for the given compiler context, registering this
 * instance so {@code getInstance} returns it, and resolving all collaborating
 * phases/utilities from the context.
 */
private Desugar(CompilerContext context) {
    isJvmTarget = true;
    context.put(DESUGAR_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.closureDesugar = ClosureDesugar.getInstance(context);
    this.streamingCodeDesugar = StreamingCodeDesugar.getInstance(context);
    this.annotationDesugar = AnnotationDesugar.getInstance(context);
    this.inMemoryTableQueryBuilder = InMemoryTableQueryBuilder.getInstance(context);
    this.types = Types.getInstance(context);
    // Fix: `this.names = Names.getInstance(context);` was assigned twice in a
    // row; the redundant duplicate assignment has been removed.
    this.names = Names.getInstance(context);
    this.serviceDesugar = ServiceDesugar.getInstance(context);
}
/** Entry point of the desugar phase: rewrites the given package in place. */
public BLangPackage perform(BLangPackage pkgNode) {
    // Seed the annotation map before rewriting the whole package tree.
    this.annotationDesugar.initializeAnnotationMap(pkgNode);
    return rewrite(pkgNode, this.env);
}
/**
 * Promotes functions attached to object/record type definitions to package
 * level so later phases see them as ordinary top-level functions, and ensures
 * every non-abstract object has an init function.
 */
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
    for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
        // User-defined type references are aliases; their target is handled
        // where it is defined.
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.tag == SymTag.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            // Promote each attached function exactly once (guard against
            // functions already moved to package level).
            objectTypeNode.functions.forEach(f -> {
                if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                    pkgNode.functions.add(f);
                    pkgNode.topLevelNodes.add(f);
                }
            });
            // Abstract objects cannot be instantiated, so no init function.
            if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                continue;
            }
            // Synthesize a default init function when the user did not write one.
            if (objectTypeNode.initFunction == null) {
                objectTypeNode.initFunction = createInitFunctionForStructureType(objectTypeNode, env,
                        Names.USER_DEFINED_INIT_SUFFIX);
            }
            // Register the initializer on the object symbol and promote it too.
            BObjectTypeSymbol objectSymbol = ((BObjectTypeSymbol) objectTypeNode.type.tsymbol);
            objectSymbol.attachedFuncs.add(objectSymbol.initializerFunc);
            pkgNode.functions.add(objectTypeNode.initFunction);
            pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
        } else if (typeDef.symbol.tag == SymTag.RECORD) {
            // Records always carry a generated init function; promote it.
            BLangRecordTypeNode recordTypeNod = (BLangRecordTypeNode) typeDef.typeNode;
            pkgNode.functions.add(recordTypeNod.initFunction);
            pkgNode.topLevelNodes.add(recordTypeNod.initFunction);
        }
    }
}
/**
* Create package init functions.
*
* @param pkgNode package node
* @param env symbol environment of package
*/
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();

    // <init> returns error|() so user-defined init failures can propagate.
    pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(
            pkgNode.pos, alias, Names.INIT_FUNCTION_SUFFIX, symTable);
    // Re-declare every module-level XML namespace inside the init function.
    for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
        pkgNode.initFunction.body.addStatement(createNamespaceDeclrStatement(xmlns));
    }
    pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(
            pkgNode.pos, alias, Names.START_FUNCTION_SUFFIX, symTable);
    // <stop> cannot fail; it returns ().
    pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(
            pkgNode.pos, alias, Names.STOP_FUNCTION_SUFFIX);

    createInvokableSymbol(pkgNode.initFunction, env);
    createInvokableSymbol(pkgNode.startFunction, env);
    createInvokableSymbol(pkgNode.stopFunction, env);
}
/**
 * If the module defines its own {@code init} function, appends
 * {@code return init();} to the generated module init function so its result
 * (error or nil) propagates; otherwise appends a plain {@code return ();}.
 */
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
    // A user-defined module init is a non-attached function named `init`.
    Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
            .filter(bLangFunction -> !bLangFunction.attachedFunction &&
                    bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
            .findFirst();
    if (!userDefInitOptional.isPresent()) {
        // No user init: the generated init simply returns ().
        addNilReturnStatement(pkgNode.initFunction.body);
        return;
    }
    BLangFunction userDefInit = userDefInitOptional.get();
    // Build an invocation node calling the user-defined init function.
    BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    userDefInitInvocation.pos = pkgNode.initFunction.pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(userDefInit.name.value);
    userDefInitInvocation.name = name;
    userDefInitInvocation.symbol = userDefInit.symbol;
    BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    pkgAlias.setLiteral(false);
    pkgAlias.setValue(pkgNode.packageID.name.value);
    userDefInitInvocation.pkgAlias = pkgAlias;
    // The invocation's type mirrors the user init's declared return type.
    userDefInitInvocation.type = userDefInit.returnTypeNode.type;
    userDefInitInvocation.requiredArgs = Collections.emptyList();
    // `return <invocation>;` forwards the user init's result to the caller.
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = pkgNode.initFunction.pos;
    returnStmt.expr = userDefInitInvocation;
    pkgNode.initFunction.body.stmts.add(returnStmt);
}
/**
* Create invokable symbol for function.
*
* @param bLangFunction function node
* @param env Symbol environment
*/
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
    // Resolve the return type lazily if the type node has not been resolved yet.
    BType returnType = bLangFunction.returnTypeNode.type != null
            ? bLangFunction.returnTypeNode.type
            : symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env);

    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), returnType, null);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
            new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true);
    functionSymbol.retType = returnType;

    // Mirror the function's required parameters onto the symbol.
    bLangFunction.requiredParams.forEach(param -> functionSymbol.params.add(param.symbol));
    functionSymbol.scope = new Scope(functionSymbol);
    bLangFunction.symbol = functionSymbol;
}
/**
* Add nil return statement.
*
* @param bLangBlockStmt block statement node
*/
private void addNilReturnStatement(BLangBlockStmt bLangBlockStmt) {
    // Append an explicit `return ();` so the block always ends with a return.
    bLangBlockStmt.addStatement(ASTBuilderUtil.createNilReturnStmt(bLangBlockStmt.pos, symTable.nilType));
}
/**
* Create namespace declaration statement for XMNLNS.
*
* @param xmlns XMLNS node
* @return XMLNS statement
*/
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
    // Wrap the XMLNS declaration in a statement node carrying the same position.
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.pos = xmlns.pos;
    xmlnsStmt.xmlnsDecl = xmlns;
    return xmlnsStmt;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Idempotence guard: a package is desugared at most once.
    if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
        result = pkgNode;
        return;
    }
    SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    // Synthesize <init>/<start>/<stop> and promote attached functions first,
    // so the statements appended below have somewhere to live.
    createPackageInitFunctions(pkgNode, env);
    addAttachedFunctionsToPackageLevel(pkgNode, env);
    // Simple-literal constants get their associated singleton type definitions
    // registered at package level.
    pkgNode.constants.stream()
            .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                    constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
            .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
    BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
    // Map-typed constants are initialized (and frozen) inside the init function.
    pkgNode.constants.stream()
            .filter(constant -> constant.symbol.type.tag == TypeTags.MAP)
            .forEach(constant -> {
                BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
                constant.expr = rewriteExpr(constant.expr);
                BLangInvocation frozenConstValExpr =
                        visitUtilMethodInvocation(constant.pos, BLangBuiltInMethod.FREEZE, Lists.of(constant.expr));
                BLangAssignment constInit =
                        ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
                pkgNode.initFunction.body.stmts.add(constInit);
            });
    // Global variable initializers become assignments inside the init function.
    pkgNode.globalVars.forEach(globalVar -> {
        BLangAssignment assignment = createAssignmentStmt(globalVar);
        if (assignment.expr != null) {
            pkgNode.initFunction.body.stmts.add(assignment);
        }
    });
    pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
    annotationDesugar.rewritePackageAnnotations(pkgNode, env);
    // Must run after the init-function statements above: it appends the final
    // `return` (possibly delegating to a user-defined init).
    addUserDefinedModuleInitInvocationAndReturn(pkgNode);
    // Rewrite type definitions in dependency (precedence) order.
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
    pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
    pkgNode.constants = rewrite(pkgNode.constants, env);
    pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
    pkgNode.functions = rewrite(pkgNode.functions, env);
    serviceDesugar.rewriteListeners(pkgNode.globalVars, env);
    serviceDesugar.rewriteServiceAttachments(serviceAttachments, env);
    // <start>/<stop> bodies are completed with a trailing `return ();`.
    addNilReturnStatement(pkgNode.startFunction.body);
    addNilReturnStatement(pkgNode.stopFunction.body);
    // Split a potentially huge init function, then rewrite the three
    // lifecycle functions themselves.
    pkgNode.initFunction = splitInitFunction(pkgNode, env);
    pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
    pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
    pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
    // Closure conversion runs over the fully desugared package.
    closureDesugar.visit(pkgNode);
    pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
    pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
    // Reset per-package counter state for the next package.
    initFuncIndex = 0;
    result = pkgNode;
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Desugar the imported package inside its own symbol environment; the
    // import node itself needs no transformation.
    SymbolEnv importedPkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(importedPkgEnv.node, importedPkgEnv);
    result = importPkgNode;
}
@Override
public void visit(BLangTypeDefinition typeDef) {
    // Only object and record type nodes carry bodies that need desugaring.
    NodeKind typeNodeKind = typeDef.typeNode.getKind();
    if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    // Annotation attachments are rewritten in place.
    typeDef.annAttachments.forEach(annAttachment -> rewrite(annAttachment, env));
    result = typeDef;
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Fold fields inherited via type references into the object's own field list.
    objectTypeNode.fields.addAll(objectTypeNode.referencedFields);

    // Abstract objects have no generated init function to populate.
    if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
        result = objectTypeNode;
        return;
    }

    // Collect default-value assignments for fields that do not already have an
    // explicit init statement registered.
    Map<BSymbol, BLangStatement> initFunctionStmts = objectTypeNode.initFunction.initFunctionStmts;
    objectTypeNode.fields.stream()
            .filter(field -> field.expr != null && !initFunctionStmts.containsKey(field.symbol))
            .forEachOrdered(field -> initFunctionStmts.put(field.symbol,
                    createStructFieldUpdate(objectTypeNode.initFunction, field)));

    // Prepend the collected assignments to the init function body, keeping
    // their relative order.
    BLangStatement[] initStmts = initFunctionStmts.values().toArray(new BLangStatement[0]);
    for (int idx = 0; idx < initStmts.length; idx++) {
        objectTypeNode.initFunction.body.stmts.add(idx, initStmts[idx]);
    }
    result = objectTypeNode;
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Fold fields inherited via type references into the record's own field list.
    recordTypeNode.fields.addAll(recordTypeNode.referencedFields);

    // Register default-value assignments for non-optional fields that do not
    // already have an init statement.
    Map<BSymbol, BLangStatement> initFnStmts = recordTypeNode.initFunction.initFunctionStmts;
    recordTypeNode.fields.stream()
            .filter(field -> field.expr != null
                    && !Symbols.isOptional(field.symbol)
                    && !initFnStmts.containsKey(field.symbol))
            .forEachOrdered(field -> initFnStmts.put(field.symbol,
                    createStructFieldUpdate(recordTypeNode.initFunction, field)));

    // Prepend the collected assignments to the init function body in order.
    BLangStatement[] defaultStmts = initFnStmts.values().toArray(new BLangStatement[0]);
    for (int idx = 0; idx < defaultStmts.length; idx++) {
        recordTypeNode.initFunction.body.stmts.add(idx, defaultStmts[idx]);
    }
    result = recordTypeNode;
}
@Override
|
My bad — that condition is indeed unnecessary. I will remove it. Thanks.
|
/**
 * Narrows a resolved constant symbol's type to a finite (singleton) type built
 * from its value. Skipped for non-constant symbols, symbols already holding a
 * finite type, and constants whose value could not be resolved.
 */
private void updateSymbolType(BLangConstant constant) {
    if (constant.symbol.kind != SymbolKind.CONSTANT
            || constant.symbol.type.getKind() == TypeKind.FINITE
            || constant.symbol.value == null) {
        return;
    }
    BFiniteType finiteType = checkType(constant, constant.symbol.value.value, constant.symbol.type,
            constant.symbol.pos);
    if (finiteType != null) {
        constant.symbol.type = finiteType;
    }
}
|
if (constant.symbol.kind == SymbolKind.CONSTANT && constant.symbol.type.getKind() != TypeKind.FINITE &&
|
/**
 * Narrows a resolved constant symbol's type to its singleton type. Skipped
 * when the symbol already holds a finite type or its value is unresolved.
 */
private void updateSymbolType(BLangConstant constant) {
    BConstantSymbol symbol = constant.symbol;
    if (symbol.type.getKind() == TypeKind.FINITE || symbol.value == null) {
        return;
    }
    BType singletonType = checkType(constant.expr, constant, symbol.value.value, symbol.type, symbol.pos);
    if (singletonType != null) {
        symbol.type = singletonType;
    }
}
|
/**
 * Resolves the compile-time values of module-level constants by folding
 * constant expressions (literals, const references, unary/binary/group
 * expressions and mapping constructors) into {@code BLangConstantValue}s,
 * reporting diagnostics for cyclic, unresolvable or unsupported expressions.
 */
class ConstantValueResolver extends BLangNodeVisitor {

    private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY =
            new CompilerContext.Key<>();

    // Constant whose initializer is currently being folded.
    private BConstantSymbol currentConstSymbol;
    // Value produced by the most recently visited expression node.
    private BLangConstantValue result;
    private BLangDiagnosticLog dlog;
    // Position of the expression currently being folded, used in diagnostics.
    private Location currentPos;
    // Constants whose values have not been computed yet, keyed by symbol.
    private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>();
    // Name -> resolved value, used to detect conflicting re-initializations.
    private Map<String, BLangConstantValue> constantMap = new HashMap<>();

    private ConstantValueResolver(CompilerContext context) {
        context.put(CONSTANT_VALUE_RESOLVER_KEY, this);
        this.dlog = BLangDiagnosticLog.getInstance(context);
    }

    /** Returns the per-compiler-context singleton, creating it on first use. */
    public static ConstantValueResolver getInstance(CompilerContext context) {
        ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY);
        if (constantValueResolver == null) {
            constantValueResolver = new ConstantValueResolver(context);
        }
        return constantValueResolver;
    }

    /**
     * Resolves the values of all given constants, checks name uniqueness and
     * narrows each constant symbol's type to its singleton type.
     */
    public void resolve(List<BLangConstant> constants, PackageID packageID) {
        this.dlog.setCurrentPackageId(packageID);
        // Index every constant first so forward references resolve on demand.
        constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant));
        constants.forEach(constant -> constant.accept(this));
        constantMap.clear();
        constants.forEach(constant -> checkUniqueness(constant));
        constants.forEach(constant -> updateSymbolType(constant));
    }

    @Override
    public void visit(BLangConstant constant) {
        // Save/restore the enclosing constant so nested resolution (through
        // visit(BLangSimpleVarRef)) does not clobber our state.
        BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol;
        this.currentConstSymbol = constant.symbol;
        this.currentConstSymbol.value = visitExpr(constant.expr);
        unresolvedConstants.remove(this.currentConstSymbol);
        this.currentConstSymbol = tempCurrentConstSymbol;
    }

    @Override
    public void visit(BLangLiteral literal) {
        // Literals fold directly to their own value and type.
        this.result = new BLangConstantValue(literal.value, literal.getBType());
    }

    @Override
    public void visit(BLangNumericLiteral literal) {
        this.result = new BLangConstantValue(literal.value, literal.getBType());
    }

    @Override
    public void visit(BLangConstRef constRef) {
        // A constant reference folds to the referenced symbol's value.
        this.result = ((BConstantSymbol) constRef.symbol).value;
    }

    @Override
    public void visit(BLangSimpleVarRef varRef) {
        // Only references to constants may appear inside constant expressions.
        if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) {
            this.result = null;
            return;
        }
        BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol;
        BLangConstantValue constVal = constSymbol.value;
        if (constVal != null) {
            this.result = constVal;
            return;
        }
        // The constant directly references itself: report the cycle.
        if (this.currentConstSymbol == constSymbol) {
            dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name);
            return;
        }
        if (!this.unresolvedConstants.containsKey(constSymbol)) {
            dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value);
            this.result = null;
            return;
        }
        // Forward reference: resolve the referenced constant first, then reuse it.
        this.unresolvedConstants.get(constSymbol).accept(this);
        this.result = constSymbol.value;
    }

    @Override
    public void visit(BLangRecordLiteral recordLiteral) {
        // Fold a mapping constructor into a String -> value map. Non-constant
        // keys are skipped; spread fields are merged in place.
        Map<String, BLangConstantValue> mapConstVal = new HashMap<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            String key;
            BLangConstantValue value;
            if (field.isKeyValueField()) {
                BLangRecordLiteral.BLangRecordKeyValueField keyValuePair =
                        (BLangRecordLiteral.BLangRecordKeyValueField) field;
                NodeKind nodeKind = keyValuePair.key.expr.getKind();
                if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
                    key = (String) ((BLangLiteral) keyValuePair.key.expr).value;
                } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
                    key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value;
                } else {
                    // Keys of other kinds cannot be folded; skip the field.
                    continue;
                }
                value = visitExpr(keyValuePair.valueExpr);
            } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                // Shorthand field: `{foo}` is equivalent to `{foo: foo}`.
                BLangRecordLiteral.BLangRecordVarNameField varNameField =
                        (BLangRecordLiteral.BLangRecordVarNameField) field;
                key = varNameField.variableName.value;
                value = visitExpr(varNameField);
            } else {
                // Spread field: merge all entries of the spread value.
                BLangConstantValue spreadOpConstValue =
                        visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr);
                if (spreadOpConstValue != null) {
                    mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value);
                }
                continue;
            }
            mapConstVal.put(key, value);
        }
        this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType());
    }

    @Override
    public void visit(BLangBinaryExpr binaryExpr) {
        BLangConstantValue lhs = visitExpr(binaryExpr.lhsExpr);
        BLangConstantValue rhs = visitExpr(binaryExpr.rhsExpr);
        this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind);
    }

    public void visit(BLangGroupExpr groupExpr) {
        // Parentheses do not change the folded value.
        this.result = visitExpr(groupExpr.expression);
    }

    public void visit(BLangUnaryExpr unaryExpr) {
        BLangConstantValue value = visitExpr(unaryExpr.expr);
        this.result = evaluateUnaryOperator(value, unaryExpr.operator);
    }

    /**
     * Folds a binary constant expression. Returns a null-valued constant when
     * either operand is unresolved or the operation fails.
     */
    private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) {
        if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) {
            // Subsequent evaluations of the same symbol will get this null value.
            return new BLangConstantValue(null, this.currentConstSymbol.type);
        }
        try {
            switch (kind) {
                case ADD:
                    return calculateAddition(lhs, rhs);
                case SUB:
                    return calculateSubtract(lhs, rhs);
                case MUL:
                    return calculateMultiplication(lhs, rhs);
                case DIV:
                    return calculateDivision(lhs, rhs);
                case MOD:
                    // Fix: MOD was missing from this switch, which made the
                    // calculateMod helper unreachable and rejected `%` in
                    // constant expressions as unsupported.
                    return calculateMod(lhs, rhs);
                case BITWISE_AND:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b);
                case BITWISE_OR:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b);
                case BITWISE_LEFT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b);
                case BITWISE_RIGHT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b);
                case BITWISE_UNSIGNED_RIGHT_SHIFT:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b);
                case BITWISE_XOR:
                    return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b);
                default:
                    dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
            }
        } catch (NumberFormatException nfe) {
            // Ignored deliberately: a malformed numeric value yields a
            // null-valued constant below; type checking reports the real error.
        } catch (ArithmeticException ae) {
            dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage());
        }
        // Failure fallback: a constant with a null value.
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }

    /** Folds a unary constant expression; returns a null-valued constant on failure. */
    private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) {
        if (value == null || value.value == null) {
            return new BLangConstantValue(null, this.currentConstSymbol.type);
        }
        try {
            switch (kind) {
                case ADD:
                    // Unary plus is the identity.
                    return new BLangConstantValue(value.value, currentConstSymbol.type);
                case SUB:
                    return calculateNegation(value);
                case BITWISE_COMPLEMENT:
                    return calculateBitWiseComplement(value);
                case NOT:
                    return calculateBooleanComplement(value);
            }
        } catch (ClassCastException ce) {
            // Ignored deliberately: an operand of the wrong runtime class falls
            // through to the null-valued constant; type checking reports it.
        }
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }

    /** Applies a bitwise operation; only int-typed constants are supported. */
    private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs,
                                                  BiFunction<Long, Long, Long> func) {
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
                Long val = func.apply((Long) lhs.value, (Long) rhs.value);
                return new BLangConstantValue(val, this.currentConstSymbol.type);
            default:
                dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
        }
        return new BLangConstantValue(null, this.currentConstSymbol.type);
    }

    // NOTE on the arithmetic helpers below: float and decimal results are kept
    // as strings; decimal math uses 128-bit decimal arithmetic (DECIMAL128).

    private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value + (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        + Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
            case TypeTags.STRING:
                // `+` on strings is concatenation.
                result = String.valueOf(lhs.value) + String.valueOf(rhs.value);
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value - (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        - Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) lhs.value * (Long) rhs.value;
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        * Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) {
        // Division by zero raises ArithmeticException for int/decimal, which is
        // reported by the caller (calculateConstValue).
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) ((Long) lhs.value / (Long) rhs.value);
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        / Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) ((Long) lhs.value % (Long) rhs.value);
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        % Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateNegation(BLangConstantValue value) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
                result = -1 * ((Long) (value.value));
                break;
            case TypeTags.FLOAT:
                result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128);
                BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128);
                BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) {
        Object result = null;
        if (this.currentConstSymbol.type.tag == TypeTags.INT) {
            result = ~((Long) (value.value));
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) {
        Object result = null;
        if (this.currentConstSymbol.type.tag == TypeTags.BOOLEAN) {
            result = !((Boolean) (value.value));
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }

    /**
     * Dispatches to the matching visit method for foldable expression kinds,
     * saving/restoring the visitor's result and position state. Returns null
     * for unchecked nodes and unsupported expression kinds.
     */
    private BLangConstantValue visitExpr(BLangExpression node) {
        if (!node.typeChecked) {
            return null;
        }
        switch (node.getKind()) {
            case LITERAL:
            case NUMERIC_LITERAL:
            case RECORD_LITERAL_EXPR:
            case SIMPLE_VARIABLE_REF:
            case BINARY_EXPR:
            case GROUP_EXPR:
            case UNARY_EXPR:
                BLangConstantValue prevResult = this.result;
                Location prevPos = this.currentPos;
                this.currentPos = node.pos;
                this.result = null;
                node.accept(this);
                BLangConstantValue newResult = this.result;
                this.result = prevResult;
                this.currentPos = prevPos;
                return newResult;
            default:
                return null;
        }
    }

    /**
     * Reports a diagnostic when the same constant name is initialized more than
     * once, distinguishing re-initialization with the same vs. another value.
     */
    private void checkUniqueness(BLangConstant constant) {
        if (constant.symbol.kind == SymbolKind.CONSTANT) {
            String nameString = constant.name.value;
            BLangConstantValue value = constant.symbol.value;
            if (constantMap.containsKey(nameString)) {
                if (value == null) {
                    dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
                } else {
                    BLangConstantValue lastValue = constantMap.get(nameString);
                    if (!value.equals(lastValue)) {
                        if (lastValue == null) {
                            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
                        } else {
                            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER,
                                    nameString, lastValue);
                        }
                    }
                }
            } else {
                constantMap.put(nameString, value);
            }
        }
    }

    /** Builds a finite (singleton) type containing only the given expression. */
    public BFiniteType createFiniteType(BLangConstant constant, BLangExpression expr) {
        BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE,
                Flags.asMask(EnumSet.noneOf(Flag.class)), Names.EMPTY, constant.symbol.pkgID, null,
                constant.symbol.owner, constant.symbol.pos, VIRTUAL);
        BFiniteType finiteType = new BFiniteType(finiteTypeSymbol);
        finiteType.addValue(expr);
        return finiteType;
    }

    /**
     * Builds a singleton finite type for simple-typed constant values; returns
     * null for types that cannot be narrowed to a singleton.
     */
    private BFiniteType checkType(BLangConstant constant, Object value, BType type, Location pos) {
        switch (type.getKind()) {
            case INT:
            case BYTE:
            case FLOAT:
            case DECIMAL:
                return createFiniteType(constant, createConstantNumericLiteralExpression(value, type, pos));
            case STRING:
            case NIL:
            case BOOLEAN:
                return createFiniteType(constant, createConstantLiteralExpression(value, type, pos));
            default:
                return null;
        }
    }

    private BLangNumericLiteral createConstantNumericLiteralExpression(Object value, BType type, Location pos) {
        BLangNumericLiteral literal = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        literal.value = value;
        literal.isConstant = true;
        literal.setBType(type);
        literal.pos = pos;
        return literal;
    }

    private BLangLiteral createConstantLiteralExpression(Object value, BType type, Location pos) {
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.value = value;
        literal.isConstant = true;
        literal.setBType(type);
        literal.pos = pos;
        return literal;
    }
}
|
/**
 * Resolves compile-time constant values, tracking in-progress resolutions to
 * detect cycles and remembering constants that proved unresolvable so their
 * errors are reported only once.
 */
class ConstantValueResolver extends BLangNodeVisitor {
// Per-compiler-context singleton key.
private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY =
new CompilerContext.Key<>();
// Constant whose initializer is currently being folded.
private BConstantSymbol currentConstSymbol;
// Value produced by the most recently visited expression node.
private BLangConstantValue result;
private BLangDiagnosticLog dlog;
// Position of the expression currently being folded (for diagnostics).
private Location currentPos;
private SymbolTable symbolTable;
// Constants whose values have not been computed yet, keyed by symbol.
private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>();
// Name -> resolved value, used to detect conflicting re-initializations.
private Map<String, BLangConstantValue> constantMap = new HashMap<String, BLangConstantValue>();
// Constants currently being resolved (cycle detection stack).
private ArrayList<BConstantSymbol> resolvingConstants = new ArrayList<>();
// Constants already reported as unresolvable (avoids duplicate diagnostics).
private HashSet<BConstantSymbol> unresolvableConstants = new HashSet<>();
private ConstantValueResolver(CompilerContext context) {
    // Self-register so subsequent getInstance() calls reuse this resolver.
    context.put(CONSTANT_VALUE_RESOLVER_KEY, this);
    this.symbolTable = SymbolTable.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
}
/** Returns the per-compiler-context singleton, creating it on first use. */
public static ConstantValueResolver getInstance(CompilerContext context) {
    ConstantValueResolver resolver = context.get(CONSTANT_VALUE_RESOLVER_KEY);
    // The constructor registers a new instance in the context.
    return resolver == null ? new ConstantValueResolver(context) : resolver;
}
public void resolve(List<BLangConstant> constants, PackageID packageID) {
this.dlog.setCurrentPackageId(packageID);
constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant));
constants.forEach(constant -> constant.accept(this));
constantMap.clear();
}
@Override
public void visit(BLangConstant constant) {
if (!unresolvedConstants.containsKey(constant.symbol)) {
return;
}
BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol;
this.currentConstSymbol = constant.symbol;
this.resolvingConstants.add(this.currentConstSymbol);
this.currentConstSymbol.value = visitExpr(constant.expr);
this.resolvingConstants.remove(this.currentConstSymbol);
updateSymbolType(constant);
checkUniqueness(constant);
unresolvedConstants.remove(this.currentConstSymbol);
this.currentConstSymbol = tempCurrentConstSymbol;
}
@Override
public void visit(BLangLiteral literal) {
this.result = new BLangConstantValue(literal.value, literal.getBType());
}
@Override
public void visit(BLangNumericLiteral literal) {
this.result = new BLangConstantValue(literal.value, literal.getBType());
}
@Override
public void visit(BLangConstRef constRef) {
this.result = ((BConstantSymbol) constRef.symbol).value;
}
    @Override
    public void visit(BLangSimpleVarRef varRef) {
        // Only constant references can be folded; anything else yields "no value".
        if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) {
            this.result = null;
            return;
        }
        BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol;
        BLangConstantValue constVal = constSymbol.value;
        // Already resolved earlier: reuse the cached value.
        if (constVal != null) {
            this.result = constVal;
            return;
        }
        // Direct self-reference, e.g. `const x = x;`. Note: visitExpr() has already
        // reset this.result to null before dispatching here, so no value is produced.
        if (this.currentConstSymbol == constSymbol) {
            dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name);
            return;
        }
        // Not pending resolution and has no value: it is unresolvable. Report the
        // error only once per symbol (tracked via unresolvableConstants).
        if (!this.unresolvedConstants.containsKey(constSymbol)) {
            if (this.unresolvableConstants.contains(constSymbol)) {
                this.result = null;
                return;
            }
            this.unresolvableConstants.add(constSymbol);
            dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value);
            this.result = null;
            return;
        }
        // The symbol is currently on the resolution stack: we found a cycle. Mark
        // every participant unresolvable so the error is not re-reported for each.
        if (this.resolvingConstants.contains(constSymbol)) {
            for (BConstantSymbol symbol : this.resolvingConstants) {
                this.unresolvableConstants.add(symbol);
            }
            dlog.error(varRef.pos, DiagnosticErrorCode.CONSTANT_CYCLIC_REFERENCE, this.resolvingConstants);
            this.result = null;
            return;
        }
        // Otherwise resolve the referenced constant on demand, then read its value.
        this.unresolvedConstants.get(constSymbol).accept(this);
        this.result = constSymbol.value;
    }
@Override
public void visit(BLangRecordLiteral recordLiteral) {
Map<String, BLangConstantValue> mapConstVal = new HashMap<>();
for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
String key;
BLangConstantValue value;
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField keyValuePair =
(BLangRecordLiteral.BLangRecordKeyValueField) field;
NodeKind nodeKind = keyValuePair.key.expr.getKind();
if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
key = (String) ((BLangLiteral) keyValuePair.key.expr).value;
} else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) {
key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value;
} else {
continue;
}
value = visitExpr(keyValuePair.valueExpr);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangRecordLiteral.BLangRecordVarNameField varNameField =
(BLangRecordLiteral.BLangRecordVarNameField) field;
key = varNameField.variableName.value;
value = visitExpr(varNameField);
} else {
BLangConstantValue spreadOpConstValue =
visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr);
if (spreadOpConstValue != null) {
mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value);
}
continue;
}
mapConstVal.put(key, value);
}
this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType());
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
BLangConstantValue lhs = visitExpr(binaryExpr.lhsExpr);
BLangConstantValue rhs = visitExpr(binaryExpr.rhsExpr);
this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind);
}
public void visit(BLangGroupExpr groupExpr) {
this.result = visitExpr(groupExpr.expression);
}
public void visit(BLangUnaryExpr unaryExpr) {
BLangConstantValue value = visitExpr(unaryExpr.expr);
this.result = evaluateUnaryOperator(value, unaryExpr.operator);
}
private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) {
if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) {
return new BLangConstantValue(null, this.currentConstSymbol.type);
}
try {
switch (kind) {
case ADD:
return calculateAddition(lhs, rhs);
case SUB:
return calculateSubtract(lhs, rhs);
case MUL:
return calculateMultiplication(lhs, rhs);
case DIV:
return calculateDivision(lhs, rhs);
case BITWISE_AND:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b);
case BITWISE_OR:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b);
case BITWISE_LEFT_SHIFT:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b);
case BITWISE_RIGHT_SHIFT:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b);
case BITWISE_UNSIGNED_RIGHT_SHIFT:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b);
case BITWISE_XOR:
return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b);
default:
dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
}
} catch (NumberFormatException nfe) {
} catch (ArithmeticException ae) {
dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage());
}
return new BLangConstantValue(null, this.currentConstSymbol.type);
}
private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) {
if (value == null || value.value == null) {
return new BLangConstantValue(null, this.currentConstSymbol.type);
}
try {
switch (kind) {
case ADD:
return new BLangConstantValue(value.value, currentConstSymbol.type);
case SUB:
return calculateNegation(value);
case BITWISE_COMPLEMENT:
return calculateBitWiseComplement(value);
case NOT:
return calculateBooleanComplement(value);
}
} catch (ClassCastException ce) {
}
return new BLangConstantValue(null, this.currentConstSymbol.type);
}
private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs,
BiFunction<Long, Long, Long> func) {
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
Long val = func.apply((Long) lhs.value, (Long) rhs.value);
return new BLangConstantValue(val, this.currentConstSymbol.type);
default:
dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED);
}
return new BLangConstantValue(null, this.currentConstSymbol.type);
}
private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) {
Object result = null;
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
result = (Long) lhs.value + (Long) rhs.value;
break;
case TypeTags.FLOAT:
result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
+ Double.parseDouble(String.valueOf(rhs.value)));
break;
case TypeTags.DECIMAL:
BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128);
result = resultDecimal.toPlainString();
break;
case TypeTags.STRING:
result = String.valueOf(lhs.value) + String.valueOf(rhs.value);
break;
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) {
Object result = null;
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
result = (Long) lhs.value - (Long) rhs.value;
break;
case TypeTags.FLOAT:
result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
- Double.parseDouble(String.valueOf(rhs.value)));
break;
case TypeTags.DECIMAL:
BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128);
result = resultDecimal.toPlainString();
break;
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) {
Object result = null;
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
result = (Long) lhs.value * (Long) rhs.value;
break;
case TypeTags.FLOAT:
result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
* Double.parseDouble(String.valueOf(rhs.value)));
break;
case TypeTags.DECIMAL:
BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128);
result = resultDecimal.toPlainString();
break;
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) {
Object result = null;
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
result = (Long) ((Long) lhs.value / (Long) rhs.value);
break;
case TypeTags.FLOAT:
result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
/ Double.parseDouble(String.valueOf(rhs.value)));
break;
case TypeTags.DECIMAL:
BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128);
result = resultDecimal.toPlainString();
break;
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
    /**
     * Computes {@code lhs % rhs} for the current constant's type.
     * int/byte use Java long remainder; float goes through double remainder and is
     * stored back as a String; decimal uses {@link BigDecimal#remainder} with
     * DECIMAL128 precision. Unsupported types leave {@code result} as null.
     */
    private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) {
        Object result = null;
        switch (this.currentConstSymbol.type.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
                result = (Long) ((Long) lhs.value % (Long) rhs.value);
                break;
            case TypeTags.FLOAT:
                // Float constants are kept as Strings; round-trip through double.
                result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value))
                        % Double.parseDouble(String.valueOf(rhs.value)));
                break;
            case TypeTags.DECIMAL:
                BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128);
                BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128);
                BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128);
                result = resultDecimal.toPlainString();
                break;
        }
        return new BLangConstantValue(result, currentConstSymbol.type);
    }
private BLangConstantValue calculateNegation(BLangConstantValue value) {
Object result = null;
switch (this.currentConstSymbol.type.tag) {
case TypeTags.INT:
result = -1 * ((Long) (value.value));
break;
case TypeTags.FLOAT:
result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value)));
break;
case TypeTags.DECIMAL:
BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128);
BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128);
BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128);
result = resultDecimal.toPlainString();
break;
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) {
Object result = null;
if (this.currentConstSymbol.type.tag == TypeTags.INT) {
result = ~((Long) (value.value));
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) {
Object result = null;
if (this.currentConstSymbol.type.tag == TypeTags.BOOLEAN) {
result = !((Boolean) (value.value));
}
return new BLangConstantValue(result, currentConstSymbol.type);
}
    /**
     * Dispatches {@code node} to the matching visit() method and returns the folded
     * constant value, or null for expression kinds that cannot be constant-folded.
     * Saves and restores the visitor's {@code result}/{@code currentPos} state so
     * nested evaluations do not clobber the caller's in-progress computation.
     */
    private BLangConstantValue visitExpr(BLangExpression node) {
        // Nodes that failed (or skipped) type checking cannot be safely folded.
        if (!node.typeChecked) {
            return null;
        }
        switch (node.getKind()) {
            case LITERAL:
            case NUMERIC_LITERAL:
            case RECORD_LITERAL_EXPR:
            case SIMPLE_VARIABLE_REF:
            case BINARY_EXPR:
            case GROUP_EXPR:
            case UNARY_EXPR:
                // Save visitor state; the visit() methods communicate via this.result.
                BLangConstantValue prevResult = this.result;
                Location prevPos = this.currentPos;
                this.currentPos = node.pos;
                this.result = null;
                node.accept(this);
                BLangConstantValue newResult = this.result;
                // Restore the outer computation's state before returning.
                this.result = prevResult;
                this.currentPos = prevPos;
                return newResult;
            default:
                return null;
        }
    }
    /**
     * Reports an error when a constant name is declared more than once. A redeclaration
     * with the same resolved value is tolerated silently; a null value on either side
     * is treated as a plain duplicate-symbol error.
     */
    private void checkUniqueness(BLangConstant constant) {
        if (constant.symbol.kind == SymbolKind.CONSTANT) {
            String nameString = constant.name.value;
            BLangConstantValue value = constant.symbol.value;
            if (constantMap.containsKey(nameString)) {
                if (value == null) {
                    // Redeclared, and the new declaration has no resolvable value.
                    dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
                } else {
                    BLangConstantValue lastValue = constantMap.get(nameString);
                    if (!value.equals(lastValue)) {
                        if (lastValue == null) {
                            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
                        } else {
                            // Same name, different value: report both name and old value.
                            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER,
                                    nameString, lastValue);
                        }
                    }
                }
            } else {
                // First occurrence: remember the value for later comparisons.
                constantMap.put(nameString, value);
            }
        }
    }
private BFiniteType createFiniteType(BLangConstant constant, BLangExpression expr) {
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, 0, Names.EMPTY,
constant.symbol.pkgID, null, constant.symbol.owner, constant.symbol.pos, VIRTUAL);
BFiniteType finiteType = new BFiniteType(finiteTypeSymbol);
finiteType.addValue(expr);
return finiteType;
}
private BType checkType(BLangExpression expr, BLangConstant constant, Object value, BType type, Location pos) {
if (expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
((BLangSimpleVarRef) expr).symbol.type.getKind() == TypeKind.FINITE) {
return ((BLangSimpleVarRef) expr).symbol.type;
}
switch (type.tag) {
case TypeTags.INT:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
BLangNumericLiteral numericLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
return createFiniteType(constant, updateLiteral(numericLiteral, value, type, pos));
case TypeTags.BYTE:
BLangNumericLiteral byteLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
return createFiniteType(constant, updateLiteral(byteLiteral, value, symbolTable.intType, pos));
case TypeTags.STRING:
case TypeTags.NIL:
case TypeTags.BOOLEAN:
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
return createFiniteType(constant, updateLiteral(literal, value, type, pos));
default:
return null;
}
}
private BLangLiteral updateLiteral(BLangLiteral literal, Object value, BType type, Location pos) {
literal.value = value;
literal.isConstant = true;
literal.setBType(type);
literal.pos = pos;
return literal;
}
}
|
These should never be equal, though — the assertion compares a generation with an application name, so it can't fail. ;)
|
public void createFromActiveSession() {
    PrepareResult result = deployApp(testApp);
    long sessionId = applicationRepository.createSessionFromExisting(applicationId(),
                                                                     new SilentDeployLogger(),
                                                                     false,
                                                                     timeoutBudget);
    long originalSessionId = result.sessionId();
    ApplicationMetaData originalApplicationMetaData = getApplicationMetaData(applicationId(), originalSessionId);
    ApplicationMetaData applicationMetaData = getApplicationMetaData(applicationId(), sessionId);
    assertNotEquals(sessionId, originalSessionId);
    assertEquals(applicationMetaData.getApplicationName(), originalApplicationMetaData.getApplicationName());
    assertEquals(applicationMetaData.getPreviousActiveGeneration(), originalApplicationMetaData.getGeneration().longValue());
    // Bug fix: this previously compared the new generation with the original
    // application *name* — two values that can never be equal, making the
    // assertion vacuous. Compare generation with generation instead.
    assertNotEquals(applicationMetaData.getGeneration(), originalApplicationMetaData.getGeneration());
    assertEquals(applicationMetaData.getDeployedByUser(), originalApplicationMetaData.getDeployedByUser());
}
|
assertNotEquals(applicationMetaData.getGeneration(), originalApplicationMetaData.getApplicationName());
|
// Verifies that creating a session from the currently active session yields a new
// session whose metadata links back to the original (same name/deployer, previous
// active generation equal to the original's generation, new generation and id).
public void createFromActiveSession() {
    PrepareResult result = deployApp(testApp);
    long sessionId = applicationRepository.createSessionFromExisting(applicationId(),
                                                                     new SilentDeployLogger(),
                                                                     false,
                                                                     timeoutBudget);
    long originalSessionId = result.sessionId();
    ApplicationMetaData originalApplicationMetaData = getApplicationMetaData(applicationId(), originalSessionId);
    ApplicationMetaData applicationMetaData = getApplicationMetaData(applicationId(), sessionId);
    // A fresh session id must have been allocated.
    assertNotEquals(sessionId, originalSessionId);
    assertEquals(applicationMetaData.getApplicationName(), originalApplicationMetaData.getApplicationName());
    // The new session's "previous active generation" points at the original generation.
    assertEquals(applicationMetaData.getPreviousActiveGeneration(), originalApplicationMetaData.getGeneration().longValue());
    // The new session gets its own, different generation.
    assertNotEquals(applicationMetaData.getGeneration(), originalApplicationMetaData.getGeneration());
    assertEquals(applicationMetaData.getDeployedByUser(), originalApplicationMetaData.getDeployedByUser())
}
|
class ApplicationRepositoryTest {
private final static File testApp = new File("src/test/apps/app");
private final static File testAppJdiscOnly = new File("src/test/apps/app-jdisc-only");
private final static File testAppJdiscOnlyRestart = new File("src/test/apps/app-jdisc-only-restart");
private final static File testAppLogServerWithContainer = new File("src/test/apps/app-logserver-with-container");
private final static TenantName tenant1 = TenantName.from("test1");
private final static TenantName tenant2 = TenantName.from("test2");
private final static TenantName tenant3 = TenantName.from("test3");
private final static Clock clock = Clock.systemUTC();
private ApplicationRepository applicationRepository;
private TenantRepository tenantRepository;
private SessionHandlerTest.MockProvisioner provisioner;
private OrchestratorMock orchestrator;
private TimeoutBudget timeoutBudget;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Before
public void setup() {
Curator curator = new MockCurator();
tenantRepository = new TenantRepository(new TestComponentRegistry.Builder()
.curator(curator)
.build());
tenantRepository.addTenant(tenant1);
tenantRepository.addTenant(tenant2);
tenantRepository.addTenant(tenant3);
orchestrator = new OrchestratorMock();
provisioner = new SessionHandlerTest.MockProvisioner();
applicationRepository = new ApplicationRepository(tenantRepository, provisioner, orchestrator, clock);
timeoutBudget = new TimeoutBudget(clock, Duration.ofSeconds(60));
}
@Test
public void prepareAndActivate() throws IOException {
PrepareResult result = prepareAndActivateApp(testApp);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertTrue(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
public void prepareAndActivateWithRestart() throws IOException {
prepareAndActivateApp(testAppJdiscOnly);
PrepareResult result = prepareAndActivateApp(testAppJdiscOnlyRestart);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertFalse(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
public void createAndPrepareAndActivate() {
PrepareResult result = deployApp(testApp);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertTrue(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
@Test
public void testSuspension() {
deployApp(testApp);
assertFalse(applicationRepository.isSuspended(applicationId()));
orchestrator.suspend(applicationId());
assertTrue(applicationRepository.isSuspended(applicationId()));
}
@Test
public void getLogs() {
WireMockServer wireMock = new WireMockServer(wireMockConfig().port(8080));
wireMock.start();
WireMock.configureFor("localhost", wireMock.port());
stubFor(get(urlEqualTo("/logs"))
.willReturn(aResponse()
.withStatus(200)));
wireMock.start();
deployApp(testAppLogServerWithContainer);
HttpResponse response = applicationRepository.getLogs(applicationId(), "");
assertEquals(200, response.getStatus());
wireMock.stop();
}
@Test
public void deleteUnusedTenants() {
Instant now = ManualClock.at("1970-01-01T01:00:00");
deployApp(testApp);
deployApp(testApp, new PrepareParams.Builder().applicationId(applicationId(tenant2)).build());
Duration ttlForUnusedTenant = Duration.ofHours(1);
assertTrue(applicationRepository.deleteUnusedTenants(ttlForUnusedTenant, now).isEmpty());
ttlForUnusedTenant = Duration.ofMillis(1);
assertEquals(tenant3, applicationRepository.deleteUnusedTenants(ttlForUnusedTenant, now).iterator().next());
applicationRepository.delete(applicationId());
Set<TenantName> tenantsDeleted = applicationRepository.deleteUnusedTenants(Duration.ofMillis(1), now);
assertTrue(tenantsDeleted.contains(tenant1));
assertFalse(tenantsDeleted.contains(tenant2));
}
@Test
public void decideVersion() {
ApplicationId regularApp = ApplicationId.from("tenant1", "application1", "default");
ApplicationId systemApp = ApplicationId.from("hosted-vespa", "routing", "default");
ApplicationId testerApp = ApplicationId.from("tenant1", "application1", "default-t");
Version sessionVersion = new Version(Vtag.currentVersion.getMajor(), 0);
Version oldSessionVersion = Version.fromString("5.0");
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.prod, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.dev, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.perf, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.prod, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.dev, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.perf, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.prod, sessionVersion, false));
assertEquals(Vtag.currentVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, sessionVersion, false));
assertEquals(oldSessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, oldSessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, sessionVersion, true));
assertEquals(Vtag.currentVersion, ApplicationRepository.decideVersion(regularApp, Environment.perf, sessionVersion, false));
}
@Test
public void deleteUnusedFileReferences() throws IOException {
File fileReferencesDir = temporaryFolder.newFolder();
File filereferenceDir = createFilereferenceOnDisk(new File(fileReferencesDir, "foo"), Instant.now().minus(Duration.ofDays(15)));
File filereferenceDir2 = createFilereferenceOnDisk(new File(fileReferencesDir, "baz"), Instant.now());
tenantRepository.addTenant(tenant1);
Provisioner provisioner = new SessionHandlerTest.MockProvisioner();
applicationRepository = new ApplicationRepository(tenantRepository, provisioner, orchestrator, clock);
timeoutBudget = new TimeoutBudget(clock, Duration.ofSeconds(60));
PrepareParams prepareParams = new PrepareParams.Builder().applicationId(applicationId()).ignoreValidationErrors(true).build();
deployApp(new File("src/test/apps/app"), prepareParams);
Set<String> toBeDeleted = applicationRepository.deleteUnusedFiledistributionReferences(fileReferencesDir);
assertEquals(Collections.singleton("foo"), toBeDeleted);
assertFalse(filereferenceDir.exists());
assertTrue(filereferenceDir2.exists());
}
private File createFilereferenceOnDisk(File filereferenceDir, Instant lastModifiedTime) {
assertTrue(filereferenceDir.mkdir());
File bar = new File(filereferenceDir, "file");
IOUtils.writeFile(bar, Utf8.toBytes("test"));
assertTrue(filereferenceDir.setLastModified(lastModifiedTime.toEpochMilli()));
return filereferenceDir;
}
@Test
public void delete() {
{
PrepareResult result = deployApp(testApp);
long sessionId = result.sessionId();
Tenant tenant = tenantRepository.getTenant(applicationId().tenant());
LocalSession applicationData = tenant.getLocalSessionRepo().getSession(sessionId);
assertNotNull(applicationData);
assertNotNull(applicationData.getApplicationId());
assertNotNull(tenant.getRemoteSessionRepo().getSession(sessionId));
assertNotNull(applicationRepository.getActiveSession(applicationId()));
assertTrue(applicationRepository.delete(applicationId()));
assertNull(applicationRepository.getActiveSession(applicationId()));
assertNull(tenant.getLocalSessionRepo().getSession(sessionId));
assertNull(tenant.getRemoteSessionRepo().getSession(sessionId));
assertTrue(provisioner.removed);
assertThat(provisioner.lastApplicationId.tenant(), is(tenant.getName()));
assertThat(provisioner.lastApplicationId, is(applicationId()));
assertFalse(applicationRepository.delete(applicationId()));
}
{
deployApp(testApp);
assertTrue(applicationRepository.delete(applicationId()));
deployApp(testApp);
ApplicationId fooId = applicationId(tenant2);
PrepareParams prepareParams2 = new PrepareParams.Builder().applicationId(fooId).build();
deployApp(testApp, prepareParams2);
assertNotNull(applicationRepository.getActiveSession(fooId));
assertTrue(applicationRepository.delete(fooId));
assertThat(provisioner.lastApplicationId, is(fooId));
assertNotNull(applicationRepository.getActiveSession(applicationId()));
assertTrue(applicationRepository.delete(applicationId()));
}
}
@Test
public void testDeletingInactiveSessions() {
ManualClock clock = new ManualClock(Instant.now());
ConfigserverConfig configserverConfig =
new ConfigserverConfig(new ConfigserverConfig.Builder()
.configServerDBDir(Files.createTempDir().getAbsolutePath())
.configDefinitionsDir(Files.createTempDir().getAbsolutePath())
.sessionLifetime(60));
DeployTester tester = new DeployTester(configserverConfig, clock);
tester.deployApp("src/test/apps/app", clock.instant());
clock.advance(Duration.ofSeconds(10));
Optional<Deployment> deployment2 = tester.redeployFromLocalActive();
assertTrue(deployment2.isPresent());
deployment2.get().activate();
long activeSessionId = tester.tenant().getApplicationRepo().getSessionIdForApplication(tester.applicationId());
clock.advance(Duration.ofSeconds(10));
Optional<com.yahoo.config.provision.Deployment> deployment3 = tester.redeployFromLocalActive();
assertTrue(deployment3.isPresent());
deployment3.get().prepare();
LocalSession deployment3session = ((com.yahoo.vespa.config.server.deploy.Deployment) deployment3.get()).session();
assertNotEquals(activeSessionId, deployment3session);
assertEquals(activeSessionId, tester.tenant().getApplicationRepo().getSessionIdForApplication(tester.applicationId()));
assertEquals(3, tester.tenant().getLocalSessionRepo().listSessions().size());
clock.advance(Duration.ofHours(1));
tester.applicationRepository().deleteExpiredLocalSessions();
final Collection<LocalSession> sessions = tester.tenant().getLocalSessionRepo().listSessions();
assertEquals(1, sessions.size());
assertEquals(3, new ArrayList<>(sessions).get(0).getSessionId());
assertEquals(0, applicationRepository.deleteExpiredRemoteSessions(Duration.ofSeconds(0)));
}
private PrepareResult prepareAndActivateApp(File application) throws IOException {
FilesApplicationPackage appDir = FilesApplicationPackage.fromFile(application);
ApplicationId applicationId = applicationId();
long sessionId = applicationRepository.createSession(applicationId, timeoutBudget, appDir.getAppDir());
return applicationRepository.prepareAndActivate(tenantRepository.getTenant(applicationId.tenant()),
sessionId, prepareParams(), false, Instant.now());
}
private PrepareResult deployApp(File applicationPackage) {
return deployApp(applicationPackage, prepareParams());
}
private PrepareResult deployApp(File applicationPackage, PrepareParams prepareParams) {
return applicationRepository.deploy(applicationPackage, prepareParams);
}
private PrepareParams prepareParams() {
return new PrepareParams.Builder().applicationId(applicationId()).build();
}
private ApplicationId applicationId() {
return ApplicationId.from(tenant1, ApplicationName.from("testapp"), InstanceName.defaultName());
}
private ApplicationId applicationId(TenantName tenantName) {
return ApplicationId.from(tenantName, ApplicationName.from("testapp"), InstanceName.defaultName());
}
private ApplicationMetaData getApplicationMetaData(ApplicationId applicationId, long sessionId) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return applicationRepository.getMetadataFromSession(tenant, sessionId);
}
}
|
class ApplicationRepositoryTest {
private final static File testApp = new File("src/test/apps/app");
private final static File testAppJdiscOnly = new File("src/test/apps/app-jdisc-only");
private final static File testAppJdiscOnlyRestart = new File("src/test/apps/app-jdisc-only-restart");
private final static File testAppLogServerWithContainer = new File("src/test/apps/app-logserver-with-container");
private final static TenantName tenant1 = TenantName.from("test1");
private final static TenantName tenant2 = TenantName.from("test2");
private final static TenantName tenant3 = TenantName.from("test3");
private final static Clock clock = Clock.systemUTC();
private ApplicationRepository applicationRepository;
private TenantRepository tenantRepository;
private SessionHandlerTest.MockProvisioner provisioner;
private OrchestratorMock orchestrator;
private TimeoutBudget timeoutBudget;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Before
public void setup() {
Curator curator = new MockCurator();
tenantRepository = new TenantRepository(new TestComponentRegistry.Builder()
.curator(curator)
.build());
tenantRepository.addTenant(tenant1);
tenantRepository.addTenant(tenant2);
tenantRepository.addTenant(tenant3);
orchestrator = new OrchestratorMock();
provisioner = new SessionHandlerTest.MockProvisioner();
applicationRepository = new ApplicationRepository(tenantRepository, provisioner, orchestrator, clock);
timeoutBudget = new TimeoutBudget(clock, Duration.ofSeconds(60));
}
@Test
public void prepareAndActivate() throws IOException {
PrepareResult result = prepareAndActivateApp(testApp);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertTrue(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
public void prepareAndActivateWithRestart() throws IOException {
prepareAndActivateApp(testAppJdiscOnly);
PrepareResult result = prepareAndActivateApp(testAppJdiscOnlyRestart);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertFalse(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
public void createAndPrepareAndActivate() {
PrepareResult result = deployApp(testApp);
assertTrue(result.configChangeActions().getRefeedActions().isEmpty());
assertTrue(result.configChangeActions().getRestartActions().isEmpty());
}
@Test
@Test
public void testSuspension() {
deployApp(testApp);
assertFalse(applicationRepository.isSuspended(applicationId()));
orchestrator.suspend(applicationId());
assertTrue(applicationRepository.isSuspended(applicationId()));
}
@Test
public void getLogs() {
WireMockServer wireMock = new WireMockServer(wireMockConfig().port(8080));
wireMock.start();
WireMock.configureFor("localhost", wireMock.port());
stubFor(get(urlEqualTo("/logs"))
.willReturn(aResponse()
.withStatus(200)));
wireMock.start();
deployApp(testAppLogServerWithContainer);
HttpResponse response = applicationRepository.getLogs(applicationId(), "");
assertEquals(200, response.getStatus());
wireMock.stop();
}
@Test
public void deleteUnusedTenants() {
Instant now = ManualClock.at("1970-01-01T01:00:00");
deployApp(testApp);
deployApp(testApp, new PrepareParams.Builder().applicationId(applicationId(tenant2)).build());
Duration ttlForUnusedTenant = Duration.ofHours(1);
assertTrue(applicationRepository.deleteUnusedTenants(ttlForUnusedTenant, now).isEmpty());
ttlForUnusedTenant = Duration.ofMillis(1);
assertEquals(tenant3, applicationRepository.deleteUnusedTenants(ttlForUnusedTenant, now).iterator().next());
applicationRepository.delete(applicationId());
Set<TenantName> tenantsDeleted = applicationRepository.deleteUnusedTenants(Duration.ofMillis(1), now);
assertTrue(tenantsDeleted.contains(tenant1));
assertFalse(tenantsDeleted.contains(tenant2));
}
@Test
public void decideVersion() {
ApplicationId regularApp = ApplicationId.from("tenant1", "application1", "default");
ApplicationId systemApp = ApplicationId.from("hosted-vespa", "routing", "default");
ApplicationId testerApp = ApplicationId.from("tenant1", "application1", "default-t");
Version sessionVersion = new Version(Vtag.currentVersion.getMajor(), 0);
Version oldSessionVersion = Version.fromString("5.0");
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.prod, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.dev, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(systemApp, Environment.perf, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.prod, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.dev, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(testerApp, Environment.perf, sessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.prod, sessionVersion, false));
assertEquals(Vtag.currentVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, sessionVersion, false));
assertEquals(oldSessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, oldSessionVersion, false));
assertEquals(sessionVersion, ApplicationRepository.decideVersion(regularApp, Environment.dev, sessionVersion, true));
assertEquals(Vtag.currentVersion, ApplicationRepository.decideVersion(regularApp, Environment.perf, sessionVersion, false));
}
@Test
public void deleteUnusedFileReferences() throws IOException {
File fileReferencesDir = temporaryFolder.newFolder();
File filereferenceDir = createFilereferenceOnDisk(new File(fileReferencesDir, "foo"), Instant.now().minus(Duration.ofDays(15)));
File filereferenceDir2 = createFilereferenceOnDisk(new File(fileReferencesDir, "baz"), Instant.now());
tenantRepository.addTenant(tenant1);
Provisioner provisioner = new SessionHandlerTest.MockProvisioner();
applicationRepository = new ApplicationRepository(tenantRepository, provisioner, orchestrator, clock);
timeoutBudget = new TimeoutBudget(clock, Duration.ofSeconds(60));
PrepareParams prepareParams = new PrepareParams.Builder().applicationId(applicationId()).ignoreValidationErrors(true).build();
deployApp(new File("src/test/apps/app"), prepareParams);
Set<String> toBeDeleted = applicationRepository.deleteUnusedFiledistributionReferences(fileReferencesDir);
assertEquals(Collections.singleton("foo"), toBeDeleted);
assertFalse(filereferenceDir.exists());
assertTrue(filereferenceDir2.exists());
}
private File createFilereferenceOnDisk(File filereferenceDir, Instant lastModifiedTime) {
assertTrue(filereferenceDir.mkdir());
File bar = new File(filereferenceDir, "file");
IOUtils.writeFile(bar, Utf8.toBytes("test"));
assertTrue(filereferenceDir.setLastModified(lastModifiedTime.toEpochMilli()));
return filereferenceDir;
}
    /**
     * Deleting an application removes its local and remote session data, deallocates the
     * provisioned resources, and returns false when deleting again. Other applications
     * are unaffected by a deletion.
     */
    @Test
    public void delete() {
        {
            PrepareResult result = deployApp(testApp);
            long sessionId = result.sessionId();
            Tenant tenant = tenantRepository.getTenant(applicationId().tenant());
            LocalSession applicationData = tenant.getLocalSessionRepo().getSession(sessionId);
            assertNotNull(applicationData);
            assertNotNull(applicationData.getApplicationId());
            assertNotNull(tenant.getRemoteSessionRepo().getSession(sessionId));
            assertNotNull(applicationRepository.getActiveSession(applicationId()));
            // Deleting removes all session data and releases provisioned resources
            assertTrue(applicationRepository.delete(applicationId()));
            assertNull(applicationRepository.getActiveSession(applicationId()));
            assertNull(tenant.getLocalSessionRepo().getSession(sessionId));
            assertNull(tenant.getRemoteSessionRepo().getSession(sessionId));
            assertTrue(provisioner.removed);
            assertThat(provisioner.lastApplicationId.tenant(), is(tenant.getName()));
            assertThat(provisioner.lastApplicationId, is(applicationId()));
            // A second delete is a no-op and reports failure
            assertFalse(applicationRepository.delete(applicationId()));
        }
        {
            deployApp(testApp);
            assertTrue(applicationRepository.delete(applicationId()));
            deployApp(testApp);
            // Deploy a second application under another tenant
            ApplicationId fooId = applicationId(tenant2);
            PrepareParams prepareParams2 = new PrepareParams.Builder().applicationId(fooId).build();
            deployApp(testApp, prepareParams2);
            assertNotNull(applicationRepository.getActiveSession(fooId));
            assertTrue(applicationRepository.delete(fooId));
            assertThat(provisioner.lastApplicationId, is(fooId));
            // The first application is untouched by deleting the second one
            assertNotNull(applicationRepository.getActiveSession(applicationId()));
            assertTrue(applicationRepository.delete(applicationId()));
        }
    }
    /**
     * Local sessions older than the configured session lifetime are deleted, except the
     * session belonging to the currently active deployment.
     */
    @Test
    public void testDeletingInactiveSessions() {
        ManualClock clock = new ManualClock(Instant.now());
        // Session lifetime of 60 seconds — sessions older than this become deletable
        ConfigserverConfig configserverConfig =
                new ConfigserverConfig(new ConfigserverConfig.Builder()
                        .configServerDBDir(Files.createTempDir().getAbsolutePath())
                        .configDefinitionsDir(Files.createTempDir().getAbsolutePath())
                        .sessionLifetime(60));
        DeployTester tester = new DeployTester(configserverConfig, clock);
        tester.deployApp("src/test/apps/app", clock.instant());
        clock.advance(Duration.ofSeconds(10));
        // Second deployment: redeploy and activate, creating a new active session
        Optional<Deployment> deployment2 = tester.redeployFromLocalActive();
        assertTrue(deployment2.isPresent());
        deployment2.get().activate();
        long activeSessionId = tester.tenant().getApplicationRepo().getSessionIdForApplication(tester.applicationId());
        clock.advance(Duration.ofSeconds(10));
        // Third deployment: prepared but never activated, so it stays inactive
        Optional<com.yahoo.config.provision.Deployment> deployment3 = tester.redeployFromLocalActive();
        assertTrue(deployment3.isPresent());
        deployment3.get().prepare();
        LocalSession deployment3session = ((com.yahoo.vespa.config.server.deploy.Deployment) deployment3.get()).session();
        assertNotEquals(activeSessionId, deployment3session);
        // The active session is still the one from deployment 2
        assertEquals(activeSessionId, tester.tenant().getApplicationRepo().getSessionIdForApplication(tester.applicationId()));
        assertEquals(3, tester.tenant().getLocalSessionRepo().listSessions().size());
        // Move past the session lifetime; only expired, non-active sessions are deleted
        clock.advance(Duration.ofHours(1));
        tester.applicationRepository().deleteExpiredLocalSessions();
        final Collection<LocalSession> sessions = tester.tenant().getLocalSessionRepo().listSessions();
        assertEquals(1, sessions.size());
        assertEquals(3, new ArrayList<>(sessions).get(0).getSessionId());
        assertEquals(0, applicationRepository.deleteExpiredRemoteSessions(Duration.ofSeconds(0)));
    }
private PrepareResult prepareAndActivateApp(File application) throws IOException {
FilesApplicationPackage appDir = FilesApplicationPackage.fromFile(application);
ApplicationId applicationId = applicationId();
long sessionId = applicationRepository.createSession(applicationId, timeoutBudget, appDir.getAppDir());
return applicationRepository.prepareAndActivate(tenantRepository.getTenant(applicationId.tenant()),
sessionId, prepareParams(), false, Instant.now());
}
private PrepareResult deployApp(File applicationPackage) {
return deployApp(applicationPackage, prepareParams());
}
    /** Deploys the given package with explicit prepare params. */
    private PrepareResult deployApp(File applicationPackage, PrepareParams prepareParams) {
        return applicationRepository.deploy(applicationPackage, prepareParams);
    }
    /** Default prepare params: the test application id, everything else default. */
    private PrepareParams prepareParams() {
        return new PrepareParams.Builder().applicationId(applicationId()).build();
    }
    /** The id used for the default test application: tenant1/testapp/default. */
    private ApplicationId applicationId() {
        return ApplicationId.from(tenant1, ApplicationName.from("testapp"), InstanceName.defaultName());
    }
    /** The test application id ("testapp"/default instance) under the given tenant. */
    private ApplicationId applicationId(TenantName tenantName) {
        return ApplicationId.from(tenantName, ApplicationName.from("testapp"), InstanceName.defaultName());
    }
private ApplicationMetaData getApplicationMetaData(ApplicationId applicationId, long sessionId) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return applicationRepository.getMetadataFromSession(tenant, sessionId);
}
}
|
This method is no longer used.
|
void testExceptionHistoryWithTaskFailure() throws Exception {
final Exception expectedException = new Exception("Expected Local Exception");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException)));
};
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
|
};
|
    /**
     * Verifies that a local task failure produces exactly one root exception history entry
     * whose deserialized exception equals the one the task failed with.
     *
     * <p>NOTE(review): this is a byte-identical copy of a method that appears earlier in
     * this file — two same-named methods in one class do not compile; one of the copies
     * should be removed.
     */
    void testExceptionHistoryWithTaskFailure() throws Exception {
        final Exception expectedException = new Exception("Expected Local Exception");
        // Fail the second execution attempt with the expected exception
        BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
                (scheduler, attemptIds) -> {
                    final ExecutionAttemptID attemptId = attemptIds.get(1);
                    scheduler.updateTaskExecutionState(
                            new TaskExecutionStateTransition(
                                    new TaskExecutionState(
                                            attemptId, ExecutionState.FAILED, expectedException)));
                };
        final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
                new ExceptionHistoryTester(singleThreadMainThreadExecutor)
                        .withTestLogic(testLogic)
                        .run();
        // Exactly one root entry, carrying the original exception
        assertThat(actualExceptionHistory).hasSize(1);
        final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
        assertThat(failure.getException().deserializeError(classLoader))
                .isEqualTo(expectedException);
    }
|
class AdaptiveSchedulerTest {
    private static final Duration DEFAULT_TIMEOUT = Duration.ofHours(1);
    private static final int PARALLELISM = 4;
    // Shared no-op vertex used by most job graphs in these tests
    private static final JobVertex JOB_VERTEX = createNoOpVertex("v1", PARALLELISM);
    private static final Logger LOG = LoggerFactory.getLogger(AdaptiveSchedulerTest.class);
    @RegisterExtension
    public static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
            TestingUtils.defaultExecutorExtension();
    @RegisterExtension
    public static final TestExecutorExtension<ScheduledExecutorService> TEST_EXECUTOR_RESOURCE =
            new TestExecutorExtension<>(Executors::newSingleThreadScheduledExecutor);
    // Main-thread executor triggered manually from the test thread
    private final ManuallyTriggeredComponentMainThreadExecutor mainThreadExecutor =
            new ManuallyTriggeredComponentMainThreadExecutor(Thread.currentThread());
    // Main-thread executor backed by a real single-threaded executor, for async scenarios
    private final ComponentMainThreadExecutor singleThreadMainThreadExecutor =
            ComponentMainThreadExecutorServiceAdapter.forSingleThreadExecutor(
                    TEST_EXECUTOR_RESOURCE.getExecutor());
    private final ClassLoader classLoader = ClassLoader.getSystemClassLoader();
@Test
void testInitialState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(scheduler.getState()).isInstanceOf(Created.class);
}
@Test
void testArchivedCheckpointingSettingsNotNullIfCheckpointingIsEnabled() throws Exception {
final JobGraph jobGraph = createJobGraph();
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
CheckpointCoordinatorConfiguration.builder().build(), null));
final ArchivedExecutionGraph archivedExecutionGraph =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor())
.getArchivedExecutionGraph(JobStatus.INITIALIZING, null);
ArchivedExecutionGraphTest.assertContainsCheckpointSettings(archivedExecutionGraph);
}
@Test
void testArchivedJobVerticesPresent() throws Exception {
final JobGraph jobGraph = createJobGraph();
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
CheckpointCoordinatorConfiguration.builder().build(), null));
final ArchivedExecutionGraph archivedExecutionGraph =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor())
.getArchivedExecutionGraph(JobStatus.INITIALIZING, null);
ArchivedExecutionJobVertex jobVertex =
archivedExecutionGraph.getJobVertex(JOB_VERTEX.getID());
assertThat(jobVertex)
.isNotNull()
.satisfies(
archived -> {
assertThat(archived.getParallelism())
.isEqualTo(JOB_VERTEX.getParallelism());
assertThat(archived.getMaxParallelism()).isEqualTo(128);
});
ArchivedExecutionGraphTest.assertContainsCheckpointSettings(archivedExecutionGraph);
}
@Test
void testIsState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
final State state = scheduler.getState();
assertThat(scheduler.isState(state)).isTrue();
assertThat(scheduler.isState(new DummyState())).isFalse();
}
@Test
void testRunIfState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
AtomicBoolean ran = new AtomicBoolean(false);
scheduler.runIfState(scheduler.getState(), () -> ran.set(true));
assertThat(ran.get()).isTrue();
}
@Test
void testRunIfStateWithStateMismatch() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
AtomicBoolean ran = new AtomicBoolean(false);
scheduler.runIfState(new DummyState(), () -> ran.set(true));
assertThat(ran.get()).isFalse();
}
@Test
void testHasEnoughResourcesReturnsFalseIfUnsatisfied() {
final ResourceCounter resourceRequirement =
ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1);
assertThat(
AdaptiveScheduler.hasDesiredResources(
resourceRequirement, Collections.emptyList()))
.isFalse();
}
@Test
void testHasEnoughResourcesReturnsTrueIfSatisfied() {
final ResourceCounter resourceRequirement =
ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1);
final Collection<TestSlotInfo> freeSlots =
createSlotInfosForResourceRequirements(resourceRequirement);
assertThat(AdaptiveScheduler.hasDesiredResources(resourceRequirement, freeSlots)).isTrue();
}
private Collection<TestSlotInfo> createSlotInfosForResourceRequirements(
ResourceCounter resourceRequirements) {
final Collection<TestSlotInfo> slotInfos = new ArrayList<>();
for (Map.Entry<ResourceProfile, Integer> resourceProfileCount :
resourceRequirements.getResourcesWithCount()) {
for (int i = 0; i < resourceProfileCount.getValue(); i++) {
slotInfos.add(new TestSlotInfo(resourceProfileCount.getKey()));
}
}
return slotInfos;
}
@Test
void testHasEnoughResourcesUsesUnmatchedSlotsAsUnknown() throws Exception {
final int numRequiredSlots = 1;
final ResourceCounter requiredResources =
ResourceCounter.withResource(ResourceProfile.UNKNOWN, numRequiredSlots);
final ResourceCounter providedResources =
ResourceCounter.withResource(
ResourceProfile.newBuilder().setCpuCores(1).build(), numRequiredSlots);
final Collection<TestSlotInfo> freeSlots =
createSlotInfosForResourceRequirements(providedResources);
assertThat(AdaptiveScheduler.hasDesiredResources(requiredResources, freeSlots)).isTrue();
}
    /**
     * When slots are offered, scheduling produces an execution graph whose vertex
     * parallelism matches the number of available slots (2, less than PARALLELISM), and
     * whose JSON plan contains the single job vertex.
     */
    @Test
    void testExecutionGraphGenerationWithAvailableResources() throws Exception {
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Short resource wait so the job starts as soon as slots arrive
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setJobMasterConfiguration(configuration)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        // Offer fewer slots than PARALLELISM
        final int numAvailableSlots = 2;
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(numAvailableSlots);
        // Start scheduling and offer slots on the scheduler's main thread
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, numAvailableSlots)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(numAvailableSlots);
        // Read the execution graph from the main thread to avoid races with the scheduler
        final ArchivedExecutionGraph executionGraph =
                CompletableFuture.supplyAsync(
                                () -> scheduler.requestJob().getArchivedExecutionGraph(),
                                singleThreadMainThreadExecutor)
                        .join();
        assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
                .isEqualTo(numAvailableSlots);
        assertThat(
                        JacksonMapperFactory.createObjectMapper()
                                .readTree(executionGraph.getJsonPlan())
                                .get("nodes")
                                .size())
                .isEqualTo(1);
    }
    /**
     * The initialization timestamp passed to the builder must be reported by the
     * execution graph as the INITIALIZING status timestamp.
     */
    @Test
    void testExecutionGraphGenerationSetsInitializationTimestamp() throws Exception {
        final long initializationTimestamp = 42L;
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Short resource wait so the job starts as soon as slots arrive
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        final AdaptiveScheduler adaptiveScheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setInitializationTimestamp(initializationTimestamp)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setJobMasterConfiguration(configuration)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(PARALLELISM);
        // Start scheduling and offer enough slots for full parallelism on the main thread
        singleThreadMainThreadExecutor.execute(
                () -> {
                    adaptiveScheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, PARALLELISM)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(1);
        // Read the execution graph from the main thread to avoid races with the scheduler
        final ArchivedExecutionGraph executionGraph =
                CompletableFuture.supplyAsync(
                                () -> adaptiveScheduler.requestJob().getArchivedExecutionGraph(),
                                singleThreadMainThreadExecutor)
                        .join();
        assertThat(executionGraph.getStatusTimestamp(JobStatus.INITIALIZING))
                .isEqualTo(initializationTimestamp);
    }
@Test
void testInitializationTimestampForwarding() throws Exception {
final long expectedInitializationTimestamp = 42L;
final AdaptiveScheduler adaptiveScheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setInitializationTimestamp(expectedInitializationTimestamp)
.build(EXECUTOR_RESOURCE.getExecutor());
final long initializationTimestamp =
adaptiveScheduler
.requestJob()
.getArchivedExecutionGraph()
.getStatusTimestamp(JobStatus.INITIALIZING);
assertThat(initializationTimestamp).isEqualTo(expectedInitializationTimestamp);
}
@Test
void testFatalErrorsForwardedToFatalErrorHandler() throws Exception {
final TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setFatalErrorHandler(fatalErrorHandler)
.build(EXECUTOR_RESOURCE.getExecutor());
final RuntimeException exception = new RuntimeException();
scheduler.runIfState(
scheduler.getState(),
() -> {
throw exception;
});
assertThat(fatalErrorHandler.getException()).isEqualTo(exception);
}
@Test
void testResourceTimeout() throws Exception {
final ManuallyTriggeredComponentMainThreadExecutor mainThreadExecutor =
new ManuallyTriggeredComponentMainThreadExecutor(Thread.currentThread());
final Duration resourceTimeout = Duration.ofMinutes(1234);
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, resourceTimeout);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
scheduler.startScheduling();
final boolean b =
mainThreadExecutor.getActiveNonPeriodicScheduledTask().stream()
.anyMatch(
scheduledTask ->
scheduledTask.getDelay(TimeUnit.MINUTES)
== resourceTimeout.toMinutes());
assertThat(b).isTrue();
}
    /**
     * The numRestarts gauge starts at 0 and increases to 1 after additional slots arrive
     * and the scheduler restarts the job to scale up.
     */
    @Test
    void testNumRestartsMetric() throws Exception {
        // Capture the numRestarts gauge when it is registered
        final CompletableFuture<Gauge<Long>> numRestartsMetricFuture = new CompletableFuture<>();
        final MetricRegistry metricRegistry =
                TestingMetricRegistry.builder()
                        .setRegisterConsumer(
                                (metric, name, group) -> {
                                    if (MetricNames.NUM_RESTARTS.equals(name)) {
                                        numRestartsMetricFuture.complete((Gauge<Long>) metric);
                                    }
                                })
                        .build();
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                new DefaultDeclarativeSlotPool(
                        jobGraph.getJobID(),
                        new DefaultAllocatedSlotPool(),
                        ignored -> {},
                        Time.minutes(10),
                        Time.minutes(10));
        final Configuration configuration = new Configuration();
        // Allow rescaling on every single additional slot, and start quickly
        configuration.set(JobManagerOptions.MIN_PARALLELISM_INCREASE, 1);
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setJobMasterConfiguration(configuration)
                        .setJobManagerJobMetricGroup(
                                JobManagerMetricGroup.createJobManagerMetricGroup(
                                                metricRegistry, "localhost")
                                        .addJob(new JobID(), "jobName"))
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        final Gauge<Long> numRestartsMetric = numRestartsMetricFuture.get();
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
        taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
        // Start with a single slot: the job runs at parallelism 1, no restarts yet
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    declarativeSlotPool.offerSlots(
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                            new LocalTaskManagerLocation(),
                            taskManagerGateway,
                            System.currentTimeMillis());
                });
        taskManagerGateway.waitForSubmissions(1);
        assertThat(numRestartsMetric.getValue()).isEqualTo(0L);
        // Offer more slots: the scheduler restarts the job to use them
        singleThreadMainThreadExecutor.execute(
                () -> {
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, PARALLELISM)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(PARALLELISM);
        assertThat(numRestartsMetric.getValue()).isEqualTo(1L);
    }
    /**
     * Exercises the job status gauges: uptime grows while the job runs, downtime stays 0,
     * and the total restarting time is non-negative after a rescale.
     */
    @Test
    void testStatusMetrics() throws Exception {
        // Capture the status gauges when they are registered
        final CompletableFuture<UpTimeGauge> upTimeMetricFuture = new CompletableFuture<>();
        final CompletableFuture<DownTimeGauge> downTimeMetricFuture = new CompletableFuture<>();
        final CompletableFuture<Gauge<Long>> restartTimeMetricFuture = new CompletableFuture<>();
        final MetricRegistry metricRegistry =
                TestingMetricRegistry.builder()
                        .setRegisterConsumer(
                                (metric, name, group) -> {
                                    switch (name) {
                                        case UpTimeGauge.METRIC_NAME:
                                            upTimeMetricFuture.complete((UpTimeGauge) metric);
                                            break;
                                        case DownTimeGauge.METRIC_NAME:
                                            downTimeMetricFuture.complete((DownTimeGauge) metric);
                                            break;
                                        case "restartingTimeTotal":
                                            restartTimeMetricFuture.complete((Gauge<Long>) metric);
                                            break;
                                    }
                                })
                        .build();
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Allow rescaling on every additional slot, start quickly, enable total-time metrics
        configuration.set(JobManagerOptions.MIN_PARALLELISM_INCREASE, 1);
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(10L));
        configuration.set(
                MetricOptions.JOB_STATUS_METRICS,
                Arrays.asList(MetricOptions.JobStatusMetrics.TOTAL_TIME));
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setJobMasterConfiguration(configuration)
                        .setJobManagerJobMetricGroup(
                                JobManagerMetricGroup.createJobManagerMetricGroup(
                                                metricRegistry, "localhost")
                                        .addJob(new JobID(), "jobName"))
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        final UpTimeGauge upTimeGauge = upTimeMetricFuture.get();
        final DownTimeGauge downTimeGauge = downTimeMetricFuture.get();
        final Gauge<Long> restartTimeGauge = restartTimeMetricFuture.get();
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
        taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
        // Start with one slot: job runs, uptime grows, no downtime/restart time
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(1);
        CommonTestUtils.waitUntilCondition(() -> upTimeGauge.getValue() > 0L);
        assertThat(downTimeGauge.getValue()).isEqualTo(0L);
        assertThat(restartTimeGauge.getValue()).isEqualTo(0L);
        // Offer a second slot, triggering a rescale restart
        singleThreadMainThreadExecutor.execute(
                () -> {
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(2);
        CommonTestUtils.waitUntilCondition(() -> upTimeGauge.getValue() > 0L);
        assertThat(downTimeGauge.getValue()).isEqualTo(0L);
        assertThat(restartTimeGauge.getValue()).isGreaterThanOrEqualTo(0L);
    }
@Test
void testStartSchedulingTransitionsToWaitingForResources() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
scheduler.startScheduling();
assertThat(scheduler.getState()).isInstanceOf(WaitingForResources.class);
}
@Test
void testStartSchedulingSetsResourceRequirementsForDefaultMode() throws Exception {
final JobGraph jobGraph = createJobGraph();
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setDeclarativeSlotPool(declarativeSlotPool)
.build(EXECUTOR_RESOURCE.getExecutor());
scheduler.startScheduling();
assertThat(declarativeSlotPool.getResourceRequirements())
.contains(ResourceRequirement.create(ResourceProfile.UNKNOWN, PARALLELISM));
}
@Test
void testStartSchedulingSetsResourceRequirementsForReactiveMode() throws Exception {
final JobGraph jobGraph = createJobGraph();
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setDeclarativeSlotPool(declarativeSlotPool)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
scheduler.startScheduling();
final int expectedParallelism =
KeyGroupRangeAssignment.computeDefaultMaxParallelism(PARALLELISM);
assertThat(declarativeSlotPool.getResourceRequirements())
.contains(ResourceRequirement.create(ResourceProfile.UNKNOWN, expectedParallelism));
}
    /**
     * Tests that the listener for new slots is properly set up: the scheduler waits in
     * WaitingForResources after startScheduling(), and offering enough slots triggers
     * deployment at full parallelism.
     */
    @Test
    void testResourceAcquisitionTriggersJobExecution() throws Exception {
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Short resource wait so the job starts as soon as slots arrive
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setJobMasterConfiguration(configuration)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(PARALLELISM);
        CompletableFuture<State> startingStateFuture = new CompletableFuture<>();
        // Capture the state right after startScheduling(), then offer the slots
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    startingStateFuture.complete(scheduler.getState());
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, PARALLELISM)),
                            taskManagerGateway);
                });
        assertThat(startingStateFuture.get()).isInstanceOf(WaitingForResources.class);
        taskManagerGateway.waitForSubmissions(PARALLELISM);
        // Read the execution graph from the main thread to avoid races with the scheduler
        final ArchivedExecutionGraph executionGraph =
                CompletableFuture.supplyAsync(
                                () -> scheduler.requestJob().getArchivedExecutionGraph(),
                                singleThreadMainThreadExecutor)
                        .get();
        assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
                .isEqualTo(PARALLELISM);
    }
@Test
void testGoToFinished() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
final ArchivedExecutionGraph archivedExecutionGraph =
new ArchivedExecutionGraphBuilder().setState(JobStatus.FAILED).build();
scheduler.goToFinished(archivedExecutionGraph);
assertThat(scheduler.getState()).isInstanceOf(Finished.class);
}
@Test
void testJobStatusListenerOnlyCalledIfJobStatusChanges() throws Exception {
final AtomicInteger numStatusUpdates = new AtomicInteger();
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setJobStatusListener(
(jobId, newJobStatus, timestamp) ->
numStatusUpdates.incrementAndGet())
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(scheduler.requestJobStatus())
.withFailMessage("Assumption about job status for Scheduler@Created is incorrect.")
.isEqualTo(JobStatus.INITIALIZING);
scheduler.transitionToState(new DummyState.Factory(JobStatus.INITIALIZING));
assertThat(numStatusUpdates.get()).isEqualTo(0);
}
    /**
     * The job status listener must see exactly the CREATED, RUNNING and FINISHED
     * transitions when a single-slot job runs to completion — and no other status.
     */
    @Test
    void testJobStatusListenerNotifiedOfJobStatusChanges() throws Exception {
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Short resource wait so the job starts as soon as slots arrive
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        final CompletableFuture<Void> jobCreatedNotification = new CompletableFuture<>();
        final CompletableFuture<Void> jobRunningNotification = new CompletableFuture<>();
        final CompletableFuture<Void> jobFinishedNotification = new CompletableFuture<>();
        final CompletableFuture<JobStatus> unexpectedJobStatusNotification =
                new CompletableFuture<>();
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setJobMasterConfiguration(configuration)
                        .setJobStatusListener(
                                (jobId, newJobStatus, timestamp) -> {
                                    switch (newJobStatus) {
                                        case CREATED:
                                            jobCreatedNotification.complete(null);
                                            break;
                                        case RUNNING:
                                            jobRunningNotification.complete(null);
                                            break;
                                        case FINISHED:
                                            jobFinishedNotification.complete(null);
                                            break;
                                        default:
                                            unexpectedJobStatusNotification.complete(newJobStatus);
                                    }
                                })
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
        // Start the job with a single slot
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                            taskManagerGateway);
                });
        // Finish the single deployed task so the job completes
        final TaskDeploymentDescriptor submittedTask = taskManagerGateway.submittedTasks.take();
        singleThreadMainThreadExecutor.execute(
                () ->
                        scheduler.updateTaskExecutionState(
                                new TaskExecutionState(
                                        submittedTask.getExecutionAttemptId(),
                                        ExecutionState.FINISHED)));
        jobCreatedNotification.get();
        jobRunningNotification.get();
        jobFinishedNotification.get();
        assertThat(unexpectedJobStatusNotification.isDone()).isFalse();
    }
    /**
     * Closing the scheduler after a global failure must shut down the completed checkpoint
     * store and the checkpoint ID counter, both with the job's final status (FAILED).
     */
    @Test
    void testCloseShutsDownCheckpointingComponents() throws Exception {
        // Futures complete with the JobStatus the components were shut down with
        final CompletableFuture<JobStatus> completedCheckpointStoreShutdownFuture =
                new CompletableFuture<>();
        final CompletedCheckpointStore completedCheckpointStore =
                TestingCompletedCheckpointStore
                        .createStoreWithShutdownCheckAndNoCompletedCheckpoints(
                                completedCheckpointStoreShutdownFuture);
        final CompletableFuture<JobStatus> checkpointIdCounterShutdownFuture =
                new CompletableFuture<>();
        final CheckpointIDCounter checkpointIdCounter =
                TestingCheckpointIDCounter.createStoreWithShutdownCheckAndNoStartAction(
                        checkpointIdCounterShutdownFuture);
        final JobGraph jobGraph = createJobGraph();
        // Checkpointing must be enabled for the components to be used at all
        jobGraph.setSnapshotSettings(
                new JobCheckpointingSettings(
                        CheckpointCoordinatorConfiguration.builder().build(), null));
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setCheckpointRecoveryFactory(
                                new TestingCheckpointRecoveryFactory(
                                        completedCheckpointStore, checkpointIdCounter))
                        .build(EXECUTOR_RESOURCE.getExecutor());
        // Fail the job globally, then close the scheduler on its main thread
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    scheduler.handleGlobalFailure(new FlinkException("Test exception"));
                    scheduler.closeAsync();
                });
        assertThat(completedCheckpointStoreShutdownFuture.get()).isEqualTo(JobStatus.FAILED);
        assertThat(checkpointIdCounterShutdownFuture.get()).isEqualTo(JobStatus.FAILED);
    }
@Test
void testTransitionToStateCallsOnLeave() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Enter a state that records lifecycle callbacks, then clear any recordings made on entry.
    final LifecycleMethodCapturingState capturingState = new LifecycleMethodCapturingState();
    scheduler.transitionToState(new StateInstanceFactory(capturingState));
    capturingState.reset();

    // Leaving the capturing state must trigger onLeave with the successor state class.
    scheduler.transitionToState(new DummyState.Factory());

    assertThat(capturingState.onLeaveCalled).isTrue();
    assertThat(capturingState.onLeaveNewStateArgument.equals(DummyState.class)).isTrue();
}
/**
 * Verifies that the max parallelism derived for a vertex stays constant across rescales: the
 * job first runs with a single slot, then is rescaled to full parallelism, and both execution
 * graphs must report the default max parallelism computed from the configured parallelism.
 */
@Test
void testConsistentMaxParallelism() throws Exception {
final int parallelism = 240;
final int expectedMaxParallelism =
KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism);
final JobVertex vertex = createNoOpVertex(parallelism);
final JobGraph jobGraph = streamingJobGraph(vertex);
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final Configuration configuration = new Configuration();
// Minimal wait timeout so the scheduler starts executing with whatever slots are offered.
configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
.setDeclarativeSlotPool(declarativeSlotPool)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
// Capacity covers the initial single submission plus the full-parallelism resubmission.
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
new SubmissionBufferingTaskManagerGateway(1 + parallelism);
taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
// Start with a single slot: the job runs at parallelism 1.
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
taskManagerGateway);
});
taskManagerGateway.waitForSubmissions(1);
ArchivedExecutionGraph executionGraph =
getArchivedExecutionGraphForRunningJob(scheduler).get();
ArchivedExecutionJobVertex archivedVertex = executionGraph.getJobVertex(vertex.getID());
// Parallelism follows the available slot; max parallelism follows the configured value.
assertThat(archivedVertex.getParallelism()).isEqualTo(1);
assertThat(archivedVertex.getMaxParallelism()).isEqualTo(expectedMaxParallelism);
// Offer the remaining slots so the scheduler rescales to full parallelism.
singleThreadMainThreadExecutor.execute(
() -> {
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, parallelism)),
taskManagerGateway);
});
taskManagerGateway.waitForSubmissions(parallelism);
ArchivedExecutionGraph resubmittedExecutionGraph =
getArchivedExecutionGraphForRunningJob(scheduler).get();
ArchivedExecutionJobVertex resubmittedArchivedVertex =
resubmittedExecutionGraph.getJobVertex(vertex.getID());
// After the rescale the max parallelism must not have changed.
assertThat(resubmittedArchivedVertex.getParallelism()).isEqualTo(parallelism);
assertThat(resubmittedArchivedVertex.getMaxParallelism()).isEqualTo(expectedMaxParallelism);
}
/**
 * Verifies that raising the upper parallelism bound of the resource requirements, combined
 * with offering matching slots, makes the scheduler scale the job up.
 */
@Test
void testRequirementIncreaseTriggersScaleUp() throws Exception {
final JobGraph jobGraph = createJobGraph();
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final AdaptiveScheduler scheduler =
createSchedulerWithNoResourceWaitTimeout(jobGraph, declarativeSlotPool);
final int scaledUpParallelism = PARALLELISM * 2;
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
createSubmissionBufferingTaskManagerGateway(scaledUpParallelism, scheduler);
// Bring the job to a RUNNING state at the original parallelism first.
startJobWithSlotsMatchingParallelism(
scheduler, declarativeSlotPool, taskManagerGateway, PARALLELISM);
awaitJobReachingParallelism(taskManagerGateway, scheduler, PARALLELISM);
JobResourceRequirements newJobResourceRequirements =
createRequirementsWithUpperParallelism(scaledUpParallelism);
// Raise the requirements and offer the additional slots in the same main-thread action.
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.updateJobResourceRequirements(newJobResourceRequirements);
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, PARALLELISM)),
taskManagerGateway);
});
awaitJobReachingParallelism(taskManagerGateway, scheduler, scaledUpParallelism);
}
/**
 * Verifies that lowering the upper parallelism bound of the resource requirements makes the
 * scheduler scale the job down without any change to the available slots.
 */
@Test
void testRequirementDecreaseTriggersScaleDown() throws Exception {
final JobGraph jobGraph = createJobGraph();
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final AdaptiveScheduler scheduler =
createSchedulerWithNoResourceWaitTimeout(jobGraph, declarativeSlotPool);
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
createSubmissionBufferingTaskManagerGateway(PARALLELISM, scheduler);
// Bring the job to a RUNNING state at the full parallelism first.
startJobWithSlotsMatchingParallelism(
scheduler, declarativeSlotPool, taskManagerGateway, PARALLELISM);
awaitJobReachingParallelism(taskManagerGateway, scheduler, PARALLELISM);
int scaledDownParallelism = PARALLELISM - 1;
JobResourceRequirements newJobResourceRequirements =
createRequirementsWithUpperParallelism(scaledDownParallelism);
// Only the requirements change; the scheduler must restart at the lower parallelism.
singleThreadMainThreadExecutor.execute(
() -> scheduler.updateJobResourceRequirements(newJobResourceRequirements));
awaitJobReachingParallelism(taskManagerGateway, scheduler, scaledDownParallelism);
}
/**
 * Builds an {@link AdaptiveScheduler} on the single-threaded main-thread executor whose
 * resource-wait timeout is effectively disabled (1 ms).
 */
private AdaptiveScheduler createSchedulerWithNoResourceWaitTimeout(
        JobGraph jobGraph, DeclarativeSlotPool declarativeSlotPool) throws Exception {
    final Configuration config = new Configuration();
    config.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));

    final AdaptiveSchedulerBuilder builder =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor);
    builder.setDeclarativeSlotPool(declarativeSlotPool);
    builder.setJobMasterConfiguration(config);
    return builder.build(EXECUTOR_RESOURCE.getExecutor());
}
/**
 * Creates a buffering gateway whose cancel callback acknowledges the cancellation back to the
 * scheduler (as CANCELED) on the main-thread executor.
 */
private SubmissionBufferingTaskManagerGateway createSubmissionBufferingTaskManagerGateway(
        int parallelism, SchedulerNG scheduler) {
    final SubmissionBufferingTaskManagerGateway gateway =
            new SubmissionBufferingTaskManagerGateway(parallelism);
    final Consumer<ExecutionAttemptID> cancelConsumer =
            attemptId ->
                    singleThreadMainThreadExecutor.execute(
                            () ->
                                    scheduler.updateTaskExecutionState(
                                            new TaskExecutionState(
                                                    attemptId, ExecutionState.CANCELED)));
    gateway.setCancelConsumer(cancelConsumer);
    return gateway;
}
// Starts scheduling and offers exactly as many slots as the requested parallelism, all within
// a single main-thread-executor action so the scheduler sees the slots right after start-up.
private void startJobWithSlotsMatchingParallelism(
SchedulerNG scheduler,
DeclarativeSlotPool declarativeSlotPool,
TaskManagerGateway taskManagerGateway,
int parallelism) {
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, parallelism)),
taskManagerGateway);
});
}
// Blocks until the gateway has seen {@code parallelism} task submissions, then asserts that the
// archived execution graph (read on the main thread) reports the same parallelism for JOB_VERTEX.
private void awaitJobReachingParallelism(
SubmissionBufferingTaskManagerGateway taskManagerGateway,
SchedulerNG scheduler,
int parallelism)
throws Exception {
// blocks the test thread until all expected submissions have arrived
taskManagerGateway.waitForSubmissions(parallelism);
// requestJob must run on the scheduler's main thread
final ArchivedExecutionGraph executionGraph =
CompletableFuture.supplyAsync(
() -> scheduler.requestJob().getArchivedExecutionGraph(),
singleThreadMainThreadExecutor)
.get();
assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
.isEqualTo(parallelism);
}
/** Requirements for {@code JOB_VERTEX} with lower bound 1 and the given upper bound. */
private static JobResourceRequirements createRequirementsWithUpperParallelism(int parallelism) {
    final JobVertexResourceRequirements.Parallelism bounds =
            new JobVertexResourceRequirements.Parallelism(1, parallelism);
    return new JobResourceRequirements(
            Collections.singletonMap(
                    JOB_VERTEX.getID(), new JobVertexResourceRequirements(bounds)));
}
@Test
void testHowToHandleFailureRejectedByStrategy() throws Exception {
    // With NoRestartBackoffTimeStrategy every failure must be classified as non-restartable.
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setRestartBackoffTimeStrategy(NoRestartBackoffTimeStrategy.INSTANCE)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final FailureResult result = scheduler.howToHandleFailure(new Exception("test"));

    assertThat(result.canRestart()).isFalse();
}
@Test
void testHowToHandleFailureAllowedByStrategy() throws Exception {
    final TestRestartBackoffTimeStrategy restartStrategy =
            new TestRestartBackoffTimeStrategy(true, 1234);
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setRestartBackoffTimeStrategy(restartStrategy)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final FailureResult failureResult = scheduler.howToHandleFailure(new Exception("test"));

    // The strategy both permits the restart and dictates the backoff delay.
    assertThat(failureResult.canRestart()).isTrue();
    assertThat(failureResult.getBackoffTime().toMillis())
            .isEqualTo(restartStrategy.getBackoffTime());
}
@Test
void testHowToHandleFailureUnrecoverableFailure() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // SuppressRestartsException marks a failure as unrecoverable regardless of the strategy.
    final FailureResult result =
            scheduler.howToHandleFailure(new SuppressRestartsException(new Exception("test")));

    assertThat(result.canRestart()).isFalse();
}
/** Convenience overload: runs the exception-history test without scheduler or job-graph tweaks. */
private Iterable<RootExceptionHistoryEntry> runExceptionHistoryTests(
        BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic) throws Exception {
    final Consumer<AdaptiveSchedulerBuilder> noSchedulerSetup = ignored -> {};
    final Consumer<JobGraph> noJobGraphSetup = ignored -> {};
    return runExceptionHistoryTests(testLogic, noSchedulerSetup, noJobGraphSetup);
}
/** Convenience overload: runs the exception-history test with an unmodified job graph. */
private Iterable<RootExceptionHistoryEntry> runExceptionHistoryTests(
        BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic,
        Consumer<AdaptiveSchedulerBuilder> setupScheduler)
        throws Exception {
    final Consumer<JobGraph> noJobGraphSetup = ignored -> {};
    return runExceptionHistoryTests(testLogic, setupScheduler, noJobGraphSetup);
}
/**
 * Runs a full job lifecycle (start, offer slots, wait for deployment), executes the given test
 * logic on the main thread with the deployed execution attempt IDs, cancels the job, and
 * returns the resulting exception history.
 *
 * @param testLogic invoked on the main thread with the scheduler and the attempt IDs
 * @param setupScheduler hook to customize the scheduler builder before building
 * @param setupJobGraph hook to customize the job graph before scheduling
 */
private Iterable<RootExceptionHistoryEntry> runExceptionHistoryTests(
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic,
Consumer<AdaptiveSchedulerBuilder> setupScheduler,
Consumer<JobGraph> setupJobGraph)
throws Exception {
final JobGraph jobGraph = createJobGraph();
setupJobGraph.accept(jobGraph);
final CompletedCheckpointStore completedCheckpointStore =
new StandaloneCompletedCheckpointStore(1);
final CheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();
final CheckpointsCleaner checkpointCleaner = new CheckpointsCleaner();
TestingCheckpointRecoveryFactory checkpointRecoveryFactory =
new TestingCheckpointRecoveryFactory(completedCheckpointStore, checkpointIDCounter);
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final Configuration configuration = new Configuration();
// minimal wait timeout: execute as soon as slots are available
configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
AdaptiveSchedulerBuilder builder =
new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
.setJobMasterConfiguration(configuration)
.setDeclarativeSlotPool(declarativeSlotPool)
.setCheckpointRecoveryFactory(checkpointRecoveryFactory)
.setCheckpointCleaner(checkpointCleaner);
// let the caller customize the builder before the scheduler is created
setupScheduler.accept(builder);
final AdaptiveScheduler scheduler = builder.build(EXECUTOR_RESOURCE.getExecutor());
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
new SubmissionBufferingTaskManagerGateway(PARALLELISM);
// acknowledge cancellations so the job can reach a terminal state at the end
taskManagerGateway.setCancelConsumer(
attemptId ->
singleThreadMainThreadExecutor.execute(
() ->
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId,
ExecutionState.CANCELED,
null)))));
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, PARALLELISM)),
taskManagerGateway);
});
// wait until every subtask has been deployed before collecting attempt IDs
taskManagerGateway.waitForSubmissions(PARALLELISM);
CompletableFuture<Iterable<ArchivedExecutionVertex>> vertexFuture =
new CompletableFuture<>();
singleThreadMainThreadExecutor.execute(
() ->
vertexFuture.complete(
scheduler
.requestJob()
.getArchivedExecutionGraph()
.getAllExecutionVertices()));
final Iterable<ArchivedExecutionVertex> executionVertices = vertexFuture.get();
final List<ExecutionAttemptID> attemptIds =
IterableUtils.toStream(executionVertices)
.map(ArchivedExecutionVertex::getCurrentExecutionAttempt)
.map(ArchivedExecution::getAttemptId)
.collect(Collectors.toList());
// run the caller's failure-injection logic on the scheduler's main thread
CompletableFuture<Void> runTestLogicFuture =
CompletableFuture.runAsync(
() -> testLogic.accept(scheduler, attemptIds),
singleThreadMainThreadExecutor);
runTestLogicFuture.get();
// cancel and wait for termination so the exception history is final
singleThreadMainThreadExecutor.execute(scheduler::cancel);
scheduler.getJobTerminationFuture().get();
return scheduler.requestJob().getExceptionHistory();
}
/**
 * Verifies that a global failure shows up in the exception history as a single root entry
 * without a task name or task-manager location.
 */
@Test
void testExceptionHistoryWithGlobalFailure() throws Exception {
final Exception expectedException = new Exception("Expected Global Exception");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> scheduler.handleGlobalFailure(expectedException);
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
// a global failure is not attributed to any particular task
assertThat(failure.getTaskManagerLocation()).isNull();
assertThat(failure.getFailingTaskName()).isNull();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
/** Verify AdaptiveScheduler propagates failure labels as generated by Failure Enrichers. */
@Test
void testExceptionHistoryWithTaskFailureLabels() throws Exception {
final Exception taskException = new Exception("Task Exception");
// fail a single subtask so the failure enricher is invoked for it
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, taskException)));
};
final TestingFailureEnricher failureEnricher = new TestingFailureEnricher();
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withFailureEnrichers(Collections.singletonList(failureEnricher))
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader)).isEqualTo(taskException);
// the enricher's labels must be attached to the history entry
assertThat(failure.getFailureLabels()).isEqualTo(failureEnricher.getFailureLabels());
}
/**
 * Verifies that a local task failure that triggers a restart still ends up in the exception
 * history as the root failure.
 */
@Test
void testExceptionHistoryWithTaskFailureWithRestart() throws Exception {
    // Bug fix: the annotation was duplicated (@Test @Test), which does not compile because
    // JUnit's @Test is not a repeatable annotation.
    final Exception expectedException = new Exception("Expected Local Exception");
    // Allow exactly one restart so the local failure is archived instead of being terminal.
    final Consumer<AdaptiveSchedulerBuilder> setupScheduler =
            builder ->
                    builder.setRestartBackoffTimeStrategy(
                            new FixedDelayRestartBackoffTimeStrategy
                                            .FixedDelayRestartBackoffTimeStrategyFactory(1, 100)
                                    .create());
    final BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
            (scheduler, attemptIds) -> {
                final ExecutionAttemptID attemptId = attemptIds.get(1);
                scheduler.updateTaskExecutionState(
                        new TaskExecutionStateTransition(
                                new TaskExecutionState(
                                        attemptId, ExecutionState.FAILED, expectedException)));
            };
    final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
            new ExceptionHistoryTester(singleThreadMainThreadExecutor)
                    .withTestLogic(testLogic)
                    .withModifiedScheduler(setupScheduler)
                    .run();
    assertThat(actualExceptionHistory).hasSize(1);
    final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
    assertThat(failure.getException().deserializeError(classLoader))
            .isEqualTo(expectedException);
}
@Test
void testExceptionHistoryWithTaskFailureFromStopWithSavepoint() throws Exception {
final Exception expectedException = new Exception("Expected Local Exception");
Consumer<JobGraph> setupJobGraph =
jobGraph ->
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
CheckpointCoordinatorConfiguration.builder()
.setCheckpointInterval(Long.MAX_VALUE)
.build(),
null));
final CompletedCheckpointStore completedCheckpointStore =
new StandaloneCompletedCheckpointStore(1);
final CheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();
final CheckpointsCleaner checkpointCleaner = new CheckpointsCleaner();
TestingCheckpointRecoveryFactory checkpointRecoveryFactory =
new TestingCheckpointRecoveryFactory(completedCheckpointStore, checkpointIDCounter);
Consumer<AdaptiveSchedulerBuilder> setupScheduler =
builder ->
builder.setCheckpointRecoveryFactory(checkpointRecoveryFactory)
.setCheckpointCleaner(checkpointCleaner);
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.stopWithSavepoint(
"file:
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException)));
for (ExecutionAttemptID id : attemptIds) {
scheduler.declineCheckpoint(
new DeclineCheckpoint(
scheduler.requestJob().getJobId(),
id,
checkpointIDCounter.get() - 1,
new CheckpointException(
CheckpointFailureReason.IO_EXCEPTION)));
}
};
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.withModifiedScheduler(setupScheduler)
.withModifiedJobGraph(setupJobGraph)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
/**
 * Verifies that two global failures raised back-to-back are merged into a single history entry:
 * the first becomes the root, the second is recorded as a concurrent exception.
 */
@Test
void testExceptionHistoryWithTaskConcurrentGlobalFailure() throws Exception {
final Exception expectedException1 = new Exception("Expected Global Exception 1");
final Exception expectedException2 = new Exception("Expected Global Exception 2");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
scheduler.handleGlobalFailure(expectedException1);
scheduler.handleGlobalFailure(expectedException2);
};
final Iterable<RootExceptionHistoryEntry> entries = runExceptionHistoryTests(testLogic);
assertThat(entries).hasSize(1);
final RootExceptionHistoryEntry failure = entries.iterator().next();
// the first failure is the root entry
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException1);
final Iterable<ExceptionHistoryEntry> concurrentExceptions =
failure.getConcurrentExceptions();
final List<Throwable> foundExceptions =
IterableUtils.toStream(concurrentExceptions)
.map(ExceptionHistoryEntry::getException)
.map(exception -> exception.deserializeError(classLoader))
.collect(Collectors.toList());
// the second failure is attached as a concurrent exception
assertThat(foundExceptions).containsExactly(expectedException2);
}
/**
 * Verifies that of two concurrent local task failures only the first becomes the root history
 * entry and, with the default tester setup, no concurrent exceptions are recorded for it.
 */
@Test
void testExceptionHistoryWithTaskConcurrentFailure() throws Exception {
final Exception expectedException1 = new Exception("Expected Local Exception 1");
final Exception expectedException2 = new Exception("Expected Local Exception 2");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
// fail two distinct subtasks one after the other
final ExecutionAttemptID attemptId = attemptIds.remove(0);
final ExecutionAttemptID attemptId2 = attemptIds.remove(0);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException1)));
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId2,
ExecutionState.FAILED,
expectedException2)));
};
final Iterable<RootExceptionHistoryEntry> entries =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(entries).hasSize(1);
final RootExceptionHistoryEntry failure = entries.iterator().next();
// only the first local failure is archived as the root cause
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException1);
final Iterable<ExceptionHistoryEntry> concurrentExceptions =
failure.getConcurrentExceptions();
final List<Throwable> foundExceptions =
IterableUtils.toStream(concurrentExceptions)
.map(ExceptionHistoryEntry::getException)
.map(exception -> exception.deserializeError(classLoader))
.collect(Collectors.toList());
assertThat(foundExceptions).isEmpty();
}
@Test
void testRepeatedTransitionIntoCurrentStateFails() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // A freshly built scheduler starts out in the Created state ...
    assertThat(scheduler.getState()).isInstanceOf(Created.class);

    // ... and transitioning into the state it is already in must be rejected.
    assertThatThrownBy(() -> scheduler.transitionToState(new Created.Factory(scheduler, LOG)))
            .isInstanceOf(IllegalStateException.class);
}
@Test
void testTriggerSavepointFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Triggering a savepoint before the job is executing must fail with a CheckpointException.
    final CompletableFuture<String> savepointFuture =
            scheduler.triggerSavepoint("some directory", false, SavepointFormatType.CANONICAL);

    assertThatFuture(savepointFuture)
            .eventuallyFailsWith(ExecutionException.class)
            .withCauseInstanceOf(CheckpointException.class);
}
@Test
void testStopWithSavepointFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Bug fix: this test previously called triggerSavepoint, making it a byte-for-byte copy of
    // testTriggerSavepointFailsInIllegalState; it must exercise stopWithSavepoint instead.
    assertThatFuture(
                    scheduler.stopWithSavepoint(
                            "some directory", false, SavepointFormatType.CANONICAL))
            .eventuallyFailsWith(ExecutionException.class)
            .withCauseInstanceOf(CheckpointException.class);
}
@Test
void testDeliverOperatorEventToCoordinatorFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // No task is running in the Created state, so delivering an operator event must fail.
    final ExecutionAttemptID attemptId = createExecutionAttemptId();
    assertThatThrownBy(
                    () ->
                            scheduler.deliverOperatorEventToCoordinator(
                                    attemptId, new OperatorID(), new TestOperatorEvent()))
            .isInstanceOf(TaskNotRunningException.class);
}
@Test
void testDeliverCoordinationRequestToCoordinatorFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // No coordinator exists before execution starts, so the request must fail.
    final CoordinationRequest request = new CoordinationRequest() {};
    assertThatFuture(
                    scheduler.deliverCoordinationRequestToCoordinator(new OperatorID(), request))
            .eventuallyFailsWith(ExecutionException.class)
            .withCauseInstanceOf(FlinkException.class);
}
@Test
void testUpdateTaskExecutionStateReturnsFalseInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // An update for an unknown execution while nothing is running cannot be applied.
    final TaskExecutionStateTransition update =
            new TaskExecutionStateTransition(
                    new TaskExecutionState(createExecutionAttemptId(), ExecutionState.FAILED));

    assertThat(scheduler.updateTaskExecutionState(update)).isFalse();
}
@Test
void testRequestNextInputSplitFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Input splits can only be requested while the job is executing.
    final ExecutionAttemptID attemptId = createExecutionAttemptId();
    assertThatThrownBy(() -> scheduler.requestNextInputSplit(JOB_VERTEX.getID(), attemptId))
            .isInstanceOf(IOException.class);
}
@Test
public void testRequestPartitionStateFailsInIllegalState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Without a running execution graph the producer is considered disposed.
    final IntermediateDataSetID dataSetId = new IntermediateDataSetID();
    final ResultPartitionID partitionId = new ResultPartitionID();
    assertThatThrownBy(() -> scheduler.requestPartitionState(dataSetId, partitionId))
            .isInstanceOf(PartitionProducerDisposedException.class);
}
/**
 * Verifies that slot assignment reports failure when the slot allocator cannot reserve the
 * expected resources.
 */
@Test
void testTryToAssignSlotsReturnsNotPossibleIfExpectedResourcesAreNotAvailable()
throws Exception {
// allocator that always fails to reserve resources
final TestingSlotAllocator slotAllocator =
TestingSlotAllocator.newBuilder()
.setTryReserveResourcesFunction(ignored -> Optional.empty())
.build();
final AdaptiveScheduler adaptiveScheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setSlotAllocator(slotAllocator)
.build(EXECUTOR_RESOURCE.getExecutor());
final CreatingExecutionGraph.AssignmentResult assignmentResult =
adaptiveScheduler.tryToAssignSlots(
CreatingExecutionGraph.ExecutionGraphWithVertexParallelism.create(
new StateTrackingMockExecutionGraph(), JobSchedulingPlan.empty()));
assertThat(assignmentResult.isSuccess()).isFalse();
}
@Test
void testComputeVertexParallelismStoreForExecutionInReactiveMode() {
    JobVertex v1 = createNoOpVertex("v1", 1, 50);
    JobVertex v2 = createNoOpVertex("v2", 50, 50);
    JobGraph graph = streamingJobGraph(v1, v2);

    VertexParallelismStore store =
            AdaptiveScheduler.computeVertexParallelismStoreForExecution(
                    graph,
                    SchedulerExecutionMode.REACTIVE,
                    SchedulerBase::getDefaultMaxParallelism);

    // In reactive mode the store must mirror the values configured on each vertex.
    for (JobVertex vertex : graph.getVertices()) {
        VertexParallelismInformation info = store.getParallelismInfo(vertex.getID());
        assertThat(info.getParallelism()).isEqualTo(vertex.getParallelism());
        assertThat(info.getMaxParallelism()).isEqualTo(vertex.getMaxParallelism());
    }
}
@Test
void testComputeVertexParallelismStoreForExecutionInDefaultMode() {
    JobVertex v1 = createNoOpVertex("v1", 1, 50);
    JobVertex v2 = createNoOpVertex("v2", 50, 50);
    JobGraph graph = streamingJobGraph(v1, v2);

    VertexParallelismStore store =
            AdaptiveScheduler.computeVertexParallelismStoreForExecution(
                    graph, null, SchedulerBase::getDefaultMaxParallelism);

    // Without a scheduler mode the store also mirrors the configured vertex values.
    for (JobVertex vertex : graph.getVertices()) {
        VertexParallelismInformation info = store.getParallelismInfo(vertex.getID());
        assertThat(info.getParallelism()).isEqualTo(vertex.getParallelism());
        assertThat(info.getMaxParallelism()).isEqualTo(vertex.getMaxParallelism());
    }
}
/**
 * Verifies (via the shared DefaultSchedulerTest helper) that the checkpoint cleaner is only
 * closed after the checkpoint services have been shut down.
 */
@Test
void testCheckpointCleanerIsClosedAfterCheckpointServices() throws Exception {
final ScheduledExecutorService executorService =
Executors.newSingleThreadScheduledExecutor();
try {
DefaultSchedulerTest.doTestCheckpointCleanerIsClosedAfterCheckpointServices(
(checkpointRecoveryFactory, checkpointCleaner) -> {
final JobGraph jobGraph = createJobGraph();
// checkpointing must be enabled for the services to be created
enableCheckpointing(jobGraph);
try {
return new AdaptiveSchedulerBuilder(
jobGraph,
ComponentMainThreadExecutorServiceAdapter
.forSingleThreadExecutor(executorService))
.setCheckpointRecoveryFactory(checkpointRecoveryFactory)
.setCheckpointCleaner(checkpointCleaner)
.build(EXECUTOR_RESOURCE.getExecutor());
} catch (Exception e) {
// the factory interface does not allow checked exceptions
throw new RuntimeException(e);
}
},
executorService,
LOG);
} finally {
// the executor is owned by this test and must always be torn down
executorService.shutdownNow();
}
}
/**
 * Verifies that after lowering the resource requirements the scheduler scales down, the slots
 * that became idle are released once the idle timeout elapses, and the job keeps RUNNING.
 */
@Test
void testIdleSlotsAreReleasedAfterDownScalingTriggeredByLoweredResourceRequirements()
throws Exception {
final JobGraph jobGraph = createJobGraph();
// very short idle timeout so released slots are freed promptly
final Duration slotIdleTimeout = Duration.ofMillis(10);
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, slotIdleTimeout.toMillis());
final DeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID(), slotIdleTimeout);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
.setDeclarativeSlotPool(declarativeSlotPool)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
try {
final int numInitialSlots = 4;
final int numSlotsAfterDownscaling = 2;
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
new SubmissionBufferingTaskManagerGateway(numInitialSlots);
taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
// start the job with all initial slots
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, numInitialSlots)),
taskManagerGateway);
});
taskManagerGateway.waitForSubmissions(numInitialSlots);
// lower the upper parallelism bound to trigger the downscale
singleThreadMainThreadExecutor.execute(
() ->
scheduler.updateJobResourceRequirements(
JobResourceRequirements.newBuilder()
.setParallelismForJobVertex(
JOB_VERTEX.getID(), 1, numSlotsAfterDownscaling)
.build()));
taskManagerGateway.waitForSubmissions(numSlotsAfterDownscaling);
// the no-longer-needed slots must be freed back to the task manager
taskManagerGateway.waitForFreedSlots(numInitialSlots - numSlotsAfterDownscaling);
final CompletableFuture<JobStatus> jobStatusFuture = new CompletableFuture<>();
singleThreadMainThreadExecutor.execute(
() -> jobStatusFuture.complete(scheduler.getState().getJobStatus()));
assertThatFuture(jobStatusFuture).eventuallySucceeds().isEqualTo(JobStatus.RUNNING);
// no further slots may have been freed beyond the expected ones
assertThat(taskManagerGateway.freedSlots).isEmpty();
} finally {
final CompletableFuture<Void> closeFuture = new CompletableFuture<>();
singleThreadMainThreadExecutor.execute(
() -> FutureUtils.forward(scheduler.closeAsync(), closeFuture));
assertThatFuture(closeFuture).eventuallySucceeds();
}
}
@Test
public void testUpdateResourceRequirementsInReactiveModeIsNotSupported() throws Exception {
    final Configuration config = new Configuration();
    config.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobMasterConfiguration(config)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Reactive mode derives requirements itself, so external updates must be rejected.
    assertThatThrownBy(
                    () ->
                            scheduler.updateJobResourceRequirements(
                                    JobResourceRequirements.empty()))
            .isInstanceOf(UnsupportedOperationException.class);
}
@Test
public void testRequestDefaultResourceRequirements() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobMasterConfiguration(new Configuration())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Without explicit requirements, the vertex parallelism acts as the upper bound.
    final JobResourceRequirements expected =
            JobResourceRequirements.newBuilder()
                    .setParallelismForJobVertex(
                            JOB_VERTEX.getID(), 1, JOB_VERTEX.getParallelism())
                    .build();
    assertThat(scheduler.requestJobResourceRequirements()).isEqualTo(expected);
}
@Test
public void testRequestDefaultResourceRequirementsInReactiveMode() throws Exception {
    final Configuration config = new Configuration();
    config.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobMasterConfiguration(config)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Reactive mode raises the upper bound to the vertex's default max parallelism.
    final JobResourceRequirements expected =
            JobResourceRequirements.newBuilder()
                    .setParallelismForJobVertex(
                            JOB_VERTEX.getID(),
                            1,
                            SchedulerBase.getDefaultMaxParallelism(JOB_VERTEX))
                    .build();
    assertThat(scheduler.requestJobResourceRequirements()).isEqualTo(expected);
}
@Test
public void testRequestUpdatedResourceRequirements() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobMasterConfiguration(new Configuration())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final JobResourceRequirements updatedRequirements =
            JobResourceRequirements.newBuilder()
                    .setParallelismForJobVertex(JOB_VERTEX.getID(), 1, 12)
                    .build();

    // The defaults differ from the new requirements; after the update they must match.
    assertThat(scheduler.requestJobResourceRequirements()).isNotEqualTo(updatedRequirements);
    scheduler.updateJobResourceRequirements(updatedRequirements);
    assertThat(scheduler.requestJobResourceRequirements()).isEqualTo(updatedRequirements);
}
// Polls (busy-waits on the main-thread executor) until the archived execution graph reports
// RUNNING, then completes the returned future with that graph.
private CompletableFuture<ArchivedExecutionGraph> getArchivedExecutionGraphForRunningJob(
SchedulerNG scheduler) {
return CompletableFuture.supplyAsync(
() -> {
ArchivedExecutionGraph graph = null;
// spin until the job has transitioned to RUNNING
while (graph == null || graph.getState() != JobStatus.RUNNING) {
graph = scheduler.requestJob().getArchivedExecutionGraph();
}
return graph;
},
singleThreadMainThreadExecutor);
}
/** Returns a consumer that acknowledges a task cancellation as CANCELED on the main thread. */
private Consumer<ExecutionAttemptID> createCancelConsumer(SchedulerNG scheduler) {
    return attemptId -> {
        final TaskExecutionState canceledState =
                new TaskExecutionState(attemptId, ExecutionState.CANCELED);
        singleThreadMainThreadExecutor.execute(
                () -> scheduler.updateTaskExecutionState(canceledState));
    };
}
// Convenience overload: creates a slot pool with the default idle-slot timeout.
private static DefaultDeclarativeSlotPool createDeclarativeSlotPool(JobID jobId) {
return createDeclarativeSlotPool(jobId, DEFAULT_TIMEOUT);
}
/** Creates a slot pool for the given job with the given idle-slot timeout. */
private static DefaultDeclarativeSlotPool createDeclarativeSlotPool(
        JobID jobId, Duration idleSlotTimeout) {
    final Time idleTimeout = Time.fromDuration(idleSlotTimeout);
    final Time rpcTimeout = Time.fromDuration(DEFAULT_TIMEOUT);
    // no-op notifier: these tests do not react to requirement changes from the pool
    return new DefaultDeclarativeSlotPool(
            jobId, new DefaultAllocatedSlotPool(), ignored -> {}, idleTimeout, rpcTimeout);
}
// Creates the default single-vertex streaming job graph used throughout this test class.
private static JobGraph createJobGraph() {
return streamingJobGraph(JOB_VERTEX);
}
/** {@link DummyState} variant that records invocations of {@code onLeave}. */
private static class LifecycleMethodCapturingState extends DummyState {
// set to true once onLeave() has been invoked
boolean onLeaveCalled = false;
// state class passed to the most recent onLeave() invocation, if any
@Nullable Class<? extends State> onLeaveNewStateArgument = null;
// clears any recorded invocation so the instance can be reused
void reset() {
onLeaveCalled = false;
onLeaveNewStateArgument = null;
}
@Override
public void onLeave(Class<? extends State> newState) {
onLeaveCalled = true;
onLeaveNewStateArgument = newState;
}
}
/**
 * A {@link SimpleAckingTaskManagerGateway} that buffers all the task submissions into a
 * blocking queue, allowing one to wait for an arbitrary number of submissions.
 */
public static class SubmissionBufferingTaskManagerGateway
        extends SimpleAckingTaskManagerGateway {
    // Capacity-bounded record of every task deployment submitted through this gateway.
    final BlockingQueue<TaskDeploymentDescriptor> submittedTasks;
    // Capacity-bounded record of every slot freed through this gateway.
    final BlockingQueue<AllocationID> freedSlots;

    public SubmissionBufferingTaskManagerGateway(int capacity) {
        submittedTasks = new ArrayBlockingQueue<>(capacity);
        freedSlots = new ArrayBlockingQueue<>(capacity);
        initializeFunctions();
    }

    @Override
    public void setSubmitConsumer(Consumer<TaskDeploymentDescriptor> submitConsumer) {
        // Wrap the given consumer so every submission is also recorded in the queue;
        // checkState fails loudly if the queue capacity is exceeded.
        super.setSubmitConsumer(
                taskDeploymentDescriptor -> {
                    Preconditions.checkState(submittedTasks.offer(taskDeploymentDescriptor));
                    submitConsumer.accept(taskDeploymentDescriptor);
                });
    }

    @Override
    public void setFreeSlotFunction(
            BiFunction<AllocationID, Throwable, CompletableFuture<Acknowledge>>
                    freeSlotFunction) {
        // Wrap the given function so every freed allocation is also recorded in the queue.
        super.setFreeSlotFunction(
                (allocationID, throwable) -> {
                    Preconditions.checkState(freedSlots.offer(allocationID));
                    return freeSlotFunction.apply(allocationID, throwable);
                });
    }

    /**
     * Block until an arbitrary number of submissions have been received.
     *
     * @param numSubmissions The number of submissions to wait for
     * @return the list of the waited-for submissions
     * @throws InterruptedException if the waiting thread is interrupted; note that this
     *     method blocks indefinitely — there is no timeout
     */
    public List<TaskDeploymentDescriptor> waitForSubmissions(int numSubmissions)
            throws InterruptedException {
        List<TaskDeploymentDescriptor> descriptors = new ArrayList<>();
        for (int i = 0; i < numSubmissions; i++) {
            descriptors.add(submittedTasks.take());
        }
        return descriptors;
    }

    /**
     * Blocks until the given number of slots have been freed.
     *
     * @param numFreedSlots the number of freed slots to wait for
     * @return the allocation IDs of the freed slots, in the order they were freed
     * @throws InterruptedException if the waiting thread is interrupted
     */
    public List<AllocationID> waitForFreedSlots(int numFreedSlots) throws InterruptedException {
        final List<AllocationID> allocationIds = new ArrayList<>();
        for (int i = 0; i < numFreedSlots; i++) {
            allocationIds.add(freedSlots.take());
        }
        return allocationIds;
    }

    // Installs no-op defaults so the recording wrappers above are active even if a test
    // never registers its own consumer/function.
    private void initializeFunctions() {
        setSubmitConsumer(ignored -> {});
        setFreeSlotFunction(
                (allocationId, throwable) ->
                        CompletableFuture.completedFuture(Acknowledge.get()));
    }
}
/** {@link StateFactory} that always hands out a single pre-created state instance. */
private static class StateInstanceFactory
        implements StateFactory<LifecycleMethodCapturingState> {

    private final LifecycleMethodCapturingState instance;

    public StateInstanceFactory(LifecycleMethodCapturingState instance) {
        this.instance = instance;
    }

    @Override
    public LifecycleMethodCapturingState getState() {
        return instance;
    }

    @Override
    public Class<LifecycleMethodCapturingState> getStateClass() {
        return LifecycleMethodCapturingState.class;
    }
}
/**
 * Minimal {@link State} implementation that reports a fixed job status and no-ops everything
 * else. Used to drive {@code transitionToState}/{@code runIfState} without real state logic.
 */
static class DummyState implements State {
    // The job status this state reports; fixed at construction time.
    private final JobStatus jobStatus;

    public DummyState() {
        this(JobStatus.RUNNING);
    }

    public DummyState(JobStatus jobStatus) {
        this.jobStatus = jobStatus;
    }

    @Override
    public void cancel() {}

    @Override
    public void suspend(Throwable cause) {}

    @Override
    public JobStatus getJobStatus() {
        return jobStatus;
    }

    @Override
    public ArchivedExecutionGraph getJob() {
        return null;
    }

    @Override
    public void handleGlobalFailure(Throwable cause) {}

    @Override
    public Logger getLogger() {
        return null;
    }

    /** {@link StateFactory} creating fresh {@link DummyState} instances with a given status. */
    private static class Factory implements StateFactory<DummyState> {
        private final JobStatus jobStatus;

        public Factory() {
            this(JobStatus.RUNNING);
        }

        public Factory(JobStatus jobStatus) {
            this.jobStatus = jobStatus;
        }

        @Override
        public Class<DummyState> getStateClass() {
            return DummyState.class;
        }

        @Override
        public DummyState getState() {
            return new DummyState(jobStatus);
        }
    }
}
/**
 * Harness for exception-history tests: builds a scheduler, brings the job to RUNNING with
 * {@code PARALLELISM} slots, runs test-supplied failure logic on the main thread, cancels the
 * job, and returns the resulting exception history. Builder-style setters customize the
 * scheduler, the job graph, and the failure enrichers before {@link #run()}.
 */
private static class ExceptionHistoryTester {
    private final ComponentMainThreadExecutor mainThreadExecutor;
    // Failure-injecting logic, given the scheduler and the current execution attempts.
    private BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
            (scheduler, attempts) -> {};
    private Consumer<AdaptiveSchedulerBuilder> schedulerModifier = ignored -> {};
    private Consumer<JobGraph> jobGraphModifier = ignored -> {};
    private Collection<FailureEnricher> failureEnrichers = Collections.emptySet();

    ExceptionHistoryTester(ComponentMainThreadExecutor mainThreadExecutor) {
        this.mainThreadExecutor = mainThreadExecutor;
    }

    ExceptionHistoryTester withTestLogic(
            BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic) {
        this.testLogic = testLogic;
        return this;
    }

    ExceptionHistoryTester withModifiedScheduler(
            Consumer<AdaptiveSchedulerBuilder> schedulerModifier) {
        this.schedulerModifier = schedulerModifier;
        return this;
    }

    ExceptionHistoryTester withModifiedJobGraph(Consumer<JobGraph> jobGraphModifier) {
        this.jobGraphModifier = jobGraphModifier;
        return this;
    }

    ExceptionHistoryTester withFailureEnrichers(Collection<FailureEnricher> failureEnrichers) {
        this.failureEnrichers = failureEnrichers;
        return this;
    }

    /**
     * Executes the configured scenario and returns the job's exception history after
     * cancellation has completed.
     */
    Iterable<RootExceptionHistoryEntry> run() throws Exception {
        // Build the job graph (optionally customized by the test).
        final JobGraph jobGraph = createJobGraph();
        jobGraphModifier.accept(jobGraph);
        final CompletedCheckpointStore completedCheckpointStore =
                new StandaloneCompletedCheckpointStore(1);
        final CheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();
        final CheckpointsCleaner checkpointCleaner = new CheckpointsCleaner();
        TestingCheckpointRecoveryFactory checkpointRecoveryFactory =
                new TestingCheckpointRecoveryFactory(
                        completedCheckpointStore, checkpointIDCounter);
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final Configuration configuration = new Configuration();
        // Tiny resource wait timeout so the scheduler starts as soon as slots arrive.
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
        AdaptiveSchedulerBuilder builder =
                new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
                        .setJobMasterConfiguration(configuration)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setCheckpointRecoveryFactory(checkpointRecoveryFactory)
                        .setCheckpointCleaner(checkpointCleaner)
                        .setFailureEnrichers(failureEnrichers);
        // Let the test tweak the builder before the scheduler is created.
        schedulerModifier.accept(builder);
        final AdaptiveScheduler scheduler = builder.build(EXECUTOR_RESOURCE.getExecutor());
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(PARALLELISM);
        // Cancellation requests from the scheduler are acknowledged as CANCELED on the
        // main thread so the job can reach a terminal state later.
        taskManagerGateway.setCancelConsumer(
                attemptId ->
                        mainThreadExecutor.execute(
                                () ->
                                        scheduler.updateTaskExecutionState(
                                                new TaskExecutionStateTransition(
                                                        new TaskExecutionState(
                                                                attemptId,
                                                                ExecutionState.CANCELED,
                                                                null)))));
        // Start scheduling and offer enough slots for full parallelism.
        mainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, PARALLELISM)),
                            taskManagerGateway);
                });
        // Wait until all subtasks have been deployed.
        taskManagerGateway.waitForSubmissions(PARALLELISM);
        // Collect the current execution attempt IDs (read on the main thread).
        CompletableFuture<Iterable<ArchivedExecutionVertex>> vertexFuture =
                new CompletableFuture<>();
        mainThreadExecutor.execute(
                () ->
                        vertexFuture.complete(
                                scheduler
                                        .requestJob()
                                        .getArchivedExecutionGraph()
                                        .getAllExecutionVertices()));
        final Iterable<ArchivedExecutionVertex> executionVertices = vertexFuture.get();
        final List<ExecutionAttemptID> attemptIds =
                IterableUtils.toStream(executionVertices)
                        .map(ArchivedExecutionVertex::getCurrentExecutionAttempt)
                        .map(ArchivedExecution::getAttemptId)
                        .collect(Collectors.toList());
        // Run the failure-injecting test logic on the main thread and wait for it.
        CompletableFuture<Void> runTestLogicFuture =
                CompletableFuture.runAsync(
                        () -> testLogic.accept(scheduler, attemptIds), mainThreadExecutor);
        runTestLogicFuture.get();
        // Cancel the job and wait for termination before reading the history.
        mainThreadExecutor.execute(scheduler::cancel);
        scheduler.getJobTerminationFuture().get();
        return scheduler.requestJob().getExceptionHistory();
    }
}
}
|
class AdaptiveSchedulerTest {
// One-hour timeout used as the default idle-slot/RPC timeout for slot pools in these tests.
private static final Duration DEFAULT_TIMEOUT = Duration.ofHours(1);
private static final int PARALLELISM = 4;
// Default vertex shared by most job graphs in this test; parallelism PARALLELISM.
private static final JobVertex JOB_VERTEX = createNoOpVertex("v1", PARALLELISM);
private static final Logger LOG = LoggerFactory.getLogger(AdaptiveSchedulerTest.class);

// Shared scheduled executor handed to AdaptiveSchedulerBuilder.build(...) in every test.
@RegisterExtension
public static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
        TestingUtils.defaultExecutorExtension();

// Single-threaded scheduled executor backing singleThreadMainThreadExecutor below.
@RegisterExtension
public static final TestExecutorExtension<ScheduledExecutorService> TEST_EXECUTOR_RESOURCE =
        new TestExecutorExtension<>(Executors::newSingleThreadScheduledExecutor);

// Main-thread executor driven manually from the test thread (used by synchronous tests).
private final ManuallyTriggeredComponentMainThreadExecutor mainThreadExecutor =
        new ManuallyTriggeredComponentMainThreadExecutor(Thread.currentThread());

// Real single-threaded main-thread executor (used by asynchronous tests).
private final ComponentMainThreadExecutor singleThreadMainThreadExecutor =
        ComponentMainThreadExecutorServiceAdapter.forSingleThreadExecutor(
                TEST_EXECUTOR_RESOURCE.getExecutor());

private final ClassLoader classLoader = ClassLoader.getSystemClassLoader();
@Test
void testInitialState() throws Exception {
    // A freshly built scheduler must start out in the Created state.
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final State initialState = scheduler.getState();

    assertThat(initialState).isInstanceOf(Created.class);
}
@Test
void testArchivedCheckpointingSettingsNotNullIfCheckpointingIsEnabled() throws Exception {
    // Enable checkpointing on the job graph before the scheduler is built.
    final JobGraph jobGraph = createJobGraph();
    final JobCheckpointingSettings snapshotSettings =
            new JobCheckpointingSettings(
                    CheckpointCoordinatorConfiguration.builder().build(), null);
    jobGraph.setSnapshotSettings(snapshotSettings);

    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final ArchivedExecutionGraph archivedExecutionGraph =
            scheduler.getArchivedExecutionGraph(JobStatus.INITIALIZING, null);

    // The checkpointing settings must survive archiving.
    ArchivedExecutionGraphTest.assertContainsCheckpointSettings(archivedExecutionGraph);
}
@Test
void testArchivedJobVerticesPresent() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    jobGraph.setSnapshotSettings(
            new JobCheckpointingSettings(
                    CheckpointCoordinatorConfiguration.builder().build(), null));

    final ArchivedExecutionGraph archivedExecutionGraph =
            new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor())
                    .getArchivedExecutionGraph(JobStatus.INITIALIZING, null);

    // The archived graph must expose the job vertex with its configured parallelism
    // and the expected max parallelism.
    final ArchivedExecutionJobVertex jobVertex =
            archivedExecutionGraph.getJobVertex(JOB_VERTEX.getID());
    assertThat(jobVertex).isNotNull();
    assertThat(jobVertex.getParallelism()).isEqualTo(JOB_VERTEX.getParallelism());
    assertThat(jobVertex.getMaxParallelism()).isEqualTo(128);

    ArchivedExecutionGraphTest.assertContainsCheckpointSettings(archivedExecutionGraph);
}
@Test
void testIsState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // isState must match the current state instance and reject any other instance.
    final State currentState = scheduler.getState();
    assertThat(scheduler.isState(currentState)).isTrue();
    assertThat(scheduler.isState(new DummyState())).isFalse();
}
@Test
void testRunIfState() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // The action must run because the expected state matches the current one.
    final AtomicBoolean actionExecuted = new AtomicBoolean(false);
    scheduler.runIfState(scheduler.getState(), () -> actionExecuted.set(true));

    assertThat(actionExecuted.get()).isTrue();
}
@Test
void testRunIfStateWithStateMismatch() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // The action must not run because the expected state differs from the current one.
    final AtomicBoolean actionExecuted = new AtomicBoolean(false);
    scheduler.runIfState(new DummyState(), () -> actionExecuted.set(true));

    assertThat(actionExecuted.get()).isFalse();
}
@Test
void testHasEnoughResourcesReturnsFalseIfUnsatisfied() {
    // One slot required, none offered -> the requirements cannot be satisfied.
    final ResourceCounter requiredResources =
            ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1);

    final boolean hasResources =
            AdaptiveScheduler.hasDesiredResources(requiredResources, Collections.emptyList());

    assertThat(hasResources).isFalse();
}
@Test
void testHasEnoughResourcesReturnsTrueIfSatisfied() {
    // Offer exactly the slots that are required -> the requirements are satisfied.
    final ResourceCounter requiredResources =
            ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1);
    final Collection<TestSlotInfo> freeSlots =
            createSlotInfosForResourceRequirements(requiredResources);

    assertThat(AdaptiveScheduler.hasDesiredResources(requiredResources, freeSlots)).isTrue();
}
/** Expands a {@link ResourceCounter} into one {@link TestSlotInfo} per counted slot. */
private Collection<TestSlotInfo> createSlotInfosForResourceRequirements(
        ResourceCounter resourceRequirements) {
    final Collection<TestSlotInfo> slotInfos = new ArrayList<>();
    for (Map.Entry<ResourceProfile, Integer> entry :
            resourceRequirements.getResourcesWithCount()) {
        final ResourceProfile profile = entry.getKey();
        final int count = entry.getValue();
        for (int i = 0; i < count; i++) {
            slotInfos.add(new TestSlotInfo(profile));
        }
    }
    return slotInfos;
}
@Test
void testHasEnoughResourcesUsesUnmatchedSlotsAsUnknown() {
    // The offered slots carry a concrete profile that does not directly match the UNKNOWN
    // requirement; such unmatched slots must still count towards satisfying it.
    final int numRequiredSlots = 1;
    final ResourceCounter requiredResources =
            ResourceCounter.withResource(ResourceProfile.UNKNOWN, numRequiredSlots);
    final ResourceProfile concreteProfile =
            ResourceProfile.newBuilder().setCpuCores(1).build();
    final Collection<TestSlotInfo> freeSlots =
            createSlotInfosForResourceRequirements(
                    ResourceCounter.withResource(concreteProfile, numRequiredSlots));

    assertThat(AdaptiveScheduler.hasDesiredResources(requiredResources, freeSlots)).isTrue();
}
@Test
void testExecutionGraphGenerationWithAvailableResources() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    // Tiny resource wait timeout so the scheduler starts executing as soon as the
    // (deliberately insufficient) 2 slots arrive, scaling the vertex down to 2.
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .setJobMasterConfiguration(configuration)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final int numAvailableSlots = 2;
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(numAvailableSlots);
    // Start scheduling and offer the slots on the main thread.
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(
                                        ResourceProfile.UNKNOWN, numAvailableSlots)),
                        taskManagerGateway);
            });
    // Wait until all subtasks have been deployed before inspecting the graph.
    taskManagerGateway.waitForSubmissions(numAvailableSlots);
    final ArchivedExecutionGraph executionGraph =
            CompletableFuture.supplyAsync(
                            () -> scheduler.requestJob().getArchivedExecutionGraph(),
                            singleThreadMainThreadExecutor)
                    .join();
    // The vertex parallelism must have been adjusted to the available slot count.
    assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
            .isEqualTo(numAvailableSlots);
    // The JSON plan must contain exactly the single vertex of the job.
    assertThat(
                    JacksonMapperFactory.createObjectMapper()
                            .readTree(executionGraph.getJsonPlan())
                            .get("nodes")
                            .size())
            .isEqualTo(1);
}
@Test
void testExecutionGraphGenerationSetsInitializationTimestamp() throws Exception {
    final long initializationTimestamp = 42L;
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    // Tiny resource wait timeout so execution starts as soon as slots are offered.
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
    final AdaptiveScheduler adaptiveScheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setInitializationTimestamp(initializationTimestamp)
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .setJobMasterConfiguration(configuration)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(PARALLELISM);
    // Start scheduling and offer slots for full parallelism on the main thread.
    singleThreadMainThreadExecutor.execute(
            () -> {
                adaptiveScheduler.startScheduling();
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(
                                        ResourceProfile.UNKNOWN, PARALLELISM)),
                        taskManagerGateway);
            });
    // One deployed subtask is enough: the execution graph must exist by then.
    taskManagerGateway.waitForSubmissions(1);
    final ArchivedExecutionGraph executionGraph =
            CompletableFuture.supplyAsync(
                            () -> adaptiveScheduler.requestJob().getArchivedExecutionGraph(),
                            singleThreadMainThreadExecutor)
                    .join();
    // The generated execution graph must carry the timestamp given to the builder.
    assertThat(executionGraph.getStatusTimestamp(JobStatus.INITIALIZING))
            .isEqualTo(initializationTimestamp);
}
@Test
void testInitializationTimestampForwarding() throws Exception {
    // The timestamp handed to the builder must show up as the INITIALIZING status time.
    final long expectedInitializationTimestamp = 42L;

    final AdaptiveScheduler adaptiveScheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setInitializationTimestamp(expectedInitializationTimestamp)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final ArchivedExecutionGraph archivedExecutionGraph =
            adaptiveScheduler.requestJob().getArchivedExecutionGraph();
    assertThat(archivedExecutionGraph.getStatusTimestamp(JobStatus.INITIALIZING))
            .isEqualTo(expectedInitializationTimestamp);
}
@Test
void testFatalErrorsForwardedToFatalErrorHandler() throws Exception {
    final TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setFatalErrorHandler(fatalErrorHandler)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // An exception thrown from a state action must reach the fatal error handler.
    final RuntimeException expectedException = new RuntimeException();
    scheduler.runIfState(
            scheduler.getState(),
            () -> {
                throw expectedException;
            });

    assertThat(fatalErrorHandler.getException()).isEqualTo(expectedException);
}
@Test
void testResourceTimeout() throws Exception {
    final ManuallyTriggeredComponentMainThreadExecutor mainThreadExecutor =
            new ManuallyTriggeredComponentMainThreadExecutor(Thread.currentThread());
    final Duration resourceTimeout = Duration.ofMinutes(1234);
    final Configuration configuration = new Configuration();
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, resourceTimeout);

    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobMasterConfiguration(configuration)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    scheduler.startScheduling();

    // Starting the scheduling must register a non-periodic task whose delay equals the
    // configured resource wait timeout.
    final boolean timeoutTaskScheduled =
            mainThreadExecutor.getActiveNonPeriodicScheduledTask().stream()
                    .anyMatch(
                            scheduledTask ->
                                    scheduledTask.getDelay(TimeUnit.MINUTES)
                                            == resourceTimeout.toMinutes());
    assertThat(timeoutTaskScheduled).isTrue();
}
@Test
void testNumRestartsMetric() throws Exception {
    // Capture the numRestarts gauge when it gets registered.
    final CompletableFuture<Gauge<Long>> numRestartsMetricFuture = new CompletableFuture<>();
    final MetricRegistry metricRegistry =
            TestingMetricRegistry.builder()
                    .setRegisterConsumer(
                            (metric, name, group) -> {
                                if (MetricNames.NUM_RESTARTS.equals(name)) {
                                    numRestartsMetricFuture.complete((Gauge<Long>) metric);
                                }
                            })
                    .build();
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            new DefaultDeclarativeSlotPool(
                    jobGraph.getJobID(),
                    new DefaultAllocatedSlotPool(),
                    ignored -> {},
                    Time.minutes(10),
                    Time.minutes(10));
    final Configuration configuration = new Configuration();
    // Any parallelism increase (>= 1 slot) should trigger a rescale/restart.
    configuration.set(JobManagerOptions.MIN_PARALLELISM_INCREASE, 1);
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setJobMasterConfiguration(configuration)
                    .setJobManagerJobMetricGroup(
                            JobManagerMetricGroup.createJobManagerMetricGroup(
                                    metricRegistry, "localhost")
                            .addJob(new JobID(), "jobName"))
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final Gauge<Long> numRestartsMetric = numRestartsMetricFuture.get();
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
    taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
    // Phase 1: start with a single slot -> job runs at parallelism 1, no restart yet.
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                declarativeSlotPool.offerSlots(
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                        new LocalTaskManagerLocation(),
                        taskManagerGateway,
                        System.currentTimeMillis());
            });
    taskManagerGateway.waitForSubmissions(1);
    assertThat(numRestartsMetric.getValue()).isEqualTo(0L);
    // Phase 2: offer more slots -> the scheduler rescales, counting as one restart.
    singleThreadMainThreadExecutor.execute(
            () -> {
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(
                                        ResourceProfile.UNKNOWN, PARALLELISM)),
                        taskManagerGateway);
            });
    taskManagerGateway.waitForSubmissions(PARALLELISM);
    assertThat(numRestartsMetric.getValue()).isEqualTo(1L);
}
@Test
void testStatusMetrics() throws Exception {
    // Capture the uptime/downtime/restarting-time gauges as they are registered.
    final CompletableFuture<UpTimeGauge> upTimeMetricFuture = new CompletableFuture<>();
    final CompletableFuture<DownTimeGauge> downTimeMetricFuture = new CompletableFuture<>();
    final CompletableFuture<Gauge<Long>> restartTimeMetricFuture = new CompletableFuture<>();
    final MetricRegistry metricRegistry =
            TestingMetricRegistry.builder()
                    .setRegisterConsumer(
                            (metric, name, group) -> {
                                switch (name) {
                                    case UpTimeGauge.METRIC_NAME:
                                        upTimeMetricFuture.complete((UpTimeGauge) metric);
                                        break;
                                    case DownTimeGauge.METRIC_NAME:
                                        downTimeMetricFuture.complete((DownTimeGauge) metric);
                                        break;
                                    case "restartingTimeTotal":
                                        restartTimeMetricFuture.complete((Gauge<Long>) metric);
                                        break;
                                }
                            })
                    .build();
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    // Any parallelism increase should trigger a rescale; short resource wait timeout.
    configuration.set(JobManagerOptions.MIN_PARALLELISM_INCREASE, 1);
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(10L));
    configuration.set(
            MetricOptions.JOB_STATUS_METRICS,
            Arrays.asList(MetricOptions.JobStatusMetrics.TOTAL_TIME));
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setJobMasterConfiguration(configuration)
                    .setJobManagerJobMetricGroup(
                            JobManagerMetricGroup.createJobManagerMetricGroup(
                                    metricRegistry, "localhost")
                            .addJob(new JobID(), "jobName"))
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final UpTimeGauge upTimeGauge = upTimeMetricFuture.get();
    final DownTimeGauge downTimeGauge = downTimeMetricFuture.get();
    final Gauge<Long> restartTimeGauge = restartTimeMetricFuture.get();
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
    taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));
    // Phase 1: run with a single slot and check uptime grows while the others stay 0.
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                        taskManagerGateway);
            });
    taskManagerGateway.waitForSubmissions(1);
    CommonTestUtils.waitUntilCondition(() -> upTimeGauge.getValue() > 0L);
    assertThat(downTimeGauge.getValue()).isEqualTo(0L);
    assertThat(restartTimeGauge.getValue()).isEqualTo(0L);
    // Phase 2: offer another slot, causing a rescale; downtime must stay 0 and the
    // restarting time must be reported (>= 0).
    singleThreadMainThreadExecutor.execute(
            () -> {
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                        taskManagerGateway);
            });
    taskManagerGateway.waitForSubmissions(2);
    CommonTestUtils.waitUntilCondition(() -> upTimeGauge.getValue() > 0L);
    assertThat(downTimeGauge.getValue()).isEqualTo(0L);
    assertThat(restartTimeGauge.getValue()).isGreaterThanOrEqualTo(0L);
}
@Test
void testStartSchedulingTransitionsToWaitingForResources() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    scheduler.startScheduling();

    // With no slots offered yet, the scheduler must be waiting for resources.
    assertThat(scheduler.getState()).isInstanceOf(WaitingForResources.class);
}
@Test
void testStartSchedulingSetsResourceRequirementsForDefaultMode() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    scheduler.startScheduling();

    // In default mode the declared requirement equals the configured parallelism.
    final ResourceRequirement expectedRequirement =
            ResourceRequirement.create(ResourceProfile.UNKNOWN, PARALLELISM);
    assertThat(declarativeSlotPool.getResourceRequirements()).contains(expectedRequirement);
}
@Test
void testStartSchedulingSetsResourceRequirementsForReactiveMode() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    configuration.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .setJobMasterConfiguration(configuration)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    scheduler.startScheduling();

    // In reactive mode the scheduler declares the default max parallelism derived from
    // the configured parallelism, not the configured parallelism itself.
    final int expectedParallelism =
            KeyGroupRangeAssignment.computeDefaultMaxParallelism(PARALLELISM);
    assertThat(declarativeSlotPool.getResourceRequirements())
            .contains(ResourceRequirement.create(ResourceProfile.UNKNOWN, expectedParallelism));
}
/** Tests that the listener for new slots is properly set up. */
@Test
void testResourceAcquisitionTriggersJobExecution() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    // Tiny resource wait timeout so execution starts as soon as slots are offered.
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .setJobMasterConfiguration(configuration)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(PARALLELISM);
    // Snapshot the state right after startScheduling, before slots arrive.
    CompletableFuture<State> startingStateFuture = new CompletableFuture<>();
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                startingStateFuture.complete(scheduler.getState());
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(
                                        ResourceProfile.UNKNOWN, PARALLELISM)),
                        taskManagerGateway);
            });
    // Without slots the scheduler must have been waiting for resources.
    assertThat(startingStateFuture.get()).isInstanceOf(WaitingForResources.class);
    // Offering the slots must trigger deployment of all subtasks.
    taskManagerGateway.waitForSubmissions(PARALLELISM);
    final ArchivedExecutionGraph executionGraph =
            CompletableFuture.supplyAsync(
                            () -> scheduler.requestJob().getArchivedExecutionGraph(),
                            singleThreadMainThreadExecutor)
                    .get();
    assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
            .isEqualTo(PARALLELISM);
}
@Test
void testGoToFinished() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final ArchivedExecutionGraph archivedExecutionGraph =
            new ArchivedExecutionGraphBuilder().setState(JobStatus.FAILED).build();

    // Transitioning with a terminal graph must land the scheduler in Finished.
    scheduler.goToFinished(archivedExecutionGraph);

    assertThat(scheduler.getState()).isInstanceOf(Finished.class);
}
@Test
void testJobStatusListenerOnlyCalledIfJobStatusChanges() throws Exception {
    final AtomicInteger numStatusUpdates = new AtomicInteger();
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .setJobStatusListener(
                            (jobId, newJobStatus, timestamp) ->
                                    numStatusUpdates.incrementAndGet())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // Sanity check: the freshly created scheduler already reports INITIALIZING.
    assertThat(scheduler.requestJobStatus())
            .withFailMessage("Assumption about job status for Scheduler@Created is incorrect.")
            .isEqualTo(JobStatus.INITIALIZING);

    // Transitioning into a state with the same INITIALIZING status must not notify.
    scheduler.transitionToState(new DummyState.Factory(JobStatus.INITIALIZING));
    assertThat(numStatusUpdates.get()).isEqualTo(0);
}
@Test
void testJobStatusListenerNotifiedOfJobStatusChanges() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final DefaultDeclarativeSlotPool declarativeSlotPool =
            createDeclarativeSlotPool(jobGraph.getJobID());
    final Configuration configuration = new Configuration();
    // Tiny resource wait timeout so execution starts as soon as a slot is offered.
    configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
    // One future per expected status; any other status completes the "unexpected" one.
    final CompletableFuture<Void> jobCreatedNotification = new CompletableFuture<>();
    final CompletableFuture<Void> jobRunningNotification = new CompletableFuture<>();
    final CompletableFuture<Void> jobFinishedNotification = new CompletableFuture<>();
    final CompletableFuture<JobStatus> unexpectedJobStatusNotification =
            new CompletableFuture<>();
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setJobMasterConfiguration(configuration)
                    .setJobStatusListener(
                            (jobId, newJobStatus, timestamp) -> {
                                switch (newJobStatus) {
                                    case CREATED:
                                        jobCreatedNotification.complete(null);
                                        break;
                                    case RUNNING:
                                        jobRunningNotification.complete(null);
                                        break;
                                    case FINISHED:
                                        jobFinishedNotification.complete(null);
                                        break;
                                    default:
                                        unexpectedJobStatusNotification.complete(newJobStatus);
                                }
                            })
                    .setDeclarativeSlotPool(declarativeSlotPool)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    final SubmissionBufferingTaskManagerGateway taskManagerGateway =
            new SubmissionBufferingTaskManagerGateway(1 + PARALLELISM);
    // Start scheduling with a single slot; the vertex runs at parallelism 1.
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                offerSlots(
                        declarativeSlotPool,
                        createSlotOffersForResourceRequirements(
                                ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                        taskManagerGateway);
            });
    // Finish the single deployed subtask to drive the job to FINISHED.
    final TaskDeploymentDescriptor submittedTask = taskManagerGateway.submittedTasks.take();
    singleThreadMainThreadExecutor.execute(
            () ->
                    scheduler.updateTaskExecutionState(
                            new TaskExecutionState(
                                    submittedTask.getExecutionAttemptId(),
                                    ExecutionState.FINISHED)));
    // The listener must have seen exactly CREATED -> RUNNING -> FINISHED.
    jobCreatedNotification.get();
    jobRunningNotification.get();
    jobFinishedNotification.get();
    assertThat(unexpectedJobStatusNotification.isDone()).isFalse();
}
@Test
void testCloseShutsDownCheckpointingComponents() throws Exception {
    // Futures complete with the job status passed to shutdown(...) of each component.
    final CompletableFuture<JobStatus> completedCheckpointStoreShutdownFuture =
            new CompletableFuture<>();
    final CompletedCheckpointStore completedCheckpointStore =
            TestingCompletedCheckpointStore
                    .createStoreWithShutdownCheckAndNoCompletedCheckpoints(
                            completedCheckpointStoreShutdownFuture);
    final CompletableFuture<JobStatus> checkpointIdCounterShutdownFuture =
            new CompletableFuture<>();
    final CheckpointIDCounter checkpointIdCounter =
            TestingCheckpointIDCounter.createStoreWithShutdownCheckAndNoStartAction(
                    checkpointIdCounterShutdownFuture);
    final JobGraph jobGraph = createJobGraph();
    // Checkpointing must be enabled for the components to be wired up at all.
    jobGraph.setSnapshotSettings(
            new JobCheckpointingSettings(
                    CheckpointCoordinatorConfiguration.builder().build(), null));
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                    .setCheckpointRecoveryFactory(
                            new TestingCheckpointRecoveryFactory(
                                    completedCheckpointStore, checkpointIdCounter))
                    .build(EXECUTOR_RESOURCE.getExecutor());
    // Fail the job globally, then close; closing must shut down both checkpointing
    // components with the terminal FAILED status.
    singleThreadMainThreadExecutor.execute(
            () -> {
                scheduler.startScheduling();
                scheduler.handleGlobalFailure(new FlinkException("Test exception"));
                scheduler.closeAsync();
            });
    assertThat(completedCheckpointStoreShutdownFuture.get()).isEqualTo(JobStatus.FAILED);
    assertThat(checkpointIdCounterShutdownFuture.get()).isEqualTo(JobStatus.FAILED);
}
/**
 * Verifies that {@code transitionToState} invokes {@code onLeave} on the state being left,
 * passing the class of the successor state.
 */
@Test
void testTransitionToStateCallsOnLeave() throws Exception {
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final LifecycleMethodCapturingState firstState = new LifecycleMethodCapturingState();
    scheduler.transitionToState(new StateInstanceFactory(firstState));
    // Discard anything recorded while entering firstState itself.
    firstState.reset();

    scheduler.transitionToState(new DummyState.Factory());

    assertThat(firstState.onLeaveCalled).isTrue();
    // isEqualTo is null-safe and yields a descriptive failure message, unlike asserting
    // the boolean result of equals().
    assertThat(firstState.onLeaveNewStateArgument).isEqualTo(DummyState.class);
}
    @Test
    void testConsistentMaxParallelism() throws Exception {
        final int parallelism = 240;
        // The max parallelism is derived once from the configured parallelism and must stay
        // stable across later rescales.
        final int expectedMaxParallelism =
                KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism);
        final JobVertex vertex = createNoOpVertex(parallelism);
        final JobGraph jobGraph = streamingJobGraph(vertex);

        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());

        final Configuration configuration = new Configuration();
        // Tiny resource wait timeout so the scheduler starts with whatever slots are offered.
        configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));

        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setJobMasterConfiguration(configuration)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                new SubmissionBufferingTaskManagerGateway(1 + parallelism);
        taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));

        // Phase 1: offer a single slot, forcing the job to run with parallelism 1.
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.startScheduling();
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(ResourceProfile.UNKNOWN, 1)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(1);

        ArchivedExecutionGraph executionGraph =
                getArchivedExecutionGraphForRunningJob(scheduler).get();
        ArchivedExecutionJobVertex archivedVertex = executionGraph.getJobVertex(vertex.getID());
        assertThat(archivedVertex.getParallelism()).isEqualTo(1);
        assertThat(archivedVertex.getMaxParallelism()).isEqualTo(expectedMaxParallelism);

        // Phase 2: offer enough slots for the full parallelism; the job should scale up ...
        singleThreadMainThreadExecutor.execute(
                () -> {
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, parallelism)),
                            taskManagerGateway);
                });
        taskManagerGateway.waitForSubmissions(parallelism);

        ArchivedExecutionGraph resubmittedExecutionGraph =
                getArchivedExecutionGraphForRunningJob(scheduler).get();
        ArchivedExecutionJobVertex resubmittedArchivedVertex =
                resubmittedExecutionGraph.getJobVertex(vertex.getID());
        // ... while the max parallelism must remain the one computed for the original graph.
        assertThat(resubmittedArchivedVertex.getParallelism()).isEqualTo(parallelism);
        assertThat(resubmittedArchivedVertex.getMaxParallelism()).isEqualTo(expectedMaxParallelism);
    }
    @Test
    void testRequirementIncreaseTriggersScaleUp() throws Exception {
        final JobGraph jobGraph = createJobGraph();
        final DefaultDeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID());
        final AdaptiveScheduler scheduler =
                createSchedulerWithNoResourceWaitTimeout(jobGraph, declarativeSlotPool);
        final int scaledUpParallelism = PARALLELISM * 2;
        final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                createSubmissionBufferingTaskManagerGateway(scaledUpParallelism, scheduler);

        // Bring the job up at its initial parallelism first.
        startJobWithSlotsMatchingParallelism(
                scheduler, declarativeSlotPool, taskManagerGateway, PARALLELISM);
        awaitJobReachingParallelism(taskManagerGateway, scheduler, PARALLELISM);

        // Raise the requirements' upper bound and offer the matching extra slots; both steps
        // must run on the scheduler's main thread.
        JobResourceRequirements newJobResourceRequirements =
                createRequirementsWithUpperParallelism(scaledUpParallelism);
        singleThreadMainThreadExecutor.execute(
                () -> {
                    scheduler.updateJobResourceRequirements(newJobResourceRequirements);
                    offerSlots(
                            declarativeSlotPool,
                            createSlotOffersForResourceRequirements(
                                    ResourceCounter.withResource(
                                            ResourceProfile.UNKNOWN, PARALLELISM)),
                            taskManagerGateway);
                });

        // The job should restart with the doubled parallelism.
        awaitJobReachingParallelism(taskManagerGateway, scheduler, scaledUpParallelism);
    }
@Test
void testRequirementDecreaseTriggersScaleDown() throws Exception {
final JobGraph jobGraph = createJobGraph();
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final AdaptiveScheduler scheduler =
createSchedulerWithNoResourceWaitTimeout(jobGraph, declarativeSlotPool);
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
createSubmissionBufferingTaskManagerGateway(PARALLELISM, scheduler);
startJobWithSlotsMatchingParallelism(
scheduler, declarativeSlotPool, taskManagerGateway, PARALLELISM);
awaitJobReachingParallelism(taskManagerGateway, scheduler, PARALLELISM);
int scaledDownParallelism = PARALLELISM - 1;
JobResourceRequirements newJobResourceRequirements =
createRequirementsWithUpperParallelism(scaledDownParallelism);
singleThreadMainThreadExecutor.execute(
() -> scheduler.updateJobResourceRequirements(newJobResourceRequirements));
awaitJobReachingParallelism(taskManagerGateway, scheduler, scaledDownParallelism);
}
private AdaptiveScheduler createSchedulerWithNoResourceWaitTimeout(
JobGraph jobGraph, DeclarativeSlotPool declarativeSlotPool) throws Exception {
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
return new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
.setDeclarativeSlotPool(declarativeSlotPool)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
}
private SubmissionBufferingTaskManagerGateway createSubmissionBufferingTaskManagerGateway(
int parallelism, SchedulerNG scheduler) {
SubmissionBufferingTaskManagerGateway taskManagerGateway =
new SubmissionBufferingTaskManagerGateway(parallelism);
taskManagerGateway.setCancelConsumer(
executionAttemptID ->
singleThreadMainThreadExecutor.execute(
() ->
scheduler.updateTaskExecutionState(
new TaskExecutionState(
executionAttemptID,
ExecutionState.CANCELED))));
return taskManagerGateway;
}
private void startJobWithSlotsMatchingParallelism(
SchedulerNG scheduler,
DeclarativeSlotPool declarativeSlotPool,
TaskManagerGateway taskManagerGateway,
int parallelism) {
singleThreadMainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, parallelism)),
taskManagerGateway);
});
}
private void awaitJobReachingParallelism(
SubmissionBufferingTaskManagerGateway taskManagerGateway,
SchedulerNG scheduler,
int parallelism)
throws Exception {
taskManagerGateway.waitForSubmissions(parallelism);
final ArchivedExecutionGraph executionGraph =
CompletableFuture.supplyAsync(
() -> scheduler.requestJob().getArchivedExecutionGraph(),
singleThreadMainThreadExecutor)
.get();
assertThat(executionGraph.getJobVertex(JOB_VERTEX.getID()).getParallelism())
.isEqualTo(parallelism);
}
private static JobResourceRequirements createRequirementsWithUpperParallelism(int parallelism) {
return new JobResourceRequirements(
Collections.singletonMap(
JOB_VERTEX.getID(),
new JobVertexResourceRequirements(
new JobVertexResourceRequirements.Parallelism(1, parallelism))));
}
@Test
void testHowToHandleFailureRejectedByStrategy() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setRestartBackoffTimeStrategy(NoRestartBackoffTimeStrategy.INSTANCE)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(scheduler.howToHandleFailure(new Exception("test")).canRestart()).isFalse();
}
@Test
void testHowToHandleFailureAllowedByStrategy() throws Exception {
final TestRestartBackoffTimeStrategy restartBackoffTimeStrategy =
new TestRestartBackoffTimeStrategy(true, 1234);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setRestartBackoffTimeStrategy(restartBackoffTimeStrategy)
.build(EXECUTOR_RESOURCE.getExecutor());
final FailureResult failureResult = scheduler.howToHandleFailure(new Exception("test"));
assertThat(failureResult.canRestart()).isTrue();
assertThat(failureResult.getBackoffTime().toMillis())
.isEqualTo(restartBackoffTimeStrategy.getBackoffTime());
}
@Test
void testHowToHandleFailureUnrecoverableFailure() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(
scheduler
.howToHandleFailure(
new SuppressRestartsException(new Exception("test")))
.canRestart())
.isFalse();
}
@Test
void testExceptionHistoryWithGlobalFailure() throws Exception {
final Exception expectedException = new Exception("Expected Global Exception");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> scheduler.handleGlobalFailure(expectedException);
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getTaskManagerLocation()).isNull();
assertThat(failure.getFailingTaskName()).isNull();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
/** Verify AdaptiveScheduler propagates failure labels as generated by Failure Enrichers. */
@Test
void testExceptionHistoryWithTaskFailureLabels() throws Exception {
final Exception taskException = new Exception("Task Exception");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, taskException)));
};
final TestingFailureEnricher failureEnricher = new TestingFailureEnricher();
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withFailureEnrichers(Collections.singletonList(failureEnricher))
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader)).isEqualTo(taskException);
assertThat(failure.getFailureLabels()).isEqualTo(failureEnricher.getFailureLabels());
}
@Test
@Test
void testExceptionHistoryWithTaskFailureWithRestart() throws Exception {
final Exception expectedException = new Exception("Expected Local Exception");
final Consumer<AdaptiveSchedulerBuilder> setupScheduler =
builder ->
builder.setRestartBackoffTimeStrategy(
new FixedDelayRestartBackoffTimeStrategy
.FixedDelayRestartBackoffTimeStrategyFactory(1, 100)
.create());
final BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException)));
};
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.withModifiedScheduler(setupScheduler)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
@Test
void testExceptionHistoryWithTaskFailureFromStopWithSavepoint() throws Exception {
final Exception expectedException = new Exception("Expected Local Exception");
Consumer<JobGraph> setupJobGraph =
jobGraph ->
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
CheckpointCoordinatorConfiguration.builder()
.setCheckpointInterval(Long.MAX_VALUE)
.build(),
null));
final CompletedCheckpointStore completedCheckpointStore =
new StandaloneCompletedCheckpointStore(1);
final CheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();
final CheckpointsCleaner checkpointCleaner = new CheckpointsCleaner();
TestingCheckpointRecoveryFactory checkpointRecoveryFactory =
new TestingCheckpointRecoveryFactory(completedCheckpointStore, checkpointIDCounter);
Consumer<AdaptiveSchedulerBuilder> setupScheduler =
builder ->
builder.setCheckpointRecoveryFactory(checkpointRecoveryFactory)
.setCheckpointCleaner(checkpointCleaner);
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.stopWithSavepoint(
"file:
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException)));
for (ExecutionAttemptID id : attemptIds) {
scheduler.declineCheckpoint(
new DeclineCheckpoint(
scheduler.requestJob().getJobId(),
id,
checkpointIDCounter.get() - 1,
new CheckpointException(
CheckpointFailureReason.IO_EXCEPTION)));
}
};
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.withModifiedScheduler(setupScheduler)
.withModifiedJobGraph(setupJobGraph)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
}
@Test
void testExceptionHistoryWithTaskConcurrentGlobalFailure() throws Exception {
final Exception expectedException1 = new Exception("Expected Global Exception 1");
final Exception expectedException2 = new Exception("Expected Global Exception 2");
final BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
scheduler.handleGlobalFailure(expectedException1);
scheduler.handleGlobalFailure(expectedException2);
};
final Iterable<RootExceptionHistoryEntry> entries =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(entries).hasSize(1);
final RootExceptionHistoryEntry failure = entries.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException1);
final Iterable<ExceptionHistoryEntry> concurrentExceptions =
failure.getConcurrentExceptions();
final List<Throwable> foundExceptions =
IterableUtils.toStream(concurrentExceptions)
.map(ExceptionHistoryEntry::getException)
.map(exception -> exception.deserializeError(classLoader))
.collect(Collectors.toList());
assertThat(foundExceptions).containsExactly(expectedException2);
}
@Test
void testExceptionHistoryWithTaskConcurrentFailure() throws Exception {
final Exception expectedException1 = new Exception("Expected Local Exception 1");
final Exception expectedException2 = new Exception("Expected Local Exception 2");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.remove(0);
final ExecutionAttemptID attemptId2 = attemptIds.remove(0);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException1)));
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId2,
ExecutionState.FAILED,
expectedException2)));
};
final Iterable<RootExceptionHistoryEntry> entries =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(entries).hasSize(1);
final RootExceptionHistoryEntry failure = entries.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException1);
final Iterable<ExceptionHistoryEntry> concurrentExceptions =
failure.getConcurrentExceptions();
final List<Throwable> foundExceptions =
IterableUtils.toStream(concurrentExceptions)
.map(ExceptionHistoryEntry::getException)
.map(exception -> exception.deserializeError(classLoader))
.collect(Collectors.toList());
assertThat(foundExceptions).isEmpty();
}
@Test
void testRepeatedTransitionIntoCurrentStateFails() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
final State state = scheduler.getState();
assertThat(state).isInstanceOf(Created.class);
assertThatThrownBy(() -> scheduler.transitionToState(new Created.Factory(scheduler, LOG)))
.isInstanceOf(IllegalStateException.class);
}
@Test
void testTriggerSavepointFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatFuture(
scheduler.triggerSavepoint(
"some directory", false, SavepointFormatType.CANONICAL))
.eventuallyFailsWith(ExecutionException.class)
.withCauseInstanceOf(CheckpointException.class);
}
@Test
void testStopWithSavepointFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatFuture(
scheduler.triggerSavepoint(
"some directory", false, SavepointFormatType.CANONICAL))
.eventuallyFailsWith(ExecutionException.class)
.withCauseInstanceOf(CheckpointException.class);
}
@Test
void testDeliverOperatorEventToCoordinatorFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatThrownBy(
() ->
scheduler.deliverOperatorEventToCoordinator(
createExecutionAttemptId(),
new OperatorID(),
new TestOperatorEvent()))
.isInstanceOf(TaskNotRunningException.class);
}
@Test
void testDeliverCoordinationRequestToCoordinatorFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatFuture(
scheduler.deliverCoordinationRequestToCoordinator(
new OperatorID(), new CoordinationRequest() {}))
.eventuallyFailsWith(ExecutionException.class)
.withCauseInstanceOf(FlinkException.class);
}
@Test
void testUpdateTaskExecutionStateReturnsFalseInIllegalState() throws Exception {
final JobGraph jobGraph = createJobGraph();
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
createExecutionAttemptId(),
ExecutionState.FAILED))))
.isFalse();
}
@Test
void testRequestNextInputSplitFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatThrownBy(
() ->
scheduler.requestNextInputSplit(
JOB_VERTEX.getID(), createExecutionAttemptId()))
.isInstanceOf(IOException.class);
}
@Test
public void testRequestPartitionStateFailsInIllegalState() throws Exception {
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatThrownBy(
() ->
scheduler.requestPartitionState(
new IntermediateDataSetID(), new ResultPartitionID()))
.isInstanceOf(PartitionProducerDisposedException.class);
}
@Test
void testTryToAssignSlotsReturnsNotPossibleIfExpectedResourcesAreNotAvailable()
throws Exception {
final TestingSlotAllocator slotAllocator =
TestingSlotAllocator.newBuilder()
.setTryReserveResourcesFunction(ignored -> Optional.empty())
.build();
final AdaptiveScheduler adaptiveScheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setSlotAllocator(slotAllocator)
.build(EXECUTOR_RESOURCE.getExecutor());
final CreatingExecutionGraph.AssignmentResult assignmentResult =
adaptiveScheduler.tryToAssignSlots(
CreatingExecutionGraph.ExecutionGraphWithVertexParallelism.create(
new StateTrackingMockExecutionGraph(), JobSchedulingPlan.empty()));
assertThat(assignmentResult.isSuccess()).isFalse();
}
@Test
void testComputeVertexParallelismStoreForExecutionInReactiveMode() {
JobVertex v1 = createNoOpVertex("v1", 1, 50);
JobVertex v2 = createNoOpVertex("v2", 50, 50);
JobGraph graph = streamingJobGraph(v1, v2);
VertexParallelismStore parallelismStore =
AdaptiveScheduler.computeVertexParallelismStoreForExecution(
graph,
SchedulerExecutionMode.REACTIVE,
SchedulerBase::getDefaultMaxParallelism);
for (JobVertex vertex : graph.getVertices()) {
VertexParallelismInformation info = parallelismStore.getParallelismInfo(vertex.getID());
assertThat(info.getParallelism()).isEqualTo(vertex.getParallelism());
assertThat(info.getMaxParallelism()).isEqualTo(vertex.getMaxParallelism());
}
}
@Test
void testComputeVertexParallelismStoreForExecutionInDefaultMode() {
JobVertex v1 = createNoOpVertex("v1", 1, 50);
JobVertex v2 = createNoOpVertex("v2", 50, 50);
JobGraph graph = streamingJobGraph(v1, v2);
VertexParallelismStore parallelismStore =
AdaptiveScheduler.computeVertexParallelismStoreForExecution(
graph, null, SchedulerBase::getDefaultMaxParallelism);
for (JobVertex vertex : graph.getVertices()) {
VertexParallelismInformation info = parallelismStore.getParallelismInfo(vertex.getID());
assertThat(info.getParallelism()).isEqualTo(vertex.getParallelism());
assertThat(info.getMaxParallelism()).isEqualTo(vertex.getMaxParallelism());
}
}
    @Test
    void testCheckpointCleanerIsClosedAfterCheckpointServices() throws Exception {
        final ScheduledExecutorService executorService =
                Executors.newSingleThreadScheduledExecutor();
        try {
            // Delegates to the shared test logic; only the scheduler factory is specific here.
            DefaultSchedulerTest.doTestCheckpointCleanerIsClosedAfterCheckpointServices(
                    (checkpointRecoveryFactory, checkpointCleaner) -> {
                        final JobGraph jobGraph = createJobGraph();
                        // Checkpointing components only exist for checkpointing-enabled jobs.
                        enableCheckpointing(jobGraph);
                        try {
                            return new AdaptiveSchedulerBuilder(
                                            jobGraph,
                                            ComponentMainThreadExecutorServiceAdapter
                                                    .forSingleThreadExecutor(executorService))
                                    .setCheckpointRecoveryFactory(checkpointRecoveryFactory)
                                    .setCheckpointCleaner(checkpointCleaner)
                                    .build(EXECUTOR_RESOURCE.getExecutor());
                        } catch (Exception e) {
                            // The factory interface does not allow checked exceptions.
                            throw new RuntimeException(e);
                        }
                    },
                    executorService,
                    LOG);
        } finally {
            executorService.shutdownNow();
        }
    }
    @Test
    void testIdleSlotsAreReleasedAfterDownScalingTriggeredByLoweredResourceRequirements()
            throws Exception {
        final JobGraph jobGraph = createJobGraph();
        // A very small idle timeout makes slots freed by the downscale get released quickly.
        final Duration slotIdleTimeout = Duration.ofMillis(10);

        final Configuration configuration = new Configuration();
        configuration.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, slotIdleTimeout.toMillis());

        final DeclarativeSlotPool declarativeSlotPool =
                createDeclarativeSlotPool(jobGraph.getJobID(), slotIdleTimeout);
        final AdaptiveScheduler scheduler =
                new AdaptiveSchedulerBuilder(jobGraph, singleThreadMainThreadExecutor)
                        .setDeclarativeSlotPool(declarativeSlotPool)
                        .setJobMasterConfiguration(configuration)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        try {
            final int numInitialSlots = 4;
            final int numSlotsAfterDownscaling = 2;

            final SubmissionBufferingTaskManagerGateway taskManagerGateway =
                    new SubmissionBufferingTaskManagerGateway(numInitialSlots);
            taskManagerGateway.setCancelConsumer(createCancelConsumer(scheduler));

            // Start the job with all initial slots.
            singleThreadMainThreadExecutor.execute(
                    () -> {
                        scheduler.startScheduling();
                        offerSlots(
                                declarativeSlotPool,
                                createSlotOffersForResourceRequirements(
                                        ResourceCounter.withResource(
                                                ResourceProfile.UNKNOWN, numInitialSlots)),
                                taskManagerGateway);
                    });
            taskManagerGateway.waitForSubmissions(numInitialSlots);

            // Lower the upper parallelism bound, forcing a downscale.
            singleThreadMainThreadExecutor.execute(
                    () ->
                            scheduler.updateJobResourceRequirements(
                                    JobResourceRequirements.newBuilder()
                                            .setParallelismForJobVertex(
                                                    JOB_VERTEX.getID(), 1, numSlotsAfterDownscaling)
                                            .build()));
            taskManagerGateway.waitForSubmissions(numSlotsAfterDownscaling);
            // The now-idle surplus slots must be handed back to the task manager.
            taskManagerGateway.waitForFreedSlots(numInitialSlots - numSlotsAfterDownscaling);

            final CompletableFuture<JobStatus> jobStatusFuture = new CompletableFuture<>();
            singleThreadMainThreadExecutor.execute(
                    () -> jobStatusFuture.complete(scheduler.getState().getJobStatus()));
            assertThatFuture(jobStatusFuture).eventuallySucceeds().isEqualTo(JobStatus.RUNNING);
            // Beyond the expected surplus, no further slots may have been freed.
            assertThat(taskManagerGateway.freedSlots).isEmpty();
        } finally {
            // Always shut the scheduler down on its main thread to avoid leaking resources.
            final CompletableFuture<Void> closeFuture = new CompletableFuture<>();
            singleThreadMainThreadExecutor.execute(
                    () -> FutureUtils.forward(scheduler.closeAsync(), closeFuture));
            assertThatFuture(closeFuture).eventuallySucceeds();
        }
    }
@Test
public void testUpdateResourceRequirementsInReactiveModeIsNotSupported() throws Exception {
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(createJobGraph(), mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThatThrownBy(
() ->
scheduler.updateJobResourceRequirements(
JobResourceRequirements.empty()))
.isInstanceOf(UnsupportedOperationException.class);
}
@Test
public void testRequestDefaultResourceRequirements() throws Exception {
final JobGraph jobGraph = createJobGraph();
final Configuration configuration = new Configuration();
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(scheduler.requestJobResourceRequirements())
.isEqualTo(
JobResourceRequirements.newBuilder()
.setParallelismForJobVertex(
JOB_VERTEX.getID(), 1, JOB_VERTEX.getParallelism())
.build());
}
@Test
public void testRequestDefaultResourceRequirementsInReactiveMode() throws Exception {
final JobGraph jobGraph = createJobGraph();
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
assertThat(scheduler.requestJobResourceRequirements())
.isEqualTo(
JobResourceRequirements.newBuilder()
.setParallelismForJobVertex(
JOB_VERTEX.getID(),
1,
SchedulerBase.getDefaultMaxParallelism(JOB_VERTEX))
.build());
}
@Test
public void testRequestUpdatedResourceRequirements() throws Exception {
final JobGraph jobGraph = createJobGraph();
final Configuration configuration = new Configuration();
final AdaptiveScheduler scheduler =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.build(EXECUTOR_RESOURCE.getExecutor());
final JobResourceRequirements newJobResourceRequirements =
JobResourceRequirements.newBuilder()
.setParallelismForJobVertex(JOB_VERTEX.getID(), 1, 12)
.build();
assertThat(scheduler.requestJobResourceRequirements())
.isNotEqualTo(newJobResourceRequirements);
scheduler.updateJobResourceRequirements(newJobResourceRequirements);
assertThat(scheduler.requestJobResourceRequirements())
.isEqualTo(newJobResourceRequirements);
}
    /**
     * Polls {@code scheduler.requestJob()} on the main-thread executor until the job reports
     * {@link JobStatus#RUNNING} and returns that execution-graph snapshot.
     *
     * <p>NOTE(review): this is a busy-wait without timeout; it relies on the surrounding test
     * having already driven the job towards RUNNING — confirm callers guarantee this.
     */
    private CompletableFuture<ArchivedExecutionGraph> getArchivedExecutionGraphForRunningJob(
            SchedulerNG scheduler) {
        return CompletableFuture.supplyAsync(
                () -> {
                    ArchivedExecutionGraph graph = null;
                    while (graph == null || graph.getState() != JobStatus.RUNNING) {
                        graph = scheduler.requestJob().getArchivedExecutionGraph();
                    }
                    return graph;
                },
                singleThreadMainThreadExecutor);
    }
private Consumer<ExecutionAttemptID> createCancelConsumer(SchedulerNG scheduler) {
return executionAttemptId ->
singleThreadMainThreadExecutor.execute(
() ->
scheduler.updateTaskExecutionState(
new TaskExecutionState(
executionAttemptId, ExecutionState.CANCELED)));
}
    /** Creates a slot pool for the given job with the default idle-slot timeout. */
    private static DefaultDeclarativeSlotPool createDeclarativeSlotPool(JobID jobId) {
        return createDeclarativeSlotPool(jobId, DEFAULT_TIMEOUT);
    }
    /**
     * Creates a slot pool for the given job.
     *
     * @param jobId job the pool belongs to
     * @param idleSlotTimeout time after which unused slots may be released
     */
    private static DefaultDeclarativeSlotPool createDeclarativeSlotPool(
            JobID jobId, Duration idleSlotTimeout) {
        return new DefaultDeclarativeSlotPool(
                jobId,
                new DefaultAllocatedSlotPool(),
                ignored -> {}, // no-op listener for new resource requirements
                Time.fromDuration(idleSlotTimeout),
                Time.fromDuration(DEFAULT_TIMEOUT));
    }
    /** Creates the default single-vertex streaming job graph used throughout this test class. */
    private static JobGraph createJobGraph() {
        return streamingJobGraph(JOB_VERTEX);
    }
    /** {@link DummyState} that records whether and with which argument {@code onLeave} ran. */
    private static class LifecycleMethodCapturingState extends DummyState {
        boolean onLeaveCalled = false;
        // State class passed to the most recent onLeave() call, if any.
        @Nullable Class<? extends State> onLeaveNewStateArgument = null;

        // Clears the recorded invocation so the instance can be reused within a test.
        void reset() {
            onLeaveCalled = false;
            onLeaveNewStateArgument = null;
        }

        @Override
        public void onLeave(Class<? extends State> newState) {
            onLeaveCalled = true;
            onLeaveNewStateArgument = newState;
        }
    }
    /**
     * A {@link SimpleAckingTaskManagerGateway} that buffers all the task submissions into a
     * blocking queue, allowing one to wait for an arbitrary number of submissions.
     */
    public static class SubmissionBufferingTaskManagerGateway
            extends SimpleAckingTaskManagerGateway {
        // Deployment descriptors of all submitted tasks, in submission order.
        final BlockingQueue<TaskDeploymentDescriptor> submittedTasks;
        // Allocation ids of all slots freed through this gateway.
        final BlockingQueue<AllocationID> freedSlots;

        public SubmissionBufferingTaskManagerGateway(int capacity) {
            submittedTasks = new ArrayBlockingQueue<>(capacity);
            freedSlots = new ArrayBlockingQueue<>(capacity);
            initializeFunctions();
        }

        @Override
        public void setSubmitConsumer(Consumer<TaskDeploymentDescriptor> submitConsumer) {
            // Record the submission before delegating to the externally supplied consumer.
            super.setSubmitConsumer(
                    taskDeploymentDescriptor -> {
                        Preconditions.checkState(submittedTasks.offer(taskDeploymentDescriptor));
                        submitConsumer.accept(taskDeploymentDescriptor);
                    });
        }

        @Override
        public void setFreeSlotFunction(
                BiFunction<AllocationID, Throwable, CompletableFuture<Acknowledge>>
                        freeSlotFunction) {
            // Record the freed slot before delegating to the externally supplied function.
            super.setFreeSlotFunction(
                    (allocationID, throwable) -> {
                        Preconditions.checkState(freedSlots.offer(allocationID));
                        return freeSlotFunction.apply(allocationID, throwable);
                    });
        }

        /**
         * Block until an arbitrary number of submissions have been received.
         *
         * @param numSubmissions The number of submissions to wait for
         * @return the list of the waited-for submissions
         * @throws InterruptedException if a timeout is exceeded waiting for a submission
         */
        public List<TaskDeploymentDescriptor> waitForSubmissions(int numSubmissions)
                throws InterruptedException {
            List<TaskDeploymentDescriptor> descriptors = new ArrayList<>();
            for (int i = 0; i < numSubmissions; i++) {
                descriptors.add(submittedTasks.take());
            }
            return descriptors;
        }

        /** Blocks until the given number of slots has been freed through this gateway. */
        public List<AllocationID> waitForFreedSlots(int numFreedSlots) throws InterruptedException {
            final List<AllocationID> allocationIds = new ArrayList<>();
            for (int i = 0; i < numFreedSlots; i++) {
                allocationIds.add(freedSlots.take());
            }
            return allocationIds;
        }

        // Install no-op defaults so buffering works even if a test never sets the consumers.
        private void initializeFunctions() {
            setSubmitConsumer(ignored -> {});
            setFreeSlotFunction(
                    (allocationId, throwable) ->
                            CompletableFuture.completedFuture(Acknowledge.get()));
        }
    }
private static class StateInstanceFactory
implements StateFactory<LifecycleMethodCapturingState> {
private final LifecycleMethodCapturingState instance;
public StateInstanceFactory(LifecycleMethodCapturingState instance) {
this.instance = instance;
}
@Override
public Class<LifecycleMethodCapturingState> getStateClass() {
return LifecycleMethodCapturingState.class;
}
@Override
public LifecycleMethodCapturingState getState() {
return instance;
}
}
    /** Minimal {@link State} stub that only reports a fixed {@link JobStatus}. */
    static class DummyState implements State {

        private final JobStatus jobStatus;

        public DummyState() {
            this(JobStatus.RUNNING);
        }

        public DummyState(JobStatus jobStatus) {
            this.jobStatus = jobStatus;
        }

        @Override
        public void cancel() {}

        @Override
        public void suspend(Throwable cause) {}

        @Override
        public JobStatus getJobStatus() {
            return jobStatus;
        }

        @Override
        public ArchivedExecutionGraph getJob() {
            // Not needed by the tests using this stub.
            return null;
        }

        @Override
        public void handleGlobalFailure(Throwable cause) {}

        @Override
        public Logger getLogger() {
            // Not needed by the tests using this stub.
            return null;
        }

        /** Factory creating {@link DummyState} instances with a configurable status. */
        private static class Factory implements StateFactory<DummyState> {

            private final JobStatus jobStatus;

            public Factory() {
                this(JobStatus.RUNNING);
            }

            public Factory(JobStatus jobStatus) {
                this.jobStatus = jobStatus;
            }

            @Override
            public Class<DummyState> getStateClass() {
                return DummyState.class;
            }

            @Override
            public DummyState getState() {
                return new DummyState(jobStatus);
            }
        }
    }
private static class ExceptionHistoryTester {
        private final ComponentMainThreadExecutor mainThreadExecutor;
        // Failure-injection logic executed against the running scheduler and its attempts.
        private BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
                (scheduler, attempts) -> {};
        // Optional hooks for customizing the scheduler builder and the job graph.
        private Consumer<AdaptiveSchedulerBuilder> schedulerModifier = ignored -> {};
        private Consumer<JobGraph> jobGraphModifier = ignored -> {};
        private Collection<FailureEnricher> failureEnrichers = Collections.emptySet();

        ExceptionHistoryTester(ComponentMainThreadExecutor mainThreadExecutor) {
            this.mainThreadExecutor = mainThreadExecutor;
        }

        /** Sets the failure-injection logic to run once the job is up. */
        ExceptionHistoryTester withTestLogic(
                BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic) {
            this.testLogic = testLogic;
            return this;
        }

        /** Allows tweaking the scheduler builder before the scheduler is created. */
        ExceptionHistoryTester withModifiedScheduler(
                Consumer<AdaptiveSchedulerBuilder> schedulerModifier) {
            this.schedulerModifier = schedulerModifier;
            return this;
        }

        /** Allows tweaking the job graph before it is used. */
        ExceptionHistoryTester withModifiedJobGraph(Consumer<JobGraph> jobGraphModifier) {
            this.jobGraphModifier = jobGraphModifier;
            return this;
        }

        /** Sets the failure enrichers handed to the scheduler. */
        ExceptionHistoryTester withFailureEnrichers(Collection<FailureEnricher> failureEnrichers) {
            this.failureEnrichers = failureEnrichers;
            return this;
        }
Iterable<RootExceptionHistoryEntry> run() throws Exception {
final JobGraph jobGraph = createJobGraph();
jobGraphModifier.accept(jobGraph);
final CompletedCheckpointStore completedCheckpointStore =
new StandaloneCompletedCheckpointStore(1);
final CheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();
final CheckpointsCleaner checkpointCleaner = new CheckpointsCleaner();
TestingCheckpointRecoveryFactory checkpointRecoveryFactory =
new TestingCheckpointRecoveryFactory(
completedCheckpointStore, checkpointIDCounter);
final DefaultDeclarativeSlotPool declarativeSlotPool =
createDeclarativeSlotPool(jobGraph.getJobID());
final Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.RESOURCE_WAIT_TIMEOUT, Duration.ofMillis(1L));
AdaptiveSchedulerBuilder builder =
new AdaptiveSchedulerBuilder(jobGraph, mainThreadExecutor)
.setJobMasterConfiguration(configuration)
.setDeclarativeSlotPool(declarativeSlotPool)
.setCheckpointRecoveryFactory(checkpointRecoveryFactory)
.setCheckpointCleaner(checkpointCleaner)
.setFailureEnrichers(failureEnrichers);
schedulerModifier.accept(builder);
final AdaptiveScheduler scheduler = builder.build(EXECUTOR_RESOURCE.getExecutor());
final SubmissionBufferingTaskManagerGateway taskManagerGateway =
new SubmissionBufferingTaskManagerGateway(PARALLELISM);
taskManagerGateway.setCancelConsumer(
attemptId ->
mainThreadExecutor.execute(
() ->
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId,
ExecutionState.CANCELED,
null)))));
mainThreadExecutor.execute(
() -> {
scheduler.startScheduling();
offerSlots(
declarativeSlotPool,
createSlotOffersForResourceRequirements(
ResourceCounter.withResource(
ResourceProfile.UNKNOWN, PARALLELISM)),
taskManagerGateway);
});
taskManagerGateway.waitForSubmissions(PARALLELISM);
CompletableFuture<Iterable<ArchivedExecutionVertex>> vertexFuture =
new CompletableFuture<>();
mainThreadExecutor.execute(
() ->
vertexFuture.complete(
scheduler
.requestJob()
.getArchivedExecutionGraph()
.getAllExecutionVertices()));
final Iterable<ArchivedExecutionVertex> executionVertices = vertexFuture.get();
final List<ExecutionAttemptID> attemptIds =
IterableUtils.toStream(executionVertices)
.map(ArchivedExecutionVertex::getCurrentExecutionAttempt)
.map(ArchivedExecution::getAttemptId)
.collect(Collectors.toList());
CompletableFuture<Void> runTestLogicFuture =
CompletableFuture.runAsync(
() -> testLogic.accept(scheduler, attemptIds), mainThreadExecutor);
runTestLogicFuture.get();
mainThreadExecutor.execute(scheduler::cancel);
scheduler.getJobTerminationFuture().get();
return scheduler.requestJob().getExceptionHistory();
}
}
}
|
Let's handle float and decimal differently, and have the other types use the same isEqual, which will minimise code duplication.
|
/**
 * Reference equality check for values. For simple basic types (int, byte,
 * boolean, string) this gives the same result as value equality; float and
 * decimal require an exact representation match; XML and handle values are
 * compared by reference identity.
 *
 * @param lhsValue the value on the left hand side
 * @param rhsValue the value on the right hand side
 * @return true if the values are reference equal (or, for simple basic
 *         types, equal in value), else false
 */
public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {
    if (lhsValue == rhsValue) {
        return true;
    }
    // Not the same reference, so if either side is null they cannot be equal.
    if (lhsValue == null || rhsValue == null) {
        return false;
    }
    Type lhsType = getType(lhsValue);
    Type rhsType = getType(rhsValue);
    switch (lhsType.getTag()) {
        case TypeTags.FLOAT_TAG:
            if (rhsType.getTag() != TypeTags.FLOAT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).doubleValue());
        case TypeTags.DECIMAL_TAG:
            if (rhsType.getTag() != TypeTags.DECIMAL_TAG) {
                return false;
            }
            return checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.INT_TAG:
        case TypeTags.BYTE_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.STRING_TAG:
            // Delegate to value equality. The previous guards here
            // ("tag != BYTE_TAG || tag != INT_TAG") were always true, so
            // int/byte comparisons always returned false; delegating also
            // avoids boxed-class mismatches such as Integer.equals(Byte).
            return isEqual(lhsValue, rhsValue);
    }
    if (TypeTags.isXMLTypeTag(lhsType.getTag()) && TypeTags.isXMLTypeTag(rhsType.getTag())) {
        return isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);
    }
    if (isHandleType(lhsType) && isHandleType(rhsType)) {
        return isHandleValueRefEqual(lhsValue, rhsValue);
    }
    return false;
}
|
if (rhsType.getTag() != TypeTags.BYTE_TAG || rhsType.getTag() != TypeTags.INT_TAG) {
|
/**
 * Reference equality check for values. Simple basic types fall back to value
 * equality; float/decimal require an exact match of the same kind; XML and
 * handle values are compared by reference identity.
 *
 * @param lhsValue the value on the left hand side
 * @param rhsValue the value on the right hand side
 * @return true if the values are reference equal (or equal in value for
 *         simple basic types), else false
 */
public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {
    // Identical references (including both null) are trivially equal.
    if (lhsValue == rhsValue) {
        return true;
    }
    // Exactly one side is null here, so they cannot be equal.
    if (lhsValue == null || rhsValue == null) {
        return false;
    }
    Type leftType = getType(lhsValue);
    Type rightType = getType(rhsValue);
    int rightTag = rightType.getTag();
    switch (leftType.getTag()) {
        case TypeTags.FLOAT_TAG:
            return rightTag == TypeTags.FLOAT_TAG
                    && lhsValue.equals(((Number) rhsValue).doubleValue());
        case TypeTags.DECIMAL_TAG:
            return rightTag == TypeTags.DECIMAL_TAG
                    && checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.INT_TAG:
        case TypeTags.BYTE_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.STRING_TAG:
            // Simple basic types: reference equality coincides with value equality.
            return isEqual(lhsValue, rhsValue);
        case TypeTags.XML_TAG:
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_ELEMENT_TAG:
        case TypeTags.XML_PI_TAG:
        case TypeTags.XML_TEXT_TAG:
            return TypeTags.isXMLTypeTag(rightTag)
                    && isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);
        case TypeTags.HANDLE_TAG:
            return rightTag == TypeTags.HANDLE_TAG
                    && isHandleValueRefEqual(lhsValue, rhsValue);
        default:
            return false;
    }
}
|
class TypeChecker {
// Casts sourceVal to targetType, returning the value unchanged when it
// already belongs to the target type, attempting a simple-value conversion
// otherwise, and raising a type-cast error when no conversion applies.
public static Object checkCast(Object sourceVal, Type targetType) {
if (checkIsType(sourceVal, targetType)) {
return sourceVal;
}
Type sourceType = getType(sourceVal);
// Both sides are simple basic types (tag <= BOOLEAN_TAG): direct value cast.
if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) {
return TypeConverter.castValues(targetType, sourceVal);
}
// Simple source, union target: try each member type in turn. The
// exception is intentionally swallowed so the next member can be tried;
// if all members fail we fall through to the cast error below.
if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) {
for (Type memberType : ((BUnionType) targetType).getMemberTypes()) {
try {
return TypeConverter.castValues(memberType, sourceVal);
} catch (Exception e) {
// ignore and try the next member type
}
}
}
throw ErrorUtils.createTypeCastError(sourceVal, targetType);
}
// ---------------------------------------------------------------------
// anyToX: cast an arbitrary value to a specific Ballerina type. Each
// helper delegates to TypeConverter and supplies a lazily-created
// type-cast error for the failure case.
// ---------------------------------------------------------------------
public static long anyToInt(Object sourceVal) {
return TypeConverter.anyToIntCast(sourceVal,
() -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT));
}
// Casts to the int:Signed32 subtype.
public static long anyToSigned32(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32,
() -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_INT_SIGNED_32));
}
// Casts to the int:Signed16 subtype.
public static long anyToSigned16(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16,
() -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_INT_SIGNED_16));
}
// Casts to the int:Signed8 subtype.
public static long anyToSigned8(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8,
() -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_INT_SIGNED_8));
}
// Casts to the int:Unsigned32 subtype.
public static long anyToUnsigned32(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32,
() -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_INT_UNSIGNED_32));
}
// Casts to the int:Unsigned16 subtype.
public static long anyToUnsigned16(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16,
() -> ErrorUtils
.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16));
}
// Casts to the int:Unsigned8 subtype.
public static long anyToUnsigned8(Object sourceVal) {
return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8,
() -> ErrorUtils
.createTypeCastError(sourceVal,
TYPE_INT_UNSIGNED_8));
}
public static double anyToFloat(Object sourceVal) {
return TypeConverter.anyToFloatCast(sourceVal, () -> ErrorUtils
.createTypeCastError(sourceVal, TYPE_FLOAT));
}
public static boolean anyToBoolean(Object sourceVal) {
return TypeConverter.anyToBooleanCast(sourceVal, () -> ErrorUtils
.createTypeCastError(sourceVal, TYPE_BOOLEAN));
}
public static int anyToByte(Object sourceVal) {
return TypeConverter.anyToByteCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_BYTE));
}
public static DecimalValue anyToDecimal(Object sourceVal) {
return TypeConverter.anyToDecimalCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,
TYPE_DECIMAL));
}
// ---------------------------------------------------------------------
// anyToJX: cast a Ballerina value to a Java primitive, raising a
// Ballerina-to-Java type-cast error on failure.
// ---------------------------------------------------------------------
public static byte anyToJByte(Object sourceVal) {
return TypeConverter.anyToJByteCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "byte"));
}
public static char anyToJChar(Object sourceVal) {
return TypeConverter.anyToJCharCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "char"));
}
public static short anyToJShort(Object sourceVal) {
return TypeConverter.anyToJShortCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "short"));
}
public static int anyToJInt(Object sourceVal) {
return TypeConverter.anyToJIntCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "int"));
}
public static long anyToJLong(Object sourceVal) {
return TypeConverter.anyToJLongCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "long"));
}
public static float anyToJFloat(Object sourceVal) {
return TypeConverter.anyToJFloatCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "float"));
}
public static double anyToJDouble(Object sourceVal) {
return TypeConverter.anyToJDoubleCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "double"));
}
public static boolean anyToJBoolean(Object sourceVal) {
return TypeConverter.anyToJBooleanCast(sourceVal,
() -> ErrorUtils.createBToJTypeCastError(sourceVal, "boolean"));
}
/**
 * Check whether a given value belongs to the given type.
 *
 * @param sourceVal value to check the type
 * @param targetType type to test against
 * @return true if the value belongs to the given type, false otherwise
 */
public static boolean checkIsType(Object sourceVal, Type targetType) {
return checkIsType(sourceVal, getType(sourceVal), targetType);
}
/**
 * Check whether a given value belongs to the given type.
 *
 * @param sourceVal value to check the type
 * @param sourceType type of the value
 * @param targetType type to test against
 * @return true if the value belongs to the given type, false otherwise
 */
public static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) {
if (checkIsType(sourceVal, sourceType, targetType, null)) {
return true;
}
// An XML sequence value may still conform via its shape even when the
// type-level check above fails.
if (sourceType.getTag() == TypeTags.XML_TAG) {
XmlValue val = (XmlValue) sourceVal;
if (val.getNodeType() == XmlNodeType.SEQUENCE) {
return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
}
}
// Mutable values must belong by type; only immutable values may fall
// back to a shape (is-like) check.
if (isMutable(sourceVal, sourceType)) {
return false;
}
return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
}
/**
 * Check whether a given value has the same shape as the given type.
 *
 * @param sourceValue value to check the shape
 * @param targetType type to check the shape against
 * @return true if the value has the same shape as the given type; false otherwise
 */
public static boolean checkIsLikeType(Object sourceValue, Type targetType) {
// Numeric conversion is disallowed by default.
return checkIsLikeType(sourceValue, targetType, false);
}
/**
 * Check whether a given value has the same shape as the given type.
 *
 * @param sourceValue value to check the shape
 * @param targetType type to check the shape against
 * @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type
 * @return true if the value has the same shape as the given type; false otherwise
 */
public static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) {
// Fresh list tracks values under comparison — presumably to guard
// against cycles in recursive structures; confirm in the overload.
return checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion);
}
/**
 * Check whether two types are the same.
 *
 * @param sourceType type to test
 * @param targetType type to test against
 * @return true if the two types are same; false otherwise
 */
public static boolean isSameType(Type sourceType, Type targetType) {
int sourceTypeTag = sourceType.getTag();
int targetTypeTag = targetType.getTag();
// Same instance: trivially the same type.
if (sourceType == targetType) {
return true;
}
if (sourceTypeTag == targetTypeTag) {
if (sourceType.equals(targetType)) {
return true;
}
switch (sourceTypeTag) {
case TypeTags.ARRAY_TAG:
return checkArrayEquivalent(sourceType, targetType);
case TypeTags.FINITE_TYPE_TAG:
// Two finite types match when the value spaces have the same
// size and each source value's type appears in the target space.
Set<Object> sourceValueSpace = ((BFiniteType) sourceType).valueSpace;
Set<Object> targetValueSpace = ((BFiniteType) targetType).valueSpace;
if (sourceValueSpace.size() != targetValueSpace.size()) {
return false;
}
for (Object sourceVal : sourceValueSpace) {
if (!containsType(targetValueSpace, getType(sourceVal))) {
return false;
}
}
return true;
default:
break;
}
}
// Finite type on one side only: every value's type must be the same as
// the non-finite side.
if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) {
for (Object value : ((BFiniteType) sourceType).valueSpace) {
if (!isSameType(getType(value), targetType)) {
return false;
}
}
return true;
}
if (targetTypeTag == TypeTags.FINITE_TYPE_TAG) {
for (Object value : ((BFiniteType) targetType).valueSpace) {
if (!isSameType(getType(value), sourceType)) {
return false;
}
}
return true;
}
return false;
}
/**
 * Returns the Ballerina type of a runtime value. Unboxed Java
 * representations of simple types are mapped directly; every other value is
 * expected to be a {@link BValue} carrying its own type.
 *
 * @param value the runtime value (may be null)
 * @return the corresponding type
 */
public static Type getType(Object value) {
    // Order matters: check the plain-Java representations before falling
    // back to the BValue cast.
    if (value == null) {
        return TYPE_NULL;
    }
    if (value instanceof Long) {
        return TYPE_INT;
    }
    if (value instanceof Double) {
        return TYPE_FLOAT;
    }
    if (value instanceof Integer || value instanceof Byte) {
        return TYPE_BYTE;
    }
    if (value instanceof BString) {
        return TYPE_STRING;
    }
    if (value instanceof Boolean) {
        return TYPE_BOOLEAN;
    }
    return ((BValue) value).getType();
}
/**
 * Deep value equality check for anydata.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value on the right hand side
 * @return True if values are equal, else false.
 */
public static boolean isEqual(Object lhsValue, Object rhsValue) {
// Fresh list tracks value pairs under comparison — presumably to guard
// against cycles in recursive structures; confirm in the overload.
return isEqual(lhsValue, rhsValue, new ArrayList<>());
}
/**
 * Check if two decimal values are equal in value.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value of the right hand side
 * @return True if values are equal, else false.
 */
public static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
// compareTo treats numerically-equal values with different scales
// (e.g. 1.0 vs 1.00) as equal, unlike equals().
return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) &&
lhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0;
}
/**
 * Check if two decimal values are exactly equal.
 *
 * @param lhsValue The value on the left-hand side
 * @param rhsValue The value of the right-hand side
 * @return True if values are exactly equal, else false.
 */
public static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
// BigDecimal.equals also compares scale, so 1.0 and 1.00 are NOT
// exactly equal here (contrast with checkDecimalEqual).
return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue)
&& lhsValue.decimalValue().equals(rhsValue.decimalValue());
}
/**
 * Checks if the given decimal number is a real number.
 *
 * @param decimalValue The decimal value being checked
 * @return True if the decimal value is a real number.
 */
private static boolean isDecimalRealNumber(DecimalValue decimalValue) {
// Only ZERO and OTHER kinds count as real numbers (excludes the
// remaining DecimalValueKind variants).
return decimalValue.valueKind == DecimalValueKind.ZERO || decimalValue.valueKind == DecimalValueKind.OTHER;
}
/**
 * Reference equality check for XML values. Sequences are compared
 * child-by-child, text nodes fall back to value equality, and all other
 * node kinds are only equal by reference (handled by the caller's
 * identity check).
 *
 * @param lhsValue The XML value on the left hand side
 * @param rhsValue The XML value on the right hand side
 * @return True if the XML values are reference equal, else false.
 */
private static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) {
// Differing node kinds can never be reference equal.
if (lhsValue.getNodeType() != rhsValue.getNodeType()) {
return false;
}
if (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) {
return isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue);
}
// Text nodes compare by value.
if (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) {
return isEqual(lhsValue, rhsValue);
}
return false;
}
// Two XML sequences are reference equal when they have the same length and
// each corresponding pair of children is reference equal.
private static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) {
Iterator<BXml> lhsIter = lhsValue.getChildrenList().iterator();
Iterator<BXml> rhsIter = rhsValue.getChildrenList().iterator();
while (lhsIter.hasNext() && rhsIter.hasNext()) {
BXml l = lhsIter.next();
BXml r = rhsIter.next();
if (!(l == r || isXMLValueRefEqual((XmlValue) l, (XmlValue) r))) {
return false;
}
}
// True only when both iterators are exhausted, i.e. equal lengths.
return lhsIter.hasNext() == rhsIter.hasNext();
}
/**
 * Get the typedesc of a value.
 *
 * @param value Value
 * @return type desc associated with the value
 */
public static TypedescValue getTypedesc(Object value) {
Type type = TypeChecker.getType(value);
if (type == null) {
return null;
}
// Map values may carry their own typedesc; prefer it when present.
if (value instanceof MapValue) {
TypedescValue typedesc = (TypedescValue) ((MapValue) value).getTypedesc();
if (typedesc != null) {
return typedesc;
}
}
return new TypedescValueImpl(type);
}
/**
 * Get the annotation value if present.
 *
 * @param typedescValue The typedesc value
 * @param annotTag The annot-tag-reference
 * @return the annotation value if present, nil else
 */
public static Object getAnnotValue(TypedescValue typedescValue, String annotTag) {
Type describingType = typedescValue.getDescribingType();
// Only annotatable types can carry annotations.
if (!(describingType instanceof BAnnotatableType)) {
return null;
}
return ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag));
}
/**
 * Get the annotation value if present.
 *
 * @param typedescValue The typedesc value
 * @param annotTag The annot-tag-reference
 * @return the annotation value if present, nil else
 */
public static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) {
Type describingType = typedescValue.getDescribingType();
// Only annotatable types can carry annotations.
if (!(describingType instanceof BAnnotatableType)) {
return null;
}
return ((BAnnotatableType) describingType).getAnnotation(annotTag);
}
/**
 * Check whether a given type is equivalent to a target type.
 *
 * @param sourceType type to check
 * @param targetType type to compare with
 * @return flag indicating the equivalence of the two types
 */
public static boolean checkIsType(Type sourceType, Type targetType) {
// null for the unresolved-types list: a fresh list is created lazily
// where recursion tracking is actually needed.
return checkIsType(sourceType, targetType, (List<TypePair>) null);
}
// Type-level subtyping check. The unresolvedTypes list tracks type pairs
// currently under comparison to break cycles in recursive type definitions.
@Deprecated
public static boolean checkIsType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
// Identity / structural equality short-circuit.
if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
return true;
}
if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) {
return true;
}
// A non-readonly source can never belong to a readonly target.
if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
return false;
}
int sourceTypeTag = sourceType.getTag();
int targetTypeTag = targetType.getTag();
// Normalize intersection / parameterized / readonly wrappers on either
// side before comparing the underlying types.
if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
return checkIsType(((BIntersectionType) sourceType).getEffectiveType(),
targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
}
if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
return checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
}
if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
return checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes);
}
return checkIsType(((BParameterizedType) sourceType).getParamValueType(),
((BParameterizedType) targetType).getParamValueType(), unresolvedTypes);
}
if (sourceTypeTag == TypeTags.READONLY_TAG) {
return checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE,
targetType, unresolvedTypes);
}
if (targetTypeTag == TypeTags.READONLY_TAG) {
return checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes);
}
// A union source must have every member belong to the target.
if (sourceTypeTag == TypeTags.UNION_TAG) {
return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
}
// A finite source matches when every value in its value space belongs
// to the target.
if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG &&
(targetTypeTag == TypeTags.FINITE_TYPE_TAG || targetTypeTag <= TypeTags.NULL_TAG ||
targetTypeTag == TypeTags.XML_TEXT_TAG)) {
return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
}
switch (targetTypeTag) {
case TypeTags.BYTE_TAG:
case TypeTags.SIGNED8_INT_TAG:
case TypeTags.FLOAT_TAG:
case TypeTags.DECIMAL_TAG:
case TypeTags.CHAR_STRING_TAG:
case TypeTags.BOOLEAN_TAG:
case TypeTags.NULL_TAG:
return sourceTypeTag == targetTypeTag;
case TypeTags.STRING_TAG:
return TypeTags.isStringTypeTag(sourceTypeTag);
case TypeTags.XML_TEXT_TAG:
// xml constrained to never is a subtype of xml:Text.
if (sourceTypeTag == TypeTags.XML_TAG) {
return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
}
return sourceTypeTag == targetTypeTag;
// Integer subtype lattice: each target accepts byte plus the
// narrower-or-equal signed/unsigned int subtypes.
case TypeTags.INT_TAG:
return sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG ||
(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG);
case TypeTags.SIGNED16_INT_TAG:
return sourceTypeTag == TypeTags.BYTE_TAG ||
(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG);
case TypeTags.SIGNED32_INT_TAG:
return sourceTypeTag == TypeTags.BYTE_TAG ||
(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG);
case TypeTags.UNSIGNED8_INT_TAG:
return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG;
case TypeTags.UNSIGNED16_INT_TAG:
return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG;
case TypeTags.UNSIGNED32_INT_TAG:
return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG;
case TypeTags.ANY_TAG:
return checkIsAnyType(sourceType);
case TypeTags.ANYDATA_TAG:
return sourceType.isAnydata();
case TypeTags.SERVICE_TAG:
return checkIsServiceType(sourceType, targetType,
unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
case TypeTags.HANDLE_TAG:
return sourceTypeTag == TypeTags.HANDLE_TAG;
case TypeTags.READONLY_TAG:
return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
case TypeTags.XML_ELEMENT_TAG:
case TypeTags.XML_COMMENT_TAG:
case TypeTags.XML_PI_TAG:
return targetTypeTag == sourceTypeTag;
default:
// Structured/recursive targets (map, record, array, tuple, ...).
return checkIsRecursiveType(sourceType, targetType,
unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
}
}
// Value-aware subtyping check: only record/object sources consult the value
// itself; every other source falls back to the purely type-level check.
private static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,
List<TypePair> unresolvedTypes) {
int sourceTypeTag = sourceType.getTag();
int targetTypeTag = targetType.getTag();
if (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {
return checkIsType(sourceType, targetType);
}
// Unwrap intersection targets to their effective type.
if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
targetType = ((BIntersectionType) targetType).getEffectiveType();
targetTypeTag = targetType.getTag();
}
if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
return true;
}
// A non-readonly source can never belong to a readonly target.
if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
return false;
}
switch (targetTypeTag) {
case TypeTags.ANY_TAG:
return checkIsAnyType(sourceType);
case TypeTags.READONLY_TAG:
return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
default:
return checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,
unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
}
}
// The source belongs to a typedesc target when it is itself a typedesc and
// its constraint is a subtype of the target's constraint.
private static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,
List<TypePair> unresolvedTypes) {
if (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {
return false;
}
BTypedescType sourceTypedesc = (BTypedescType) sourceType;
return checkIsType(sourceTypedesc.getConstraint(), targetType.getConstraint(), unresolvedTypes);
}
// Dispatches a type-level subtyping check on the target's tag for all
// structured / potentially recursive target types.
private static boolean checkIsRecursiveType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
switch (targetType.getTag()) {
case TypeTags.MAP_TAG:
return checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);
case TypeTags.STREAM_TAG:
return checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);
case TypeTags.TABLE_TAG:
return checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);
case TypeTags.JSON_TAG:
return checkIsJSONType(sourceType, unresolvedTypes);
case TypeTags.RECORD_TYPE_TAG:
return checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);
case TypeTags.FUNCTION_POINTER_TAG:
return checkIsFunctionType(sourceType, (BFunctionType) targetType);
case TypeTags.ARRAY_TAG:
return checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);
case TypeTags.TUPLE_TAG:
return checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);
case TypeTags.UNION_TAG:
return checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);
case TypeTags.OBJECT_TYPE_TAG:
return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
case TypeTags.FINITE_TYPE_TAG:
return checkIsFiniteType(sourceType, (BFiniteType) targetType);
case TypeTags.FUTURE_TAG:
return checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);
case TypeTags.ERROR_TAG:
return checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);
case TypeTags.TYPEDESC_TAG:
return checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);
case TypeTags.XML_TAG:
return checkIsXMLType(sourceType, targetType, unresolvedTypes);
default:
return false;
}
}
// Value-aware counterpart of checkIsRecursiveType: the source is known to be
// a record or object, so field values can be inspected where needed.
private static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,
int sourceTypeTag, int targetTypeTag,
List<TypePair> unresolvedTypes) {
switch (targetTypeTag) {
case TypeTags.ANYDATA_TAG:
// Objects are never anydata; records are checked field by field.
if (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {
return false;
}
return checkRecordBelongsToAnydataType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes);
case TypeTags.MAP_TAG:
return checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);
case TypeTags.JSON_TAG:
// json is treated as map<json> (readonly json when the target is readonly).
return checkIsMapType(sourceVal, sourceType,
new BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON :
TYPE_JSON), unresolvedTypes);
case TypeTags.RECORD_TYPE_TAG:
return checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);
case TypeTags.UNION_TAG:
// The value must belong to at least one member type.
for (Type type : ((BUnionType) targetType).getMemberTypes()) {
if (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {
return true;
}
}
return false;
case TypeTags.OBJECT_TYPE_TAG:
return checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);
default:
return false;
}
}
// A finite type belongs to the target iff every value in its value space
// belongs to the target type.
private static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {
    return sourceType.valueSpace.stream().allMatch(bValue -> checkIsType(bValue, targetType));
}
// A union belongs to the target iff every member type belongs to it.
private static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    return sourceType.getMemberTypes().stream()
            .allMatch(memberType -> checkIsType(memberType, targetType, unresolvedTypes));
}
private static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List<TypePair> unresolvedTypes) {
// Assume success for pairs already under comparison higher up the
// stack; this breaks cycles in recursive type definitions.
TypePair pair = new TypePair(sourceType, targetType);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
switch (sourceType.getTag()) {
case TypeTags.UNION_TAG:
case TypeTags.JSON_TAG:
case TypeTags.ANYDATA_TAG:
// Union-like sources (json/anydata are represented as unions
// here): every member must belong to the target.
return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
case TypeTags.FINITE_TYPE_TAG:
return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
default:
// Otherwise the source must belong to at least one member type.
for (Type type : targetType.getMemberTypes()) {
if (checkIsType(sourceType, type, unresolvedTypes)) {
return true;
}
}
return false;
}
}
// Type-level check against a map target: maps compare constraints directly;
// records are widened to the union of their field (and rest) types.
private static boolean checkIsMapType(Type sourceType, BMapType targetType, List<TypePair> unresolvedTypes) {
Type targetConstrainedType = targetType.getConstrainedType();
switch (sourceType.getTag()) {
case TypeTags.MAP_TAG:
return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
unresolvedTypes);
case TypeTags.RECORD_TYPE_TAG:
BRecordType recType = (BRecordType) sourceType;
BUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));
return checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);
default:
return false;
}
}
// Value-aware check against a map target: record sources are checked field
// by field against the target's constrained type.
private static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,
List<TypePair> unresolvedTypes) {
Type targetConstrainedType = targetType.getConstrainedType();
switch (sourceType.getTag()) {
case TypeTags.MAP_TAG:
return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
unresolvedTypes);
case TypeTags.RECORD_TYPE_TAG:
return checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,
targetConstrainedType);
default:
return false;
}
}
// Checks a record value against a map's constrained type: non-readonly
// fields are checked by their declared type; readonly fields by the actual
// value they hold; the rest type is checked unless the record is sealed.
private static boolean checkIsMapType(MapValue sourceVal, BRecordType sourceType, List<TypePair> unresolvedTypes,
Type targetConstrainedType) {
for (Field field : sourceType.getFields().values()) {
if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
// Mutable field: its declared type must fit the constraint.
if (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {
return false;
}
continue;
}
BString name = StringUtils.fromString(field.getFieldName());
// Absent optional readonly fields impose no requirement.
if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(name)) {
continue;
}
// Readonly field: the stored value's shape must fit the constraint.
if (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {
return false;
}
}
if (sourceType.sealed) {
return true;
}
return checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);
}
private static boolean checkIsXMLType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
int sourceTag = sourceType.getTag();
if (sourceTag == TypeTags.FINITE_TYPE_TAG) {
return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
}
BXmlType target = ((BXmlType) targetType);
if (sourceTag == TypeTags.XML_TAG) {
// Unwrap nested xml<xml<...>> constraints on the target side.
Type targetConstraint = target.constraint;
while (target.constraint.getTag() == TypeTags.XML_TAG) {
target = (BXmlType) target.constraint;
targetConstraint = target.constraint;
}
BXmlType source = (BXmlType) sourceType;
// Source constrained to never: only never/text (or a union
// containing them) can accept it.
if (source.constraint.getTag() == TypeTags.NEVER_TAG) {
if (targetConstraint.getTag() == TypeTags.UNION_TAG) {
return checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);
}
return targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||
targetConstraint.getTag() == TypeTags.NEVER_TAG;
}
return checkIsType(source.constraint, targetConstraint, unresolvedTypes);
}
// A concrete XML node type (element/comment/pi/text) must belong to the
// target's constraint.
if (TypeTags.isXMLTypeTag(sourceTag)) {
return checkIsType(sourceType, target.constraint, unresolvedTypes);
}
return false;
}
// Collects the field types of a record (plus its rest type when the record
// is open) — the components of its widened map representation.
private static List<Type> getWideTypeComponents(BRecordType recType) {
    List<Type> componentTypes = recType.getFields().values().stream()
            .map(Field::getFieldType)
            .collect(Collectors.toCollection(ArrayList::new));
    if (!recType.sealed) {
        componentTypes.add(recType.restFieldType);
    }
    return componentTypes;
}
// A stream source matches a stream target when both the element (constrained)
// type and the completion type match.
private static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List<TypePair> unresolvedTypes) {
if (sourceType.getTag() != TypeTags.STREAM_TAG) {
return false;
}
return checkConstraints(((BStreamType) sourceType).getConstrainedType(), targetType.getConstrainedType(),
unresolvedTypes)
&& checkConstraints(((BStreamType) sourceType).getCompletionType(), targetType.getCompletionType(),
unresolvedTypes);
}
// Checks whether sourceType is a subtype of the given table type: first the
// row-type constraint, then the key specification (key type or key field names).
private static boolean checkIsTableType(Type sourceType, BTableType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.TABLE_TAG) {
        return false;
    }
    BTableType srcTableType = (BTableType) sourceType;
    if (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(),
            unresolvedTypes)) {
        return false;
    }
    // Target imposes no key requirement at all.
    if (targetType.getKeyType() == null && targetType.getFieldNames() == null) {
        return true;
    }
    if (targetType.getKeyType() != null) {
        // Direct key-type comparison when the source declares an explicit key type.
        if (srcTableType.getKeyType() != null &&
                (checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {
            return true;
        }
        if (srcTableType.getFieldNames() == null) {
            return false;
        }
        // Source declares key field names: derive an effective key type from the
        // corresponding row-type field types (one field, or a tuple of fields).
        List<Type> fieldTypes = new ArrayList<>();
        Arrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes
                .add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))
                        .getFieldType()));
        if (fieldTypes.size() == 1) {
            return checkConstraints(fieldTypes.get(0), targetType.getKeyType(), unresolvedTypes);
        }
        BTupleType tupleType = new BTupleType(fieldTypes);
        return checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);
    }
    // Target specifies key field names only: they must match exactly, in order.
    return Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());
}
// Resolves the named field from a table's row-type constraint.
// For records the field is looked up directly; intersections delegate to their
// effective type; for unions every member must resolve the field to the same
// type, and that common field is returned. Returns null when unresolvable.
static BField getTableConstraintField(Type constraintType, String fieldName) {
    switch (constraintType.getTag()) {
        case TypeTags.RECORD_TYPE_TAG:
            Map<String, Field> fieldList = ((BRecordType) constraintType).getFields();
            return (BField) fieldList.get(fieldName);
        case TypeTags.INTERSECTION_TAG:
            Type effectiveType = ((BIntersectionType) constraintType).getEffectiveType();
            return getTableConstraintField(effectiveType, fieldName);
        case TypeTags.UNION_TAG:
            BUnionType unionType = (BUnionType) constraintType;
            List<Type> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());
            // Robustness fix: guard the empty case — previously a union with zero
            // members reached fields.get(0) and threw IndexOutOfBoundsException.
            if (fields.isEmpty() || fields.size() != memTypes.size()) {
                return null;
            }
            // All members must agree on the field's type.
            if (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {
                return fields.get(0);
            }
    }
    return null;
}
// Checks whether sourceType is a subtype of `json`.
// Structured members are checked recursively; unresolvedTypes breaks cycles.
private static boolean checkIsJSONType(Type sourceType, List<TypePair> unresolvedTypes) {
    BJsonType jsonType = (BJsonType) TYPE_JSON;
    // Cycle guard: a pair already being resolved is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceType, jsonType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    switch (sourceType.getTag()) {
        // All simple basic types (and json itself) are json.
        case TypeTags.STRING_TAG:
        case TypeTags.CHAR_STRING_TAG:
        case TypeTags.INT_TAG:
        case TypeTags.SIGNED32_INT_TAG:
        case TypeTags.SIGNED16_INT_TAG:
        case TypeTags.SIGNED8_INT_TAG:
        case TypeTags.UNSIGNED32_INT_TAG:
        case TypeTags.UNSIGNED16_INT_TAG:
        case TypeTags.UNSIGNED8_INT_TAG:
        case TypeTags.BYTE_TAG:
        case TypeTags.FLOAT_TAG:
        case TypeTags.DECIMAL_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.NULL_TAG:
        case TypeTags.JSON_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            // Element type must be json.
            return checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return isFiniteTypeMatch((BFiniteType) sourceType, jsonType);
        case TypeTags.MAP_TAG:
            // Constraint type must be json.
            return checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            // Every field type (and the rest type, when open) must be json.
            BRecordType recordType = (BRecordType) sourceType;
            for (Field field : recordType.getFields().values()) {
                if (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            }
            if (!recordType.sealed) {
                return checkIsJSONType(recordType.restFieldType, unresolvedTypes);
            }
            return true;
        case TypeTags.TUPLE_TAG:
            // Every member type (and the rest type, when present) must be json.
            BTupleType sourceTupleType = (BTupleType) sourceType;
            for (Type memberType : sourceTupleType.getTupleTypes()) {
                if (!checkIsJSONType(memberType, unresolvedTypes)) {
                    return false;
                }
            }
            Type tupleRestType = sourceTupleType.getRestType();
            if (tupleRestType != null) {
                return checkIsJSONType(tupleRestType, unresolvedTypes);
            }
            return true;
        case TypeTags.UNION_TAG:
            // Every union member must be json.
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsJSONType(memberType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
// Dispatch on the source's tag: only record and map types can be subtypes
// of a record type.
private static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.getTag();
    if (sourceTag == TypeTags.RECORD_TYPE_TAG) {
        return checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);
    }
    if (sourceTag == TypeTags.MAP_TAG) {
        return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
    }
    return false;
}
// Structural record-to-record subtype check (type level only; no value involved).
private static boolean checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    // Cycle guard for recursive record types.
    TypePair pair = new TypePair(sourceRecordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    // An open source may hold extra fields a closed target forbids.
    if (targetType.sealed && !sourceRecordType.sealed) {
        return false;
    }
    // For open records the source's rest type must fit the target's rest type.
    if (!sourceRecordType.sealed &&
            !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
        return false;
    }
    Map<String, Field> sourceFields = sourceRecordType.getFields();
    Set<String> targetFieldNames = targetType.getFields().keySet();
    for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
        Field targetField = targetFieldEntry.getValue();
        Field sourceField = sourceFields.get(targetFieldEntry.getKey());
        if (sourceField == null) {
            return false;
        }
        // A readonly target field requires a readonly source field.
        if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
            return false;
        }
        // An optional source field cannot satisfy a required target field.
        if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)
                && SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {
            return false;
        }
        if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    // Closed target: the source must not declare any field the target lacks.
    if (targetType.sealed) {
        return targetFieldNames.containsAll(sourceFields.keySet());
    }
    // Open target: extra source fields must fit the target's rest type.
    for (Map.Entry<String, Field> sourceFieldEntry : sourceFields.entrySet()) {
        if (targetFieldNames.contains(sourceFieldEntry.getKey())) {
            continue;
        }
        if (!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
// Map-to-record subtype check: only open records qualify, every record field
// must be optional (a map may simply lack the key), readonly fields require a
// readonly map, and the map's constraint must fit each field and the rest type.
private static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    // Cycle guard for recursive types.
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    if (targetType.sealed) {
        return false;
    }
    Type constraintType = sourceType.getConstrainedType();
    for (Field targetField : targetType.getFields().values()) {
        long fieldFlags = targetField.getFlags();
        if (!SymbolFlags.isFlagOn(fieldFlags, SymbolFlags.OPTIONAL)) {
            return false;
        }
        if (SymbolFlags.isFlagOn(fieldFlags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {
            return false;
        }
        if (!checkIsType(constraintType, targetField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    return checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);
}
// Checks whether a record VALUE belongs to `anydata`. Readonly fields are
// checked by their actual stored value (which may be narrower than the
// declared field type); mutable fields are checked by declared type.
private static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,
                                                       List<TypePair> unresolvedTypes) {
    Type targetType = TYPE_ANYDATA;
    // Cycle guard for recursive record types.
    TypePair pair = new TypePair(recordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    Map<String, Field> fields = recordType.getFields();
    for (Map.Entry<String, Field> fieldEntry : fields.entrySet()) {
        String fieldName = fieldEntry.getKey();
        Field field = fieldEntry.getValue();
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
            BString fieldNameBString = StringUtils.fromString(fieldName);
            // An absent optional readonly field imposes no constraint.
            if (SymbolFlags
                    .isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(fieldNameBString)) {
                continue;
            }
            if (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {
                return false;
            }
        } else {
            if (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {
                return false;
            }
        }
    }
    if (recordType.sealed) {
        return true;
    }
    // Open records: the rest-field type must also be anydata.
    return checkIsType(recordType.restFieldType, targetType, unresolvedTypes);
}
// Value-aware dispatch: record sources consult the actual value for readonly
// fields; map sources fall back to the purely structural check.
private static boolean checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.getTag();
    if (sourceTag == TypeTags.RECORD_TYPE_TAG) {
        return checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType, unresolvedTypes);
    }
    if (sourceTag == TypeTags.MAP_TAG) {
        return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
    }
    return false;
}
// Value-aware record-to-record subtype check: readonly source fields are
// checked against the actual stored value (which may be narrower than the
// declared field type); other fields are checked by declared type.
private static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,
                                         BRecordType targetType, List<TypePair> unresolvedTypes) {
    // Cycle guard for recursive record types.
    TypePair pair = new TypePair(sourceRecordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    if (targetType.sealed && !sourceRecordType.sealed) {
        return false;
    }
    if (!sourceRecordType.sealed &&
            !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
        return false;
    }
    Map<String, Field> sourceFields = sourceRecordType.getFields();
    Set<String> targetFieldNames = targetType.getFields().keySet();
    for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
        String fieldName = targetFieldEntry.getKey();
        Field targetField = targetFieldEntry.getValue();
        Field sourceField = sourceFields.get(fieldName);
        if (sourceField == null) {
            // A missing source field is acceptable only for optional target fields.
            if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {
                return false;
            }
            continue;
        }
        if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
            return false;
        }
        boolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);
        boolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);
        if (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {
            BString fieldNameBString = StringUtils.fromString(fieldName);
            // An absent optional readonly field only works against an optional target field.
            if (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {
                if (!optionalTargetField) {
                    return false;
                }
                continue;
            }
            // Readonly field: check the stored value itself against the target field type.
            if (!checkIsLikeType(sourceRecordValue.get(fieldNameBString), targetField.getFieldType())) {
                return false;
            }
        } else {
            if (!optionalTargetField && optionalSourceField) {
                return false;
            }
            if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
    }
    if (targetType.sealed) {
        // Closed target: extra source fields are tolerated only when their type
        // is uninhabitable (never, or a structure requiring a never member).
        for (String sourceFieldName : sourceFields.keySet()) {
            if (targetFieldNames.contains(sourceFieldName)) {
                continue;
            }
            if (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(
                    sourceFields.get(sourceFieldName).getFieldType())) {
                return false;
            }
        }
        return true;
    }
    // Open target: extra source fields must fit the target's rest type.
    // NOTE(review): this loop iterates the SOURCE fields although the loop
    // variable is named targetFieldEntry — consider renaming for clarity.
    for (Map.Entry<String, Field> targetFieldEntry : sourceFields.entrySet()) {
        String fieldName = targetFieldEntry.getKey();
        Field field = targetFieldEntry.getValue();
        if (targetFieldNames.contains(fieldName)) {
            continue;
        }
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
            if (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),
                    targetType.restFieldType)) {
                return false;
            }
        } else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
// Incompatible when the target field requires readonly but the source field is mutable.
private static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {
    boolean targetReadOnly = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY);
    boolean sourceReadOnly = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY);
    return targetReadOnly && !sourceReadOnly;
}
// Array-to-array subtype check: size compatibility first (an open source only
// fits an open target; a fixed-length source must match a fixed-length target's
// size), then covariant element-type check.
private static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,
                                        List<TypePair> unresolvedTypes) {
    ArrayState sourceState = sourceType.getState();
    if (sourceState == ArrayState.OPEN) {
        if (targetType.getState() != ArrayState.OPEN) {
            return false;
        }
    } else if (sourceState == ArrayState.CLOSED) {
        if (targetType.getState() == ArrayState.CLOSED && sourceType.getSize() != targetType.getSize()) {
            return false;
        }
    }
    return checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);
}
// Tuple-to-array subtype check: every tuple member (and rest) type must fit the
// array element type; for fixed-length targets the tuple must be closed and of
// the exact same length.
private static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,
                                        List<TypePair> unresolvedTypes) {
    List<Type> memberTypes = sourceType.getTupleTypes();
    Type sourceRestType = sourceType.getRestType();
    Type targetElementType = targetType.getElementType();
    if (targetType.getState() != ArrayState.OPEN) {
        // Fixed-length target: no rest component allowed, lengths must match.
        if (sourceRestType != null || memberTypes.size() != targetType.getSize()) {
            return false;
        }
        for (Type memberType : memberTypes) {
            if (!checkIsType(memberType, targetElementType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }
    for (Type memberType : memberTypes) {
        if (!checkIsType(memberType, targetElementType, unresolvedTypes)) {
            return false;
        }
    }
    return sourceRestType == null || checkIsType(sourceRestType, targetElementType, unresolvedTypes);
}
// Dispatch for array targets: every union member must be a subtype of the
// array; otherwise only array and tuple sources are candidates.
private static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List<TypePair> unresolvedTypes) {
    switch (sourceType.getTag()) {
        case TypeTags.UNION_TAG:
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.ARRAY_TAG:
            return checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            return checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);
        default:
            return false;
    }
}
// Array-to-tuple subtype check, driven by the array's state (open or
// fixed-length) against the tuple's member and rest types.
private static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,
                                        List<TypePair> unresolvedTypes) {
    Type sourceElementType = sourceType.getElementType();
    List<Type> targetTypes = targetType.getTupleTypes();
    Type targetRestType = targetType.getRestType();
    switch (sourceType.getState()) {
        case OPEN:
            // An open array has arbitrary length, so only a pure rest-type
            // tuple ([T...]) can admit it.
            if (targetRestType == null) {
                return false;
            }
            if (targetTypes.isEmpty()) {
                return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
            }
            return false;
        case CLOSED:
            if (sourceType.getSize() < targetTypes.size()) {
                return false;
            }
            if (targetTypes.isEmpty()) {
                if (targetRestType != null) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                // [] admits only a zero-length array.
                return sourceType.getSize() == 0;
            }
            // Every fixed tuple position must accept the array element type.
            for (Type targetElementType : targetTypes) {
                if (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {
                    return false;
                }
            }
            if (sourceType.getSize() == targetTypes.size()) {
                return true;
            }
            // Elements beyond the fixed members go against the rest type.
            if (targetRestType != null) {
                return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
            }
            return false;
        default:
            return false;
    }
}
// Tuple-to-tuple subtype check: member-wise covariance, with the source's
// surplus members and rest type checked against the target's rest type.
private static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,
                                        List<TypePair> unresolvedTypes) {
    List<Type> sourceTypes = sourceType.getTupleTypes();
    Type sourceRestType = sourceType.getRestType();
    List<Type> targetTypes = targetType.getTupleTypes();
    Type targetRestType = targetType.getRestType();
    // A source rest component needs a rest component in the target.
    if (sourceRestType != null && targetRestType == null) {
        return false;
    }
    int sourceTypeSize = sourceTypes.size();
    int targetTypeSize = targetTypes.size();
    // Both closed: lengths must match exactly.
    if (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {
        return false;
    }
    if (sourceTypeSize < targetTypeSize) {
        return false;
    }
    // Pairwise covariant member check over the target's fixed members.
    for (int i = 0; i < targetTypeSize; i++) {
        if (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {
            return false;
        }
    }
    if (sourceTypeSize == targetTypeSize) {
        if (sourceRestType != null) {
            return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
        }
        return true;
    }
    // The source's surplus fixed members go against the target's rest type.
    for (int i = targetTypeSize; i < sourceTypeSize; i++) {
        if (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {
            return false;
        }
    }
    if (sourceRestType != null) {
        return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
    }
    return true;
}
// Dispatch for tuple targets: every union member must be a subtype of the
// tuple; otherwise only array and tuple sources are candidates.
private static boolean checkIsTupleType(Type sourceType, BTupleType targetType, List<TypePair> unresolvedTypes) {
    switch (sourceType.getTag()) {
        case TypeTags.UNION_TAG:
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.ARRAY_TAG:
            return checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            return checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);
        default:
            return false;
    }
}
// Checks membership in `any`: everything except error (and readonly) belongs.
// For unions every member must itself be `any`.
// NOTE(review): the ANYDATA/JSON case labels cast to BUnionType — this assumes
// those tags are always represented as BUnionType instances; confirm invariant.
private static boolean checkIsAnyType(Type sourceType) {
    switch (sourceType.getTag()) {
        case TypeTags.ERROR_TAG:
        case TypeTags.READONLY_TAG:
            return false;
        case TypeTags.UNION_TAG:
        case TypeTags.ANYDATA_TAG:
        case TypeTags.JSON_TAG:
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsAnyType(memberType)) {
                    return false;
                }
            }
            return true;
    }
    return true;
}
// Only finite types can be subtypes of a finite type; subtyping here is checked
// as value-space equality (same size and full containment).
private static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {
    if (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {
        return false;
    }
    BFiniteType sourceFiniteType = (BFiniteType) sourceType;
    return sourceFiniteType.valueSpace.size() == targetType.valueSpace.size()
            && targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);
}
// future<S> is a subtype of future<T> iff S is a subtype of T (null constraints
// are treated as `any` by checkConstraints).
private static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.FUTURE_TAG) {
        return false;
    }
    BFutureType sourceFutureType = (BFutureType) sourceType;
    return checkConstraints(sourceFutureType.getConstrainedType(), targetType.getConstrainedType(),
            unresolvedTypes);
}
// Structural object subtype check without an instance value; delegates to the
// value-aware overload with a null value.
private static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,
                                              List<TypePair> unresolvedTypes) {
    return checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);
}
// Structural object subtype check. When sourceVal is non-null, final source
// fields are checked against their current values; otherwise only declared
// types are compared. Methods and (distinct-type) type-id sets are also checked.
private static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,
                                              List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {
        return false;
    }
    // Cycle guard for recursive object types.
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    BObjectType sourceObjectType = (BObjectType) sourceType;
    // An isolated target cannot be satisfied by a non-isolated source.
    if (SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&
            !SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {
        return false;
    }
    Map<String, Field> targetFields = targetType.getFields();
    Map<String, Field> sourceFields = sourceObjectType.getFields();
    MethodType[] targetFuncs = targetType.getMethods();
    MethodType[] sourceFuncs = sourceObjectType.getMethods();
    // A target with private members is never structurally substitutable.
    if (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags
            .isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))
            || Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),
            SymbolFlags.PRIVATE))) {
        return false;
    }
    // The source must offer at least as many fields and methods as the target.
    if (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {
        return false;
    }
    String targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse("");
    String sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse("");
    if (sourceVal == null) {
        if (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                unresolvedTypes)) {
            return false;
        }
    } else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
            (BObject) sourceVal, unresolvedTypes)) {
        return false;
    }
    return checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,
            sourceTypeModule, sourceObjectType, targetType);
}
// Every target field must exist in the source with compatible visibility,
// compatible readonly-ness, and a covariant field type.
private static boolean checkObjectSubTypeForFields(Map<String, Field> targetFields,
                                                   Map<String, Field> sourceFields, String targetTypeModule,
                                                   String sourceTypeModule, List<TypePair> unresolvedTypes) {
    for (Field lhsField : targetFields.values()) {
        Field rhsField = sourceFields.get(lhsField.getFieldName());
        if (rhsField == null) {
            return false;
        }
        if (!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                rhsField.getFlags())) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(lhsField, rhsField)) {
            return false;
        }
        if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
// Value-aware object field compatibility: final source fields are checked via
// the object's current value (readonly values by likeness, mutable values by
// their runtime type); other fields are checked by declared type.
private static boolean checkObjectSubTypeForFieldsByValue(Map<String, Field> targetFields,
                                                          Map<String, Field> sourceFields, String targetTypeModule,
                                                          String sourceTypeModule, BObject sourceObjVal,
                                                          List<TypePair> unresolvedTypes) {
    for (Field lhsField : targetFields.values()) {
        String name = lhsField.getFieldName();
        Field rhsField = sourceFields.get(name);
        // The source must declare the field with compatible visibility and readonly-ness.
        if (rhsField == null ||
                !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                        rhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,
                rhsField)) {
            return false;
        }
        if (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {
            Object fieldValue = sourceObjVal.get(StringUtils.fromString(name));
            Type fieldValueType = getType(fieldValue);
            if (fieldValueType.isReadOnly()) {
                // Immutable value: check the value itself against the target field type.
                if (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {
                    return false;
                }
                continue;
            }
            // Mutable value of a final field: check the value's runtime type.
            if (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        } else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
// Checks that every non-resource target method has a matching source method
// with compatible visibility and identical remote-ness, and that the source's
// type-id set covers the target's (for distinct object types).
private static boolean checkObjectSubTypeForMethods(List<TypePair> unresolvedTypes,
                                                    MethodType[] targetFuncs,
                                                    MethodType[] sourceFuncs,
                                                    String targetTypeModule, String sourceTypeModule,
                                                    BObjectType sourceType, BObjectType targetType) {
    for (MethodType lhsFunc : targetFuncs) {
        // Resource methods are skipped here.
        if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {
            continue;
        }
        MethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);
        if (rhsFunc == null ||
                !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),
                        rhsFunc.getFlags())) {
            return false;
        }
        // remote-ness must match exactly in both directions.
        if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags
                .isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {
            return false;
        }
    }
    // Distinct-type check: a null target id set imposes no requirement; a null
    // source id set cannot satisfy a non-null target one.
    BTypeIdSet targetTypeIdSet = targetType.typeIdSet;
    if (targetTypeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;
    if (sourceTypeIdSet == null) {
        return false;
    }
    return sourceTypeIdSet.containsAll(targetTypeIdSet);
}
// Checks whether the right-hand member is visible wherever the left-hand member
// is: private requires the same module, public requires public, and default
// (module-level) visibility requires module-level in the same module.
private static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,
                                                long rhsFlags) {
    if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PRIVATE)) {
        return lhsTypePkg.equals(rhsTypePkg);
    }
    if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {
        return SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
    }
    boolean rhsIsModuleLevel = !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE)
            && !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
    return rhsIsModuleLevel && lhsTypePkg.equals(rhsTypePkg);
}
// Returns the first source method with the same name as lhsFunc and a
// compatible function type, or null when none matches.
private static MethodType getMatchingInvokableType(MethodType[] rhsFuncs,
                                                   MethodType lhsFunc,
                                                   List<TypePair> unresolvedTypes) {
    for (MethodType rhsFunc : rhsFuncs) {
        if (!lhsFunc.getName().equals(rhsFunc.getName())) {
            continue;
        }
        if (checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(), unresolvedTypes)) {
            return rhsFunc;
        }
    }
    return null;
}
// Function-type compatibility for object methods: parameters are checked
// contravariantly (target param <: source param), the return type covariantly,
// and an isolated target cannot be matched by a non-isolated source.
private static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,
                                                              List<TypePair> unresolvedTypes) {
    if (hasIncompatibleIsolatedFlags(target, source)) {
        return false;
    }
    Type[] sourceParams = source.getParameterTypes();
    Type[] targetParams = target.getParameterTypes();
    if (sourceParams.length != targetParams.length) {
        return false;
    }
    for (int i = 0; i < sourceParams.length; i++) {
        if (!checkIsType(targetParams[i], sourceParams[i], unresolvedTypes)) {
            return false;
        }
    }
    Type sourceReturn = source.getReturnType();
    Type targetReturn = target.getReturnType();
    if (sourceReturn == null || targetReturn == null) {
        // Compatible only when both return types are absent.
        return sourceReturn == null && targetReturn == null;
    }
    return checkIsType(sourceReturn, targetReturn, unresolvedTypes);
}
// Checks whether sourceType is a subtype of the given function type.
// Parameters are contravariant, the return type covariant; an `any function`
// target accepts every function regardless of signature.
private static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {
    if (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {
        return false;
    }
    BFunctionType source = (BFunctionType) sourceType;
    if (hasIncompatibleIsolatedFlags(targetType, source) || hasIncompatibleTransactionalFlags(targetType, source)) {
        return false;
    }
    if (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) {
        return true;
    }
    if (source.paramTypes.length != targetType.paramTypes.length) {
        return false;
    }
    // Contravariant parameter check: target param <: source param.
    for (int i = 0; i < source.paramTypes.length; i++) {
        if (!checkIsType(targetType.paramTypes[i], source.paramTypes[i], new ArrayList<>())) {
            return false;
        }
    }
    // Covariant return-type check.
    return checkIsType(source.retType, targetType.retType, new ArrayList<>());
}
// A non-isolated source function cannot satisfy an isolated target.
private static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) {
    boolean targetIsolated = SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED);
    boolean sourceIsolated = SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.ISOLATED);
    return targetIsolated && !sourceIsolated;
}
// A transactional source function cannot satisfy a non-transactional target.
private static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) {
    boolean sourceTransactional = SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL);
    boolean targetTransactional = SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL);
    return sourceTransactional && !targetTransactional;
}
// Checks whether the source is a service: either an actual service type
// (checked structurally as an object) or an object type carrying the SERVICE flag.
private static boolean checkIsServiceType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.getTag();
    if (sourceTag == TypeTags.SERVICE_TAG) {
        return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
    }
    if (sourceTag == TypeTags.OBJECT_TYPE_TAG) {
        long objectFlags = ((BObjectType) sourceType).flags;
        return (objectFlags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE;
    }
    return false;
}
// Returns true for types whose values are ALWAYS immutable (simple basic types,
// xml text, errors, function values, handles, etc.), regardless of any
// readonly flag on the type.
public static boolean isInherentlyImmutableType(Type sourceType) {
    if (isSimpleBasicType(sourceType)) {
        return true;
    }
    switch (sourceType.getTag()) {
        case TypeTags.XML_TEXT_TAG:
        case TypeTags.FINITE_TYPE_TAG:
        case TypeTags.READONLY_TAG:
        case TypeTags.NULL_TAG:
        case TypeTags.ERROR_TAG:
        case TypeTags.INVOKABLE_TAG:
        case TypeTags.SERVICE_TAG:
        case TypeTags.TYPEDESC_TAG:
        case TypeTags.FUNCTION_POINTER_TAG:
        case TypeTags.HANDLE_TAG:
            return true;
        case TypeTags.XML_TAG:
            // xml<never> is the empty sequence, which is immutable.
            return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
    }
    return false;
}
// Returns true when an immutable ("read-only intersection") variant of `type`
// can be constructed. Cycles are broken through unresolvedTypes; re-visiting a
// type is optimistically treated as immutable-capable.
public static boolean isSelectivelyImmutableType(Type type, Set<Type> unresolvedTypes) {
    if (!unresolvedTypes.add(type)) {
        return true;
    }
    switch (type.getTag()) {
        case TypeTags.ANY_TAG:
        case TypeTags.ANYDATA_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.XML_TAG:
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_ELEMENT_TAG:
        case TypeTags.XML_PI_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            Type elementType = ((BArrayType) type).getElementType();
            return isInherentlyImmutableType(elementType) ||
                    isSelectivelyImmutableType(elementType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            // Every member (and the rest type, if any) must be immutable-capable.
            BTupleType tupleType = (BTupleType) type;
            for (Type tupMemType : tupleType.getTupleTypes()) {
                if (!isInherentlyImmutableType(tupMemType) &&
                        !isSelectivelyImmutableType(tupMemType, unresolvedTypes)) {
                    return false;
                }
            }
            Type tupRestType = tupleType.getRestType();
            if (tupRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(tupRestType) ||
                    isSelectivelyImmutableType(tupRestType, unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            // Every field (and the rest field type, if any) must be immutable-capable.
            BRecordType recordType = (BRecordType) type;
            for (Field field : recordType.getFields().values()) {
                Type fieldType = field.getFieldType();
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                    return false;
                }
            }
            Type recordRestType = recordType.restFieldType;
            if (recordRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(recordRestType) ||
                    isSelectivelyImmutableType(recordRestType, unresolvedTypes);
        case TypeTags.OBJECT_TYPE_TAG:
            BObjectType objectType = (BObjectType) type;
            // A class (as opposed to an object type descriptor) qualifies only
            // when it is itself declared readonly.
            if (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) &&
                    !SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) {
                return false;
            }
            for (Field field : objectType.getFields().values()) {
                Type fieldType = field.getFieldType();
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.MAP_TAG:
            Type constraintType = ((BMapType) type).getConstrainedType();
            return isInherentlyImmutableType(constraintType) ||
                    isSelectivelyImmutableType(constraintType, unresolvedTypes);
        case TypeTags.TABLE_TAG:
            Type tableConstraintType = ((BTableType) type).getConstrainedType();
            return isInherentlyImmutableType(tableConstraintType) ||
                    isSelectivelyImmutableType(tableConstraintType, unresolvedTypes);
        case TypeTags.UNION_TAG:
            // A union qualifies when at least one member has a read-only intersection.
            boolean readonlyIntersectionExists = false;
            for (Type memberType : ((BUnionType) type).getMemberTypes()) {
                if (isInherentlyImmutableType(memberType) ||
                        isSelectivelyImmutableType(memberType, unresolvedTypes)) {
                    readonlyIntersectionExists = true;
                    break;
                }
            }
            return readonlyIntersectionExists;
        case TypeTags.INTERSECTION_TAG:
            return isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes);
    }
    return false;
}
// Covariant constraint comparison; a missing constraint on either side is
// treated as the `any` type.
private static boolean checkConstraints(Type sourceConstraint, Type targetConstraint,
                                        List<TypePair> unresolvedTypes) {
    Type effectiveSource = (sourceConstraint == null) ? TYPE_ANY : sourceConstraint;
    Type effectiveTarget = (targetConstraint == null) ? TYPE_ANY : targetConstraint;
    return checkIsType(effectiveSource, effectiveTarget, unresolvedTypes);
}
// Returns true when `value` is a mutable (non-frozen) reference value.
// NOTE(review): the tag comparison assumes all simple basic-type tags sort
// below NULL_TAG, so such values (and finite-type values) are never treated
// as reference values — confirm this tag-ordering invariant.
private static boolean isMutable(Object value, Type sourceType) {
    if (value == null || sourceType.getTag() < TypeTags.NULL_TAG ||
            sourceType.getTag() == TypeTags.FINITE_TYPE_TAG) {
        return false;
    }
    return !((RefValue) value).isFrozen();
}
// Structural comparison when both sides are arrays; otherwise falls back to
// reference identity of the type objects.
private static boolean checkArrayEquivalent(Type actualType, Type expType) {
    boolean bothArrays = expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG;
    if (!bothArrays) {
        return expType == actualType;
    }
    return checkIsArrayType((BArrayType) actualType, (BArrayType) expType, new ArrayList<>());
}
// Entry point: starts the recursive uninhabitability check with a fresh
// visited-type-name set.
private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) {
    return checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, new HashSet<>());
}
// Returns true if `type` is `never`, or a structured type that necessarily
// contains a `never` member (making the type uninhabitable).
// `visitedTypeSet` holds the NAMES of already-visited types to break cycles.
private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type,
                                                                               Set<String> visitedTypeSet) {
    switch (type.getTag()) {
        case TypeTags.NEVER_TAG:
            return true;
        case TypeTags.RECORD_TYPE_TAG:
            BRecordType recordType = (BRecordType) type;
            visitedTypeSet.add(recordType.getName());
            for (Field field : recordType.getFields().values()) {
                // Only required (non-optional) fields can make the record uninhabitable.
                if ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) ||
                        !SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) &&
                        // Bug fix: compare by type NAME. The previous code passed the
                        // Type object to a Set<String>, so contains() was always false
                        // and recursive records could recurse without bound.
                        !visitedTypeSet.contains(field.getFieldType().getName()) &&
                        checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(),
                                visitedTypeSet)) {
                    return true;
                }
            }
            return false;
        case TypeTags.TUPLE_TAG:
            BTupleType tupleType = (BTupleType) type;
            visitedTypeSet.add(tupleType.getName());
            List<Type> tupleTypes = tupleType.getTupleTypes();
            for (Type mem : tupleTypes) {
                // Skip member types already visited (cycle guard).
                if (!visitedTypeSet.add(mem.getName())) {
                    continue;
                }
                if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) {
                    return true;
                }
            }
            return false;
        case TypeTags.ARRAY_TAG:
            BArrayType arrayType = (BArrayType) type;
            visitedTypeSet.add(arrayType.getName());
            Type elemType = arrayType.getElementType();
            visitedTypeSet.add(elemType.getName());
            // Only fixed-length arrays can be made uninhabitable by their element type.
            return arrayType.getState() != ArrayState.OPEN &&
                    checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet);
        default:
            return false;
    }
}
/**
 * Check whether a given value conforms to a given type. First it checks the type of the value,
 * and if that fails, falls back to checking the value itself.
 *
 * @param sourceValue Value to check
 * @param targetType Target type
 * @param unresolvedValues Values that are unresolved so far
 * @param allowNumericConversion Flag indicating whether to perform numeric conversions
 * @return True if the value conforms to the provided type. False, otherwise.
 */
private static boolean checkIsLikeType(Object sourceValue, Type targetType, List<TypeValuePair> unresolvedValues,
                                       boolean allowNumericConversion) {
    Type sourceType = getType(sourceValue);
    // Fast path: a type-level subtype relation makes a value check unnecessary.
    if (checkIsType(sourceType, targetType, new ArrayList<>())) {
        return true;
    }
    return checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues, allowNumericConversion);
}
/**
 * Check whether a given value conforms to a given type. Strictly checks the value only, and does not consider the
 * static type of the value.
 *
 * @param sourceValue Value to check
 * @param sourceType Type of the value
 * @param targetType Target type
 * @param unresolvedValues Values that are unresolved so far (cycle guard for recursive structures)
 * @param allowNumericConversion Flag indicating whether to perform numeric conversions
 * @return True if the value conforms to the provided type. False, otherwise.
 */
private static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,
                                          List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    int sourceTypeTag = sourceType.getTag();
    int targetTypeTag = targetType.getTag();

    // Unwrap intersection types to their effective types on both sides before dispatching on tags.
    if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),
                targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
                        ((BIntersectionType) targetType).getEffectiveType(),
                unresolvedValues, allowNumericConversion);
    }

    if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsLikeOnValue(sourceValue, sourceType, ((BIntersectionType) targetType).getEffectiveType(),
                unresolvedValues, allowNumericConversion);
    }

    // Parameterized types delegate to their parameter value types.
    if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
        if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
            return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),
                    targetType, unresolvedValues, allowNumericConversion);
        }
        return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),
                ((BParameterizedType) targetType).getParamValueType(), unresolvedValues,
                allowNumericConversion);
    }

    switch (targetTypeTag) {
        case TypeTags.READONLY_TAG:
            return true;
        case TypeTags.BYTE_TAG:
            // Integer values must fit the byte range; other numerics only when conversion is allowed.
            if (TypeTags.isIntegerTypeTag(sourceTypeTag)) {
                return isByteLiteral((Long) sourceValue);
            }
            return allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue);
        case TypeTags.INT_TAG:
            return allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue);
        case TypeTags.SIGNED32_INT_TAG:
        case TypeTags.SIGNED16_INT_TAG:
        case TypeTags.SIGNED8_INT_TAG:
        case TypeTags.UNSIGNED32_INT_TAG:
        case TypeTags.UNSIGNED16_INT_TAG:
        case TypeTags.UNSIGNED8_INT_TAG:
            // Integer-to-integer-subtype narrowing is always checked; other sources need conversion.
            if (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) {
                return TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);
            }
            return allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);
        case TypeTags.FLOAT_TAG:
        case TypeTags.DECIMAL_TAG:
            return allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue);
        case TypeTags.CHAR_STRING_TAG:
            return TypeConverter.isConvertibleToChar(sourceValue);
        case TypeTags.RECORD_TYPE_TAG:
            return checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.TABLE_TAG:
            return checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.JSON_TAG:
            return checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.MAP_TAG:
            return checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues, allowNumericConversion);
        case TypeTags.STREAM_TAG:
            return checkIsLikeStreamType(sourceValue, (BStreamType) targetType);
        case TypeTags.ARRAY_TAG:
            return checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.TUPLE_TAG:
            return checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ERROR_TAG:
            return checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ANYDATA_TAG:
            return checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues, allowNumericConversion);
        case TypeTags.FINITE_TYPE_TAG:
            return checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType);
        case TypeTags.XML_ELEMENT_TAG:
            // An xml value conforms to xml:Element only if it is a singleton.
            if (sourceTypeTag == TypeTags.XML_TAG) {
                XmlValue xmlSource = (XmlValue) sourceValue;
                return xmlSource.isSingleton();
            }
            return false;
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_PI_TAG:
        case TypeTags.XML_TEXT_TAG:
            if (sourceTypeTag == TypeTags.XML_TAG) {
                return checkIsLikeNonElementSingleton((XmlValue) sourceValue, targetType);
            }
            return false;
        case TypeTags.XML_TAG:
            if (sourceTypeTag == TypeTags.XML_TAG) {
                return checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType);
            }
            return false;
        case TypeTags.UNION_TAG:
            if (allowNumericConversion) {
                // With numeric conversion the value must not be ambiguous: beyond the members it
                // matches without conversion, at most one additional member may match via conversion.
                List<Type> compatibleTypesWithNumConversion = new ArrayList<>();
                List<Type> compatibleTypesWithoutNumConversion = new ArrayList<>();
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    // Use a scratch copy so the no-conversion probe does not pollute the real cycle guard.
                    List<TypeValuePair> tempList = new ArrayList<>(unresolvedValues.size());
                    tempList.addAll(unresolvedValues);

                    if (checkIsLikeType(sourceValue, type, tempList, false)) {
                        compatibleTypesWithoutNumConversion.add(type);
                    }

                    if (checkIsLikeType(sourceValue, type, unresolvedValues, true)) {
                        compatibleTypesWithNumConversion.add(type);
                    }
                }
                // Conversion should only be possible to one other type.
                return compatibleTypesWithNumConversion.size() != 0 &&
                        compatibleTypesWithNumConversion.size() - compatibleTypesWithoutNumConversion.size() <= 1;
            } else {
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    if (checkIsLikeType(sourceValue, type, unresolvedValues, false)) {
                        return true;
                    }
                }
            }
            return false;
        default:
            return false;
    }
}
/**
 * Maps an XML subtype tag to the corresponding {@link XmlNodeType}.
 *
 * @param type type whose tag is inspected
 * @return the matching node type, or {@code null} if the tag is not an XML singleton subtype
 */
private static XmlNodeType getXmlNodeType(Type type) {
    switch (type.getTag()) {
        case TypeTags.XML_ELEMENT_TAG:
            return XmlNodeType.ELEMENT;
        case TypeTags.XML_COMMENT_TAG:
            return XmlNodeType.COMMENT;
        case TypeTags.XML_PI_TAG:
            return XmlNodeType.PI;
        case TypeTags.XML_TEXT_TAG:
            return XmlNodeType.TEXT;
        default:
            return null;
    }
}
/**
 * Checks whether an XML value conforms to a non-element XML singleton type
 * (comment, processing instruction, or text).
 */
private static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) {
    XmlNodeType expectedNodeType = getXmlNodeType(targetType);
    if (expectedNodeType == null) {
        return false;
    }
    XmlNodeType actualNodeType = xmlSource.getNodeType();
    if (actualNodeType == expectedNodeType) {
        return true;
    }
    if (actualNodeType != XmlNodeType.SEQUENCE) {
        return false;
    }
    XmlSequence sequence = (XmlSequence) xmlSource;
    // A singleton sequence wrapping the expected kind conforms; an empty sequence conforms to text.
    if (sequence.size() == 1 && sequence.getChildrenList().get(0).getNodeType() == expectedNodeType) {
        return true;
    }
    return expectedNodeType == XmlNodeType.TEXT && sequence.isEmpty();
}
/**
 * Checks whether an XML sequence value conforms to a (possibly union-constrained) xml target type:
 * every child node's kind must be permitted by the target's constraint.
 */
private static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) {
    if (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {
        return false;
    }
    BXmlType target = (BXmlType) targetType;
    Set<XmlNodeType> allowedNodeTypes = new HashSet<>();
    if (target.constraint.getTag() == TypeTags.UNION_TAG) {
        getXMLNodeOnUnion((BUnionType) target.constraint, allowedNodeTypes);
    } else {
        allowedNodeTypes.add(getXmlNodeType(target.constraint));
    }
    for (BXml child : ((XmlSequence) xmlSource).getChildrenList()) {
        if (!allowedNodeTypes.contains(child.getNodeType())) {
            return false;
        }
    }
    return true;
}
/**
 * Collects into {@code nodeTypes} the XML node kinds permitted by a (possibly nested) union type.
 */
private static void getXMLNodeOnUnion(BUnionType unionType, Set<XmlNodeType> nodeTypes) {
    // There are only four XML node kinds; once all are collected nothing more can be added.
    if (nodeTypes.size() == 4) {
        return;
    }
    for (Type member : unionType.getMemberTypes()) {
        if (member.getTag() == TypeTags.UNION_TAG) {
            getXMLNodeOnUnion((BUnionType) member, nodeTypes);
        } else {
            nodeTypes.add(getXmlNodeType(member));
        }
    }
}
/**
 * Returns true for the numeric basic types (tags below string, plus the integer subtype tags).
 */
public static boolean isNumericType(Type type) {
    int tag = type.getTag();
    return tag < TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(tag);
}
/**
 * Checks whether a value structurally conforms to {@code anydata} by recursively verifying
 * all of its members against {@code TYPE_ANYDATA}.
 */
private static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType,
                                              List<TypeValuePair> unresolvedValues,
                                              boolean allowNumericConversion) {
    switch (sourceType.getTag()) {
        case TypeTags.RECORD_TYPE_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.MAP_TAG:
            // Mapping values: every field value must itself be anydata.
            return isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA,
                    unresolvedValues, allowNumericConversion);
        case TypeTags.ARRAY_TAG:
            ArrayValue arr = (ArrayValue) sourceValue;
            BArrayType arrayType = (BArrayType) arr.getType();
            switch (arrayType.getElementType().getTag()) {
                // Arrays of simple value types are trivially anydata.
                case TypeTags.INT_TAG:
                case TypeTags.FLOAT_TAG:
                case TypeTags.DECIMAL_TAG:
                case TypeTags.STRING_TAG:
                case TypeTags.BOOLEAN_TAG:
                case TypeTags.BYTE_TAG:
                    return true;
                default:
                    return isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues,
                            allowNumericConversion);
            }
        case TypeTags.TUPLE_TAG:
            return isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ANYDATA_TAG:
            return true;
        // For finite/union-typed values, fall back to the full value-conformance check.
        case TypeTags.FINITE_TYPE_TAG:
        case TypeTags.UNION_TAG:
            return checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion);
        default:
            return false;
    }
}
/**
 * Returns true only if every element of {@code objects} conforms to {@code targetType}.
 */
private static boolean isLikeType(Object[] objects, Type targetType, List<TypeValuePair> unresolvedValues,
                                  boolean allowNumericConversion) {
    for (int i = 0; i < objects.length; i++) {
        if (!checkIsLikeType(objects[i], targetType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a list value conforms to a tuple type: each fixed member type must accept
 * the corresponding element, and any extra elements must conform to the rest type.
 */
private static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof ArrayValue)) {
        return false;
    }
    ArrayValue sourceList = (ArrayValue) sourceValue;
    List<Type> memberTypes = targetType.getTupleTypes();
    int sourceSize = sourceList.size();
    int memberCount = memberTypes.size();
    Type restType = targetType.getRestType();

    // The source must supply every fixed member; extras are allowed only with a rest type.
    if (sourceSize < memberCount || (restType == null && sourceSize > memberCount)) {
        return false;
    }

    int index = 0;
    for (; index < memberCount; index++) {
        if (!checkIsLikeType(sourceList.getRefValue(index), memberTypes.get(index), unresolvedValues,
                allowNumericConversion)) {
            return false;
        }
    }
    for (; index < sourceSize; index++) {
        if (!checkIsLikeType(sourceList.getRefValue(index), restType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
// True when the long value lies within the byte value space.
static boolean isByteLiteral(long longValue) {
    return BBYTE_MIN_VALUE <= longValue && longValue <= BBYTE_MAX_VALUE;
}
// True when the value lies within the signed32 range.
static boolean isSigned32LiteralValue(Long longObject) {
    long value = longObject;
    return value >= SIGNED32_MIN_VALUE && value <= SIGNED32_MAX_VALUE;
}
/**
 * Returns true when the value lies within the signed16 range.
 * <p>
 * Fix: the previous implementation compared {@code longObject.intValue()}, which narrows
 * the long to 32 bits first (JLS §5.1.3); an out-of-range value such as 0x1_0000_0000L
 * truncates to 0 and was wrongly accepted. The full long value is compared instead.
 */
static boolean isSigned16LiteralValue(Long longObject) {
    return longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE;
}
/**
 * Returns true when the value lies within the signed8 range.
 * <p>
 * Fix: comparing {@code longObject.intValue()} truncates the long to 32 bits, so values
 * out of range at the long level could wrap into range. Compare the full long value.
 */
static boolean isSigned8LiteralValue(Long longObject) {
    return longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE;
}
// True when the value lies within the unsigned32 range [0, UNSIGNED32_MAX_VALUE].
static boolean isUnsigned32LiteralValue(Long longObject) {
    long value = longObject;
    return 0 <= value && value <= UNSIGNED32_MAX_VALUE;
}
/**
 * Returns true when the value lies within the unsigned16 range [0, UNSIGNED16_MAX_VALUE].
 * <p>
 * Fix: {@code longObject.intValue()} truncates the long to 32 bits before the range check,
 * so out-of-range longs could wrap into range. Compare the full long value.
 */
static boolean isUnsigned16LiteralValue(Long longObject) {
    return longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE;
}
/**
 * Returns true when the value lies within the unsigned8 range [0, UNSIGNED8_MAX_VALUE].
 * <p>
 * Fix: {@code longObject.intValue()} truncates the long to 32 bits before the range check,
 * so out-of-range longs could wrap into range. Compare the full long value.
 */
static boolean isUnsigned8LiteralValue(Long longObject) {
    return longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE;
}
/**
 * Returns true when the given value is a string consisting of exactly one Unicode code point
 * (surrogate pairs count as a single character).
 */
static boolean isCharLiteralValue(Object object) {
    String stringValue;
    if (object instanceof BString) {
        stringValue = ((BString) object).getValue();
    } else if (object instanceof String) {
        stringValue = (String) object;
    } else {
        return false;
    }
    return stringValue.codePoints().count() == 1;
}
/**
 * Checks whether a list value conforms to an array type, with a fast type-level path for
 * arrays of simple value types and per-element checks otherwise.
 */
private static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof ArrayValue)) {
        return false;
    }
    ArrayValue source = (ArrayValue) sourceValue;
    Type targetTypeElementType = targetType.getElementType();
    if (source.getType().getTag() == TypeTags.ARRAY_TAG) {
        Type sourceElementType = ((BArrayType) source.getType()).getElementType();
        if (isValueType(sourceElementType)) {
            // Value-typed element: decide at the type level where possible.
            if (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) {
                return true;
            }

            if (allowNumericConversion && isNumericType(sourceElementType)) {
                if (isNumericType(targetTypeElementType)) {
                    return true;
                }

                if (targetTypeElementType.getTag() != TypeTags.UNION_TAG) {
                    return false;
                }

                // Conversion into a union is unambiguous only when the union contains
                // exactly one distinct numeric member type.
                List<Type> targetNumericTypes = new ArrayList<>();
                for (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) {
                    if (isNumericType(memType) && !targetNumericTypes.contains(memType)) {
                        targetNumericTypes.add(memType);
                    }
                }
                return targetNumericTypes.size() == 1;
            }

            // Without conversion, non-numeric value types cannot become float/decimal.
            if (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG ||
                    targetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) {
                return false;
            }
        }
    }
    // Fallback: every element must individually conform to the target element type.
    for (int i = 0; i < source.size(); i++) {
        if (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a mapping value conforms to a map type: every field value must conform
 * to the map's constrained type.
 */
private static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType,
                                          List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof MapValueImpl)) {
        return false;
    }
    Type constraint = targetType.getConstrainedType();
    for (Object member : ((MapValueImpl) sourceValue).values()) {
        if (!checkIsLikeType(member, constraint, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a stream value conforms to a stream type.
 * NOTE(review): the constraint comparison is reference equality ({@code ==}), matching the
 * original behavior — presumably constraint types are canonicalized; confirm before changing.
 */
private static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) {
    if (!(sourceValue instanceof StreamValue)) {
        return false;
    }
    BStreamType sourceStreamType = (BStreamType) ((StreamValue) sourceValue).getType();
    return sourceStreamType.getConstrainedType() == targetType.getConstrainedType();
}
/**
 * Checks whether a structured value conforms to json by recursively verifying its members.
 * Only list, mapping, record and tuple sources can conform; anything else fails.
 */
private static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType,
                                           List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    switch (sourceType.getTag()) {
        case TypeTags.ARRAY_TAG:
            ArrayValue sourceArray = (ArrayValue) sourceValue;
            Type elementType = ((BArrayType) sourceArray.getType()).getElementType();
            if (isValueType(elementType)) {
                // Homogeneous value-typed arrays can be decided at the type level.
                return checkIsType(elementType, targetType, new ArrayList<>());
            }
            Object[] arrayMembers = sourceArray.getValues();
            int arraySize = sourceArray.size();
            for (int i = 0; i < arraySize; i++) {
                if (!checkIsLikeType(arrayMembers[i], targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        case TypeTags.MAP_TAG:
            for (Object member : ((MapValueImpl) sourceValue).values()) {
                if (!checkIsLikeType(member, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        case TypeTags.RECORD_TYPE_TAG:
            // Records may be cyclic: guard with the unresolved-values list before recursing.
            TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
            if (unresolvedValues.contains(typeValuePair)) {
                return true;
            }
            unresolvedValues.add(typeValuePair);
            for (Object member : ((MapValueImpl) sourceValue).values()) {
                if (!checkIsLikeType(member, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        case TypeTags.TUPLE_TAG:
            for (Object member : ((TupleValueImpl) sourceValue).getValues()) {
                if (!checkIsLikeType(member, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether a mapping value conforms to a record type: every non-optional target field
 * must be present, and every source field must conform to its declared (or rest) field type.
 */
private static boolean checkIsLikeRecordType(Object sourceValue, BRecordType targetType,
                                             List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof MapValueImpl)) {
        return false;
    }
    // Cycle guard: a value already being checked against this type is treated as conforming.
    TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
    if (unresolvedValues.contains(typeValuePair)) {
        return true;
    }
    unresolvedValues.add(typeValuePair);
    Map<String, Type> targetTypeField = new HashMap<>();
    Type restFieldType = targetType.restFieldType;

    for (Field field : targetType.getFields().values()) {
        targetTypeField.put(field.getFieldName(), field.getFieldType());
    }

    // Every declared field must be present in the source unless it is marked optional.
    for (Map.Entry targetTypeEntry : targetTypeField.entrySet()) {
        Object fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString());

        if (!(((MapValueImpl) sourceValue).containsKey(fieldName)) &&
                !SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(),
                        SymbolFlags.OPTIONAL)) {
            return false;
        }
    }

    for (Object object : ((MapValueImpl) sourceValue).entrySet()) {
        Map.Entry valueEntry = (Map.Entry) object;
        String fieldName = valueEntry.getKey().toString();

        if (targetTypeField.containsKey(fieldName)) {
            // Declared field: check against the declared field type.
            if (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName),
                    unresolvedValues, allowNumericConversion)) {
                return false;
            }
        } else {
            if (!targetType.sealed) {
                // Open record: undeclared fields must conform to the rest field type.
                if (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues,
                        allowNumericConversion)) {
                    return false;
                }
            } else {
                // Closed record: extra fields are not allowed.
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether a table value conforms to a table type: key types must be compatible and
 * every member row must conform to the target's constrained (row) type.
 * <p>
 * Fixes: the cycle-guard pair was checked against {@code unresolvedValues} but never added,
 * so cyclic table structures could recurse without termination; and the member check now
 * propagates {@code unresolvedValues} so cycle detection spans member rows.
 */
private static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof TableValueImpl)) {
        return false;
    }
    TableValueImpl tableValue = (TableValueImpl) sourceValue;
    BTableType sourceType = (BTableType) tableValue.getType();

    // A keyed target requires the source to declare key field names.
    if (targetType.getKeyType() != null && sourceType.getFieldNames() == null) {
        return false;
    }

    if (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(), targetType.getKeyType())) {
        return false;
    }

    TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
    if (unresolvedValues.contains(typeValuePair)) {
        return true;
    }
    // Register the pair BEFORE recursing so the guard actually terminates cycles.
    unresolvedValues.add(typeValuePair);

    for (Object member : tableValue.values().toArray()) {
        if (!checkIsLikeType(member, targetType.getConstrainedType(), unresolvedValues,
                allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether the given value matches any member of a finite type's value space.
 */
private static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType, BFiniteType targetType) {
    for (Object item : targetType.valueSpace) {
        if (isFiniteTypeValue(sourceValue, sourceType, item)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks whether {@code sourceValue} equals a single member of a finite type's value space,
 * using numeric-value comparison for int/byte/float members and {@code equals} otherwise.
 */
protected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) {
    Type valueSpaceItemType = getType(valueSpaceItem);
    // Non-numeric (above float) value-space members: tags must match and values be equal
    // (identity short-circuit first).
    if (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) {
        return valueSpaceItemType.getTag() == sourceType.getTag() &&
                (valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue));
    }

    switch (sourceType.getTag()) {
        case TypeTags.BYTE_TAG:
        case TypeTags.INT_TAG:
            // int and byte values compare by numeric value regardless of exact tag.
            return ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue();
        case TypeTags.FLOAT_TAG:
            if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                return false;
            }
            return ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue();
        case TypeTags.DECIMAL_TAG:
            // Falls through to the default equals-based comparison.
        default:
            if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                return false;
            }
            return valueSpaceItem.equals(sourceValue);
    }
}
/**
 * Checks whether a source error type is a subtype of a target error type:
 * detail types must be subtypes, and the source must carry every distinct type id
 * required by the target.
 */
private static boolean checkIsErrorType(Type sourceType, BErrorType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.ERROR_TAG) {
        return false;
    }
    // Cycle guard: a pair already under comparison is assumed compatible.
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);

    BErrorType sourceErrorType = (BErrorType) sourceType;
    if (!checkIsType(sourceErrorType.detailType, targetType.detailType, unresolvedTypes)) {
        return false;
    }
    // A target without distinct-type ids imposes no further requirement.
    if (targetType.typeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceTypeIdSet = sourceErrorType.typeIdSet;
    return sourceTypeIdSet != null && sourceTypeIdSet.containsAll(targetType.typeIdSet);
}
/**
 * Checks whether an error value conforms to an error type: the detail mapping must conform
 * and the value must carry every distinct type id required by the target.
 * <p>
 * Fix: the null check previously ran AFTER {@code getType(sourceValue)} was invoked, making
 * it unreachable if {@code getType} dereferenced null; null is now rejected up front.
 */
private static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (sourceValue == null) {
        return false;
    }
    Type sourceType = getType(sourceValue);
    if (sourceType.getTag() != TypeTags.ERROR_TAG) {
        return false;
    }
    if (!checkIsLikeType(((ErrorValue) sourceValue).getDetails(), targetType.detailType, unresolvedValues,
            allowNumericConversion)) {
        return false;
    }
    // No distinct-type requirement on the target: the detail check alone decides.
    if (targetType.typeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet;
    if (sourceIdSet == null) {
        return false;
    }
    return sourceIdSet.containsAll(targetType.typeIdSet);
}
// Simple basic types occupy the tag space strictly below the nil tag.
private static boolean isSimpleBasicType(Type type) {
    return type.getTag() < TypeTags.NULL_TAG;
}
// True only for the handle basic type.
private static boolean isHandleType(Type type) {
    return type.getTag() == TypeTags.HANDLE_TAG;
}
/**
 * Deep value equality check for anydata.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value on the right hand side
 * @param checkedValues Structured value pairs already compared or being compared
 *                      (cycle guard for self-referential structures)
 * @return True if values are equal, else false.
 */
private static boolean isEqual(Object lhsValue, Object rhsValue, List<ValuePair> checkedValues) {
    // Identity implies equality; also covers both-null.
    if (lhsValue == rhsValue) {
        return true;
    }

    if (null == lhsValue || null == rhsValue) {
        return false;
    }

    int lhsValTypeTag = getType(lhsValue).getTag();
    int rhsValTypeTag = getType(rhsValue).getTag();

    switch (lhsValTypeTag) {
        case TypeTags.STRING_TAG:
        case TypeTags.BOOLEAN_TAG:
            return lhsValue.equals(rhsValue);
        case TypeTags.INT_TAG:
            // int and byte values are numerically comparable.
            if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).longValue());
        case TypeTags.BYTE_TAG:
            if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                return false;
            }
            return ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();
        case TypeTags.FLOAT_TAG:
            if (rhsValTypeTag != TypeTags.FLOAT_TAG) {
                return false;
            }
            // Unlike IEEE ==, Ballerina value equality treats NaN as equal to NaN.
            if (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {
                return true;
            }
            return ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();
        case TypeTags.DECIMAL_TAG:
            if (rhsValTypeTag != TypeTags.DECIMAL_TAG) {
                return false;
            }
            return checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.XML_TAG:
            // An xml value tagged XML_TAG may still be a text singleton at runtime.
            if (lhsValue instanceof XmlText) {
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
            }
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_ELEMENT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_COMMENT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_TEXT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_PI_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue, (XmlValue) rhsValue);
        case TypeTags.MAP_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.RECORD_TYPE_TAG:
            return isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue, (MapValueImpl) rhsValue,
                    checkedValues);
        case TypeTags.TUPLE_TAG:
        case TypeTags.ARRAY_TAG:
            return isListType(rhsValTypeTag) &&
                    isEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue, checkedValues);
        case TypeTags.ERROR_TAG:
            return rhsValTypeTag == TypeTags.ERROR_TAG &&
                    isEqual((ErrorValue) lhsValue, (ErrorValue) rhsValue, checkedValues);
        case TypeTags.SERVICE_TAG:
            // Services have no structural equality; falls through to the final false.
            break;
        case TypeTags.TABLE_TAG:
            return rhsValTypeTag == TypeTags.TABLE_TAG &&
                    isEqual((TableValueImpl) lhsValue, (TableValueImpl) rhsValue, checkedValues);
    }
    return false;
}
// True for the list-shaped tags (array and tuple).
private static boolean isListType(int typeTag) {
    return typeTag == TypeTags.ARRAY_TAG || typeTag == TypeTags.TUPLE_TAG;
}
// True for the mapping-shaped tags (map, record and json).
private static boolean isMappingType(int typeTag) {
    return typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG || typeTag == TypeTags.JSON_TAG;
}
/**
 * Deep equality check for an array/tuple.
 *
 * @param lhsList The array/tuple on the left hand side
 * @param rhsList The array/tuple on the right hand side
 * @param checkedValues Structured value pairs already compared or being compared
 * @return True if the array/tuple values are equal, else false.
 */
private static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List<ValuePair> checkedValues) {
    ValuePair pair = new ValuePair(lhsList, rhsList);
    // A pair already being compared further up the stack is assumed equal (cycle break).
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);

    int size = lhsList.size();
    if (size != rhsList.size()) {
        return false;
    }
    for (int index = 0; index < size; index++) {
        if (!isEqual(lhsList.get(index), rhsList.get(index), checkedValues)) {
            return false;
        }
    }
    return true;
}
/**
 * Deep equality check for a map.
 *
 * @param lhsMap Map on the left hand side
 * @param rhsMap Map on the right hand side
 * @param checkedValues Structured value pairs already compared or being compared
 * @return True if the map values are equal, else false.
 */
private static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List<ValuePair> checkedValues) {
    ValuePair pair = new ValuePair(lhsMap, rhsMap);
    // Cycle break: a pair already under comparison is assumed equal.
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);

    // Same size plus a key superset check implies identical key sets.
    if (lhsMap.size() != rhsMap.size() || !lhsMap.keySet().containsAll(rhsMap.keySet())) {
        return false;
    }
    for (Object entryObj : lhsMap.entrySet()) {
        Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entryObj;
        if (!isEqual(entry.getValue(), rhsMap.get(entry.getKey()), checkedValues)) {
            return false;
        }
    }
    return true;
}
/**
 * Deep equality check for a table.
 *
 * @param lhsTable Table on the left hand side
 * @param rhsTable Table on the right hand side
 * @param checkedValues Structured value pairs already compared or being compared
 * @return True if the table values are equal, else false.
 */
private static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List<ValuePair> checkedValues) {
    ValuePair compValuePair = new ValuePair(lhsTable, rhsTable);
    // Cycle break: a pair already under comparison is assumed equal.
    if (checkedValues.contains(compValuePair)) {
        return true;
    }
    checkedValues.add(compValuePair);
    if (lhsTable.size() != rhsTable.size()) {
        return false;
    }

    boolean isLhsKeyedTable = ((BTableType) lhsTable.getType()).getFieldNames() != null &&
            ((BTableType) lhsTable.getType()).getFieldNames().length > 0;
    boolean isRhsKeyedTable = ((BTableType) rhsTable.getType()).getFieldNames() != null &&
            ((BTableType) rhsTable.getType()).getFieldNames().length > 0;

    Object[] lhsTableValues = lhsTable.values().toArray();
    Object[] rhsTableValues = rhsTable.values().toArray();

    // Both keyed or both keyless: compare rows positionally.
    // NOTE(review): positional comparison assumes values() yields rows in a comparable
    // (insertion/key) order on both sides — confirm against TableValueImpl iteration order.
    if (isLhsKeyedTable == isRhsKeyedTable) {
        for (int i = 0; i < lhsTableValues.length; i++) {
            if (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) {
                return false;
            }
        }
        return true;
    }
    // A keyed table is never equal to a keyless one.
    return false;
}
/**
 * Deep equality check for error values: message, detail mapping and cause must all match.
 *
 * @param lhsError The error on the left hand side
 * @param rhsError The error on the right hand side
 * @param checkedValues Errors already compared or being compared
 * @return True if the error values are equal, else false.
 */
private static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List<ValuePair> checkedValues) {
    ValuePair pair = new ValuePair(lhsError, rhsError);
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);

    if (!isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues)) {
        return false;
    }
    if (!isEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues)) {
        return false;
    }
    return isEqual(lhsError.getCause(), rhsError.getCause(), checkedValues);
}
/**
 * Deep equality check for an XML sequence against any XML value: another sequence compares
 * child-wise, a single item matches a singleton sequence, and an empty sequence matches
 * only the distinguished xml:never value.
 */
private static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) {
    if (rhsXml instanceof XmlSequence) {
        return isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(),
                ((XmlSequence) rhsXml).getChildrenList());
    }
    if (rhsXml instanceof XmlItem) {
        List<BXml> children = lhsXMLSequence.getChildrenList();
        return children.size() == 1 && isEqual(children.get(0), rhsXml);
    }
    return lhsXMLSequence.getChildrenList().isEmpty()
            && TypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER;
}
/**
 * Deep equality check for an XML element item: name, attributes and children must match,
 * or the right-hand side is a singleton sequence wrapping an equal item.
 */
private static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) {
    if (rhsXml instanceof XmlSequence) {
        XmlSequence rhsSequence = (XmlSequence) rhsXml;
        return rhsSequence.getChildrenList().size() == 1
                && isEqual(lhsXMLItem, rhsSequence.getChildrenList().get(0));
    }
    if (!(rhsXml instanceof XmlItem)) {
        return false;
    }
    XmlItem rhsItem = (XmlItem) rhsXml;
    if (!rhsItem.getQName().equals(lhsXMLItem.getQName())) {
        return false;
    }
    if (!rhsItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet())) {
        return false;
    }
    return isEqual(rhsItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq());
}
/**
 * Deep equality check for XML text: text values must match, or an xml:never text node
 * equals an empty sequence.
 */
private static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) {
    if (rhsXml instanceof XmlText) {
        return lhsXMLText.getTextValue().equals(((XmlText) rhsXml).getTextValue());
    }
    return lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER
            && rhsXml instanceof XmlSequence
            && ((XmlSequence) rhsXml).getChildrenList().isEmpty();
}
/**
 * Deep equality check for XML comments: both must be comments with equal text.
 */
private static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) {
    return rhsXml instanceof XmlComment
            && lhsXMLComment.getTextValue().equals(((XmlComment) rhsXml).getTextValue());
}
/**
 * Deep equality check for XML processing instructions: both target and data must match.
 */
private static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) {
    if (!(rhsXml instanceof XmlPi)) {
        return false;
    }
    XmlPi rhsPi = (XmlPi) rhsXml;
    return lhsXMLPi.getData().equals(rhsPi.getData())
            && lhsXMLPi.getTarget().equals(rhsPi.getTarget());
}
/**
 * Compares two XML child lists element-wise; equal only when sizes match and every
 * corresponding pair is equal.
 */
private static boolean isXMLSequenceChildrenEqual(List<BXml> lhsList, List<BXml> rhsList) {
    int size = lhsList.size();
    if (size != rhsList.size()) {
        return false;
    }
    for (int index = 0; index < size; index++) {
        if (!isEqual(lhsList.get(index), rhsList.get(index))) {
            return false;
        }
    }
    return true;
}
/**
 * Type vector of size two, to hold the source and the target types.
 *
 * @since 0.995.0
 */
private static class TypePair {
    Type sourceType;
    Type targetType;

    public TypePair(Type sourceType, Type targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypePair)) {
            return false;
        }
        TypePair other = (TypePair) obj;
        return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);
    }

    @Override
    public int hashCode() {
        // Fix: equals() was overridden without hashCode(), violating the Object contract
        // and breaking any hash-based collection usage of TypePair.
        return 31 * sourceType.hashCode() + targetType.hashCode();
    }
}
/**
 * Check the reference equality of handle values.
 * The wrapped objects are compared by identity ({@code ==}) on purpose.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value on the right hand side
 * @return True if values are equal, else false.
 */
private static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) {
    return ((HandleValue) lhsValue).getValue() == ((HandleValue) rhsValue).getValue();
}
/**
 * Value vector of size two, to hold two values being compared.
 * NOTE(review): despite the original "unordered" description, equality is positional
 * (element i compared to element i) — behavior preserved; confirm intent before changing.
 *
 * @since 0.995.0
 */
private static class ValuePair {
    ArrayList<Object> valueList = new ArrayList<>(2);

    ValuePair(Object valueOne, Object valueTwo) {
        valueList.add(valueOne);
        valueList.add(valueTwo);
    }

    @Override
    public boolean equals(Object otherPair) {
        if (!(otherPair instanceof ValuePair)) {
            return false;
        }
        ArrayList<Object> otherList = ((ValuePair) otherPair).valueList;
        if (otherList.size() != valueList.size()) {
            return false;
        }
        for (int i = 0; i < otherList.size(); i++) {
            if (!otherList.get(i).equals(valueList.get(i))) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int hashCode() {
        // Fix: equals() was overridden without hashCode(), violating the Object contract.
        // List.hashCode() is element-wise and thus consistent with the equals above.
        return valueList.hashCode();
    }
}
/**
 * Checks whether a given {@link BType} has an implicit initial value or not.
 *
 * @param type {@link BType} to be analyzed.
 * @return whether there's an implicit initial value or not.
 */
public static boolean hasFillerValue(Type type) {
    // Fresh list per top-level query: tracks types under analysis to stop cycles.
    return hasFillerValue(type, new ArrayList<>());
}
/**
 * Recursive worker for {@link #hasFillerValue(Type)}; {@code unanalyzedTypes} guards
 * against cycles in recursive type definitions.
 */
private static boolean hasFillerValue(Type type, List<Type> unanalyzedTypes) {
    if (type == null) {
        return true;
    }
    // Basic types below the record tag have fillers, except char strings and never.
    if (type.getTag() < TypeTags.RECORD_TYPE_TAG &&
            !(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) {
        return true;
    }
    switch (type.getTag()) {
        case TypeTags.STREAM_TAG:
        case TypeTags.MAP_TAG:
        case TypeTags.ANY_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            return checkFillerValue((BArrayType) type, unanalyzedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return checkFillerValue((BFiniteType) type);
        case TypeTags.OBJECT_TYPE_TAG:
            return checkFillerValue((BObjectType) type);
        case TypeTags.RECORD_TYPE_TAG:
            return checkFillerValue((BRecordType) type, unanalyzedTypes);
        case TypeTags.TUPLE_TAG:
            return checkFillerValue((BTupleType) type, unanalyzedTypes);
        case TypeTags.UNION_TAG:
            return checkFillerValue((BUnionType) type, unanalyzedTypes);
        default:
            return false;
    }
}
// A tuple has a filler value only if every member type has one; the visited list
// guards against infinite recursion into cyclic tuple definitions.
private static boolean checkFillerValue(BTupleType tupleType, List<Type> unAnalyzedTypes) {
    if (unAnalyzedTypes.contains(tupleType)) {
        // Already being analyzed higher up the call chain: assume fillable so the
        // recursion terminates.
        return true;
    }
    unAnalyzedTypes.add(tupleType);
    boolean allMembersFillable = true;
    for (Type memberType : tupleType.getTupleTypes()) {
        if (!hasFillerValue(memberType, unAnalyzedTypes)) {
            allMembersFillable = false;
            break;
        }
    }
    return allMembersFillable;
}
// A union has a filler value if it is nilable, or if all its members are the same
// value type and that common type itself has a filler value.
private static boolean checkFillerValue(BUnionType type, List<Type> unAnalyzedTypes) {
    if (unAnalyzedTypes.contains(type)) {
        // Cycle guard: already under analysis.
        return true;
    }
    unAnalyzedTypes.add(type);
    if (type.isNullable()) {
        // nil serves as the filler value.
        return true;
    }
    // Compare every member against the first; any mismatch means there is no common
    // filler value. NOTE(review): assumes a union always has at least one member —
    // iterator.next() would throw NoSuchElementException otherwise.
    Iterator<Type> iterator = type.getMemberTypes().iterator();
    Type firstMember;
    for (firstMember = iterator.next(); iterator.hasNext(); ) {
        if (!isSameType(firstMember, iterator.next())) {
            return false;
        }
    }
    return isValueType(firstMember) && hasFillerValue(firstMember);
}
// A record has a filler value only when it has no mandatory fields: optional fields
// and defaultable (non-required) fields are skipped, and the first required field
// without a default makes the record non-fillable.
private static boolean checkFillerValue(BRecordType type, List<Type> unAnalyzedTypes) {
    if (unAnalyzedTypes.contains(type)) {
        // Cycle guard: already under analysis.
        return true;
    }
    unAnalyzedTypes.add(type);
    for (Field field : type.getFields().values()) {
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) {
            continue;
        }
        if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) {
            // Not required and not optional: a defaultable field, which fills itself.
            continue;
        }
        // Required field: the record cannot be implicitly filled.
        return false;
    }
    return true;
}
// An open array fills with the empty array; a fixed-length array additionally needs
// a fillable element type.
private static boolean checkFillerValue(BArrayType type, List<Type> unAnalyzedTypes) {
    if (type.getState() == ArrayState.OPEN) {
        return true;
    }
    return hasFillerValue(type.getElementType(), unAnalyzedTypes);
}
// An object type has a filler value only when it is not a service and its generated
// initializer takes no parameters and returns nil.
private static boolean checkFillerValue(BObjectType type) {
    if (type.getTag() == TypeTags.SERVICE_TAG) {
        return false;
    }
    MethodType generatedInitializer = type.generatedInitializer;
    if (generatedInitializer == null) {
        // No generated initializer available: cannot be implicitly constructed.
        return false;
    }
    FunctionType initFuncType = generatedInitializer.getType();
    boolean hasNoParameters = initFuncType.getParameterTypes().length == 0;
    boolean returnsNil = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG;
    return hasNoParameters && returnsNil;
}
// A finite type has a filler value if nil is in its value space, if it is a
// singleton, or if all values share one basic type and the corresponding zero value
// ("", 0, 0.0 or false) is present.
private static boolean checkFillerValue(BFiniteType type) {
    // nil present: nil itself is the filler value.
    for (Object value : type.valueSpace) {
        if (value == null) {
            return true;
        }
    }
    if (type.valueSpace.size() == 1) {
        // Singleton finite type fills with its only value.
        return true;
    }
    // All members must share the same Java value class; mixed classes mean no
    // common filler.
    Object firstElement = type.valueSpace.iterator().next();
    for (Object value : type.valueSpace) {
        if (value.getClass() != firstElement.getClass()) {
            return false;
        }
    }
    // Look for the zero value of the common basic type; comparison is on the string
    // form of the members (see containsElement).
    if (firstElement instanceof String) {
        // NOTE(review): string members appear to carry surrounding quotes in their
        // string form, hence "\"\"" — confirm against the value-space encoding.
        return containsElement(type.valueSpace, "\"\"");
    } else if (firstElement instanceof Byte
            || firstElement instanceof Integer
            || firstElement instanceof Long) {
        return containsElement(type.valueSpace, "0");
    } else if (firstElement instanceof Float
            || firstElement instanceof Double
            || firstElement instanceof BigDecimal) {
        return containsElement(type.valueSpace, "0.0");
    } else if (firstElement instanceof Boolean) {
        return containsElement(type.valueSpace, "false");
    } else {
        return false;
    }
}
// True when the value space holds a non-null member whose string form equals 'e'.
private static boolean containsElement(Set<Object> valueSpace, String e) {
    return valueSpace.stream().anyMatch(value -> value != null && value.toString().equals(e));
}
// True only when every member of the value space is of the given type.
private static boolean containsType(Set<Object> valueSpace, Type type) {
    return valueSpace.stream().allMatch(value -> isSameType(type, getType(value)));
}
/**
 * Validates that a Java value crossing into Ballerina is representable: null,
 * numbers, strings, booleans and Ballerina values pass through unchanged; anything
 * else raises a Java-to-Ballerina type-cast error.
 *
 * @param sourceVal  the Java value to validate (may be null)
 * @param targetType the Ballerina type the value is being treated as
 * @return the value unchanged when representable
 */
public static Object handleAnydataValues(Object sourceVal, Type targetType) {
    if (sourceVal == null) {
        return null;
    }
    boolean representable = sourceVal instanceof Number || sourceVal instanceof BString
            || sourceVal instanceof Boolean || sourceVal instanceof BValue;
    if (!representable) {
        throw ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType);
    }
    return sourceVal;
}
// Utility class; private constructor prevents instantiation.
private TypeChecker() {
}
}
|
class TypeChecker {
/**
 * Casts {@code sourceVal} to {@code targetType}. If the value already belongs to
 * the target type it is returned as-is; otherwise numeric conversions are applied
 * for simple basic types (including against each member of a union target).
 * Throws a type-cast error when no conversion succeeds.
 *
 * @param sourceVal  value to cast
 * @param targetType type to cast the value to
 * @return the (possibly converted) value
 */
public static Object checkCast(Object sourceVal, Type targetType) {
    if (checkIsType(sourceVal, targetType)) {
        return sourceVal;
    }
    Type sourceType = getType(sourceVal);
    // Tags up to BOOLEAN_TAG cover the simple basic types, which support
    // value-level numeric casts.
    if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) {
        return TypeConverter.castValues(targetType, sourceVal);
    }
    if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) {
        for (Type memberType : ((BUnionType) targetType).getMemberTypes()) {
            try {
                return TypeConverter.castValues(memberType, sourceVal);
            } catch (Exception ignored) {
                // Best-effort: a failed cast to one member is not an error until
                // every member of the union has been attempted.
            }
        }
    }
    throw ErrorUtils.createTypeCastError(sourceVal, targetType);
}
/** Casts any Ballerina value to {@code int} (Java {@code long}), raising a type-cast error on failure. */
public static long anyToInt(Object sourceVal) {
    return TypeConverter.anyToIntCast(sourceVal,
            () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT));
}
/** Casts any Ballerina value to the {@code int:Signed32} subtype, raising a type-cast error on failure. */
public static long anyToSigned32(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32,
            () -> ErrorUtils.createTypeCastError(sourceVal,
                    TYPE_INT_SIGNED_32));
}
/** Casts any Ballerina value to the {@code int:Signed16} subtype, raising a type-cast error on failure. */
public static long anyToSigned16(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16,
            () -> ErrorUtils.createTypeCastError(sourceVal,
                    TYPE_INT_SIGNED_16));
}
/** Casts any Ballerina value to the {@code int:Signed8} subtype, raising a type-cast error on failure. */
public static long anyToSigned8(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8,
            () -> ErrorUtils.createTypeCastError(sourceVal,
                    TYPE_INT_SIGNED_8));
}
/** Casts any Ballerina value to the {@code int:Unsigned32} subtype, raising a type-cast error on failure. */
public static long anyToUnsigned32(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32,
            () -> ErrorUtils.createTypeCastError(sourceVal,
                    TYPE_INT_UNSIGNED_32));
}
/** Casts any Ballerina value to the {@code int:Unsigned16} subtype, raising a type-cast error on failure. */
public static long anyToUnsigned16(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16,
            () -> ErrorUtils
                    .createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16));
}
/** Casts any Ballerina value to the {@code int:Unsigned8} subtype, raising a type-cast error on failure. */
public static long anyToUnsigned8(Object sourceVal) {
    return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8,
            () -> ErrorUtils
                    .createTypeCastError(sourceVal,
                            TYPE_INT_UNSIGNED_8));
}
/** Casts any Ballerina value to {@code float} (Java {@code double}), raising a type-cast error on failure. */
public static double anyToFloat(Object sourceVal) {
    return TypeConverter.anyToFloatCast(sourceVal, () -> ErrorUtils
            .createTypeCastError(sourceVal, TYPE_FLOAT));
}
/** Casts any Ballerina value to {@code boolean}, raising a type-cast error on failure. */
public static boolean anyToBoolean(Object sourceVal) {
    return TypeConverter.anyToBooleanCast(sourceVal, () -> ErrorUtils
            .createTypeCastError(sourceVal, TYPE_BOOLEAN));
}
/** Casts any Ballerina value to {@code byte} (carried as a Java {@code int}), raising a type-cast error on failure. */
public static int anyToByte(Object sourceVal) {
    return TypeConverter.anyToByteCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,
            TYPE_BYTE));
}
/** Casts any Ballerina value to {@code decimal}, raising a type-cast error on failure. */
public static DecimalValue anyToDecimal(Object sourceVal) {
    return TypeConverter.anyToDecimalCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,
            TYPE_DECIMAL));
}
/** Casts any Ballerina value to a Java {@code byte}, raising a Ballerina-to-Java cast error on failure. */
public static byte anyToJByte(Object sourceVal) {
    return TypeConverter.anyToJByteCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "byte"));
}
/** Casts any Ballerina value to a Java {@code char}, raising a Ballerina-to-Java cast error on failure. */
public static char anyToJChar(Object sourceVal) {
    return TypeConverter.anyToJCharCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "char"));
}
/** Casts any Ballerina value to a Java {@code short}, raising a Ballerina-to-Java cast error on failure. */
public static short anyToJShort(Object sourceVal) {
    return TypeConverter.anyToJShortCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "short"));
}
/** Casts any Ballerina value to a Java {@code int}, raising a Ballerina-to-Java cast error on failure. */
public static int anyToJInt(Object sourceVal) {
    return TypeConverter.anyToJIntCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "int"));
}
/** Casts any Ballerina value to a Java {@code long}, raising a Ballerina-to-Java cast error on failure. */
public static long anyToJLong(Object sourceVal) {
    return TypeConverter.anyToJLongCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "long"));
}
/** Casts any Ballerina value to a Java {@code float}, raising a Ballerina-to-Java cast error on failure. */
public static float anyToJFloat(Object sourceVal) {
    return TypeConverter.anyToJFloatCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "float"));
}
/** Casts any Ballerina value to a Java {@code double}, raising a Ballerina-to-Java cast error on failure. */
public static double anyToJDouble(Object sourceVal) {
    return TypeConverter.anyToJDoubleCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "double"));
}
/** Casts any Ballerina value to a Java {@code boolean}, raising a Ballerina-to-Java cast error on failure. */
public static boolean anyToJBoolean(Object sourceVal) {
    return TypeConverter.anyToJBooleanCast(sourceVal,
            () -> ErrorUtils.createBToJTypeCastError(sourceVal, "boolean"));
}
/**
 * Check whether a given value belongs to the given type.
 *
 * @param sourceVal value to check the type of
 * @param targetType type to test against
 * @return true if the value belongs to the given type, false otherwise
 */
public static boolean checkIsType(Object sourceVal, Type targetType) {
    // Resolve the value's runtime type once and delegate to the typed overload.
    Type sourceType = getType(sourceVal);
    return checkIsType(sourceVal, sourceType, targetType);
}
/**
 * Check whether a given value belongs to the given type.
 *
 * @param sourceVal value to check the type of
 * @param sourceType type of the value
 * @param targetType type to test against
 * @return true if the value belongs to the given type, false otherwise
 */
public static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) {
    if (checkIsType(sourceVal, sourceType, targetType, null)) {
        return true;
    }
    // An XML sequence may still match the target by shape even when the static
    // check fails, so fall back to an is-like check on the value itself.
    if (sourceType.getTag() == TypeTags.XML_TAG) {
        XmlValue val = (XmlValue) sourceVal;
        if (val.getNodeType() == XmlNodeType.SEQUENCE) {
            return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
        }
    }
    // Only immutable values may belong to the target by shape; mutable values must
    // match statically (their shape can change later).
    if (isMutable(sourceVal, sourceType)) {
        return false;
    }
    return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
}
/**
 * Check whether a given value has the same shape as the given type.
 *
 * @param sourceValue value to check the shape of
 * @param targetType type to check the shape against
 * @return true if the value has the same shape as the given type; false otherwise
 */
public static boolean checkIsLikeType(Object sourceValue, Type targetType) {
    // Shape check without numeric conversion.
    final boolean allowNumericConversion = false;
    return checkIsLikeType(sourceValue, targetType, allowNumericConversion);
}
/**
 * Check whether a given value has the same shape as the given type.
 *
 * @param sourceValue value to check the shape of
 * @param targetType type to check the shape against
 * @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type
 * @return true if the value has the same shape as the given type; false otherwise
 */
public static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) {
    // Fresh list per top-level call; presumably it tracks value/type pairs already
    // being checked so cyclic structures terminate — TODO confirm against the
    // four-argument overload.
    return checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion);
}
/**
 * Check whether two types are the same.
 *
 * @param sourceType type to test
 * @param targetType type to test against
 * @return true if the two types are same; false otherwise
 */
public static boolean isSameType(Type sourceType, Type targetType) {
    int sourceTypeTag = sourceType.getTag();
    int targetTypeTag = targetType.getTag();
    // Identity fast path.
    if (sourceType == targetType) {
        return true;
    }
    if (sourceTypeTag == targetTypeTag) {
        if (sourceType.equals(targetType)) {
            return true;
        }
        switch (sourceTypeTag) {
            case TypeTags.ARRAY_TAG:
                return checkArrayEquivalent(sourceType, targetType);
            case TypeTags.FINITE_TYPE_TAG:
                // Two finite types are the same when their value spaces have equal
                // size and every source value's type occurs in the target space.
                Set<Object> sourceValueSpace = ((BFiniteType) sourceType).valueSpace;
                Set<Object> targetValueSpace = ((BFiniteType) targetType).valueSpace;
                if (sourceValueSpace.size() != targetValueSpace.size()) {
                    return false;
                }
                for (Object sourceVal : sourceValueSpace) {
                    if (!containsType(targetValueSpace, getType(sourceVal))) {
                        return false;
                    }
                }
                return true;
            default:
                break;
        }
    }
    // A finite type on either side is the same as the other type when every value
    // in its value space has that other type.
    if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) {
        for (Object value : ((BFiniteType) sourceType).valueSpace) {
            if (!isSameType(getType(value), targetType)) {
                return false;
            }
        }
        return true;
    }
    if (targetTypeTag == TypeTags.FINITE_TYPE_TAG) {
        for (Object value : ((BFiniteType) targetType).valueSpace) {
            if (!isSameType(getType(value), sourceType)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
public static Type getType(Object value) {
if (value == null) {
return TYPE_NULL;
} else if (value instanceof Number) {
if (value instanceof Long) {
return TYPE_INT;
} else if (value instanceof Double) {
return TYPE_FLOAT;
} else if (value instanceof Integer || value instanceof Byte) {
return TYPE_BYTE;
}
} else if (value instanceof BString) {
return TYPE_STRING;
} else if (value instanceof Boolean) {
return TYPE_BOOLEAN;
}
return ((BValue) value).getType();
}
/**
 * Deep value equality check for anydata.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value on the right hand side
 * @return True if values are equal, else false.
 */
public static boolean isEqual(Object lhsValue, Object rhsValue) {
    // Fresh visited-pair list per top-level call; presumably used by the recursive
    // overload to terminate on cyclic structures — confirm against the 3-arg overload.
    return isEqual(lhsValue, rhsValue, new ArrayList<>());
}
/**
 * Check if two decimal values are equal in value.
 *
 * @param lhsValue The value on the left hand side
 * @param rhsValue The value of the right hand side
 * @return True if values are equal, else false.
 */
public static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
    // Non-real decimals (NaN/infinity kinds) never compare equal.
    if (!isDecimalRealNumber(lhsValue) || !isDecimalRealNumber(rhsValue)) {
        return false;
    }
    // compareTo ignores scale differences (e.g. 1.0 vs 1.00).
    return lhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0;
}
/**
 * Check if two decimal values are exactly equal.
 *
 * @param lhsValue The value on the left-hand side
 * @param rhsValue The value of the right-hand side
 * @return True if values are exactly equal, else false.
 */
public static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
    // Non-real decimals never compare equal.
    if (!isDecimalRealNumber(lhsValue) || !isDecimalRealNumber(rhsValue)) {
        return false;
    }
    // BigDecimal.equals is scale-sensitive: 1.0 and 1.00 are NOT exactly equal.
    return lhsValue.decimalValue().equals(rhsValue.decimalValue());
}
/**
 * Checks if the given decimal number is a real number.
 *
 * @param decimalValue The decimal value being checked
 * @return True if the decimal value is a real number.
 */
private static boolean isDecimalRealNumber(DecimalValue decimalValue) {
    DecimalValueKind kind = decimalValue.valueKind;
    return kind == DecimalValueKind.ZERO || kind == DecimalValueKind.OTHER;
}
/**
 * Reference-style equality check for XML values: node kinds must match; sequences
 * are compared child-by-child by reference, text nodes by value, and all other
 * node kinds are never equal here.
 *
 * @param lhsValue The XML value on the left hand side
 * @param rhsValue The XML value on the right hand side
 * @return True if the XML values are considered reference equal, else false.
 */
private static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) {
    if (lhsValue.getNodeType() != rhsValue.getNodeType()) {
        return false;
    }
    if (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) {
        return isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue);
    }
    // Text nodes are value-equal rather than reference-equal.
    if (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) {
        return isEqual(lhsValue, rhsValue);
    }
    return false;
}
// Two XML sequences are reference-equal when they have the same length and each
// pair of children is either the same object or reference-equal recursively.
private static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) {
    Iterator<BXml> lhsIter = lhsValue.getChildrenList().iterator();
    Iterator<BXml> rhsIter = rhsValue.getChildrenList().iterator();
    while (lhsIter.hasNext()) {
        if (!rhsIter.hasNext()) {
            // rhs is shorter than lhs.
            return false;
        }
        BXml lhsChild = lhsIter.next();
        BXml rhsChild = rhsIter.next();
        if (lhsChild != rhsChild && !isXMLValueRefEqual((XmlValue) lhsChild, (XmlValue) rhsChild)) {
            return false;
        }
    }
    // Equal only when rhs is exhausted as well.
    return !rhsIter.hasNext();
}
/**
 * Get the typedesc of a value.
 *
 * @param value Value
 * @return type desc associated with the value
 */
public static TypedescValue getTypedesc(Object value) {
    Type type = TypeChecker.getType(value);
    if (type == null) {
        return null;
    }
    // Simple basic values are described by a singleton finite type holding just
    // this value.
    if (isSimpleBasicType(type)) {
        return new TypedescValueImpl(new BFiniteType(value.toString(), Set.of(value), 0));
    }
    // Structured (ref) values carry their own typedesc.
    if (value instanceof RefValue) {
        return (TypedescValue) ((RefValue) value).getTypedesc();
    }
    return new TypedescValueImpl(type);
}
/**
 * Get the annotation value if present.
 *
 * @param typedescValue The typedesc value
 * @param annotTag The annot-tag-reference
 * @return the annotation value if present, nil else
 */
public static Object getAnnotValue(TypedescValue typedescValue, String annotTag) {
    Type describingType = typedescValue.getDescribingType();
    // Only annotatable types can carry annotations.
    if (describingType instanceof BAnnotatableType) {
        return ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag));
    }
    return null;
}
/**
 * Get the annotation value if present, looking it up by a {@code BString} tag.
 *
 * @param typedescValue The typedesc value
 * @param annotTag The annot-tag-reference
 * @return the annotation value if present, nil else
 */
public static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) {
    Type describingType = typedescValue.getDescribingType();
    if (describingType instanceof BAnnotatableType) {
        return ((BAnnotatableType) describingType).getAnnotation(annotTag);
    }
    return null;
}
/**
 * Check whether a given type is equivalent to a target type.
 *
 * @param sourceType type to check
 * @param targetType type to compare with
 * @return flag indicating the equivalence of the two types
 */
public static boolean checkIsType(Type sourceType, Type targetType) {
    // Typed null selects the three-argument (Type, Type, List) overload and starts
    // the check with no unresolved type pairs.
    List<TypePair> unresolvedTypes = null;
    return checkIsType(sourceType, targetType, unresolvedTypes);
}
// Core structural subtype check: source <: target. 'unresolvedTypes' carries
// (source, target) pairs already under analysis so recursive types terminate.
@Deprecated
public static boolean checkIsType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    // Fast path: identical references, or equal types with the same tag.
    if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
        return true;
    }
    // `never` (or a structure with a required `never` member) has no values, so it
    // is trivially a subtype of everything.
    if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) {
        return true;
    }
    // A non-readonly source can never belong to a readonly target.
    if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
        return false;
    }
    int sourceTypeTag = sourceType.getTag();
    int targetTypeTag = targetType.getTag();
    // Intersections are checked through their effective types.
    if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsType(((BIntersectionType) sourceType).getEffectiveType(),
                targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
                        ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
    }
    if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
    }
    // Parameterized types are checked through their parameter value types.
    if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
        if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
            return checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes);
        }
        return checkIsType(((BParameterizedType) sourceType).getParamValueType(),
                ((BParameterizedType) targetType).getParamValueType(), unresolvedTypes);
    }
    // `readonly` is expanded to (any & readonly)|error on either side.
    if (sourceTypeTag == TypeTags.READONLY_TAG) {
        return checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE,
                targetType, unresolvedTypes);
    }
    if (targetTypeTag == TypeTags.READONLY_TAG) {
        return checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes);
    }
    // A union source requires every member to belong to the target.
    if (sourceTypeTag == TypeTags.UNION_TAG) {
        return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
    }
    // A finite source requires every value to belong to the target (for targets
    // where a per-value check is meaningful).
    if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG &&
            (targetTypeTag == TypeTags.FINITE_TYPE_TAG || targetTypeTag <= TypeTags.NULL_TAG ||
                    targetTypeTag == TypeTags.XML_TEXT_TAG)) {
        return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
    }
    switch (targetTypeTag) {
        case TypeTags.BYTE_TAG:
        case TypeTags.SIGNED8_INT_TAG:
        case TypeTags.FLOAT_TAG:
        case TypeTags.DECIMAL_TAG:
        case TypeTags.CHAR_STRING_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.NULL_TAG:
            // These targets only admit exactly their own tag.
            return sourceTypeTag == targetTypeTag;
        case TypeTags.STRING_TAG:
            return TypeTags.isStringTypeTag(sourceTypeTag);
        case TypeTags.XML_TEXT_TAG:
            // xml maps to xml:Text only when its constraint is `never`.
            if (sourceTypeTag == TypeTags.XML_TAG) {
                return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
            }
            return sourceTypeTag == targetTypeTag;
        case TypeTags.INT_TAG:
            // int admits byte and all its signed/unsigned subtypes.
            return sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG ||
                    (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG);
        case TypeTags.SIGNED16_INT_TAG:
            return sourceTypeTag == TypeTags.BYTE_TAG ||
                    (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG);
        case TypeTags.SIGNED32_INT_TAG:
            return sourceTypeTag == TypeTags.BYTE_TAG ||
                    (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG);
        case TypeTags.UNSIGNED8_INT_TAG:
            return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG;
        case TypeTags.UNSIGNED16_INT_TAG:
            return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
                    sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG;
        case TypeTags.UNSIGNED32_INT_TAG:
            return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
                    sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG;
        case TypeTags.ANY_TAG:
            return checkIsAnyType(sourceType);
        case TypeTags.ANYDATA_TAG:
            return sourceType.isAnydata();
        case TypeTags.SERVICE_TAG:
            return checkIsServiceType(sourceType, targetType,
                    unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
        case TypeTags.HANDLE_TAG:
            return sourceTypeTag == TypeTags.HANDLE_TAG;
        case TypeTags.READONLY_TAG:
            return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
        case TypeTags.XML_ELEMENT_TAG:
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_PI_TAG:
            return targetTypeTag == sourceTypeTag;
        default:
            // Structured targets (map, record, array, tuple, object, ...) are
            // handled by the recursive dispatcher.
            return checkIsRecursiveType(sourceType, targetType,
                    unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
    }
}
// Value-aware variant of the subtype check: for record and object sources the
// actual value may be consulted (e.g. readonly field values); every other source
// falls back to the purely type-based check.
private static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,
                                   List<TypePair> unresolvedTypes) {
    int sourceTypeTag = sourceType.getTag();
    int targetTypeTag = targetType.getTag();
    if (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {
        return checkIsType(sourceType, targetType);
    }
    // Intersection targets are checked through their effective type.
    if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
        targetType = ((BIntersectionType) targetType).getEffectiveType();
        targetTypeTag = targetType.getTag();
    }
    if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
        return true;
    }
    if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
        return false;
    }
    switch (targetTypeTag) {
        case TypeTags.ANY_TAG:
            return checkIsAnyType(sourceType);
        case TypeTags.READONLY_TAG:
            return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
        default:
            return checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,
                    unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
    }
}
// A typedesc source matches a typedesc target when its constraint is a subtype of
// the target's constraint.
private static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,
                                         List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {
        return false;
    }
    Type sourceConstraint = ((BTypedescType) sourceType).getConstraint();
    return checkIsType(sourceConstraint, targetType.getConstraint(), unresolvedTypes);
}
// Dispatches the structural subtype check on the TARGET type's tag; each case
// delegates to a dedicated checker that threads 'unresolvedTypes' for cycle handling.
private static boolean checkIsRecursiveType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    switch (targetType.getTag()) {
        case TypeTags.MAP_TAG:
            return checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);
        case TypeTags.STREAM_TAG:
            return checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);
        case TypeTags.TABLE_TAG:
            return checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);
        case TypeTags.JSON_TAG:
            return checkIsJSONType(sourceType, unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            return checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);
        case TypeTags.FUNCTION_POINTER_TAG:
            return checkIsFunctionType(sourceType, (BFunctionType) targetType);
        case TypeTags.ARRAY_TAG:
            return checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            return checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);
        case TypeTags.UNION_TAG:
            return checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);
        case TypeTags.OBJECT_TYPE_TAG:
            return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return checkIsFiniteType(sourceType, (BFiniteType) targetType);
        case TypeTags.FUTURE_TAG:
            return checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);
        case TypeTags.ERROR_TAG:
            return checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);
        case TypeTags.TYPEDESC_TAG:
            return checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);
        case TypeTags.XML_TAG:
            return checkIsXMLType(sourceType, targetType, unresolvedTypes);
        default:
            // Unknown/unsupported target kinds never match here.
            return false;
    }
}
// Value-aware dispatch on the TARGET type's tag; only reached for record/object
// sources (see the value-aware checkIsType above).
private static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,
                                                   int sourceTypeTag, int targetTypeTag,
                                                   List<TypePair> unresolvedTypes) {
    switch (targetTypeTag) {
        case TypeTags.ANYDATA_TAG:
            // Objects are never anydata; records are checked field-by-field.
            if (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {
                return false;
            }
            return checkRecordBelongsToAnydataType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes);
        case TypeTags.MAP_TAG:
            return checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);
        case TypeTags.JSON_TAG:
            // json is treated as map<json> (readonly json for readonly targets).
            return checkIsMapType(sourceVal, sourceType,
                    new BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON :
                            TYPE_JSON), unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            return checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);
        case TypeTags.UNION_TAG:
            // The value must belong to at least one member of the union.
            for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                if (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.OBJECT_TYPE_TAG:
            return checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);
        default:
            return false;
    }
}
// A finite type matches the target when every value in its value space
// individually belongs to the target type.
private static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {
    return sourceType.valueSpace.stream().allMatch(bValue -> checkIsType(bValue, targetType));
}
// A union source matches the target only when every member type does.
private static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    return sourceType.getMemberTypes().stream()
            .allMatch(memberType -> checkIsType(memberType, targetType, unresolvedTypes));
}
// Source <: union target. The (source, target) pair is recorded up-front so that
// recursive type references are assumed to hold while still under analysis.
private static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List<TypePair> unresolvedTypes) {
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    switch (sourceType.getTag()) {
        case TypeTags.UNION_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.ANYDATA_TAG:
            // Union-like sources (json/anydata are represented as unions here):
            // every member must belong to the target union.
            return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
        default:
            // Otherwise the source must belong to at least one union member.
            for (Type type : targetType.getMemberTypes()) {
                if (checkIsType(sourceType, type, unresolvedTypes)) {
                    return true;
                }
            }
            return false;
    }
}
// Source <: map<T>. A map source matches via its constraint; a record source
// matches when the union of all its field types (plus rest type, if open) matches
// the constraint.
private static boolean checkIsMapType(Type sourceType, BMapType targetType, List<TypePair> unresolvedTypes) {
    Type targetConstrainedType = targetType.getConstrainedType();
    switch (sourceType.getTag()) {
        case TypeTags.MAP_TAG:
            return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                    unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            BRecordType recType = (BRecordType) sourceType;
            BUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));
            return checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);
        default:
            return false;
    }
}
// Value-aware variant of the map check: record sources are checked against the
// actual mapping value so readonly field values can be inspected.
private static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,
                                      List<TypePair> unresolvedTypes) {
    Type targetConstrainedType = targetType.getConstrainedType();
    switch (sourceType.getTag()) {
        case TypeTags.MAP_TAG:
            return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                    unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            return checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,
                    targetConstrainedType);
        default:
            return false;
    }
}
// Record value <: map<T>. Non-readonly fields are checked by declared type;
// readonly fields are checked by their actual (immutable) value, with absent
// optional fields skipped.
private static boolean checkIsMapType(MapValue sourceVal, BRecordType sourceType, List<TypePair> unresolvedTypes,
                                      Type targetConstrainedType) {
    for (Field field : sourceType.getFields().values()) {
        if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
            if (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {
                return false;
            }
            continue;
        }
        BString name = StringUtils.fromString(field.getFieldName());
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(name)) {
            // Absent optional readonly field: nothing to check.
            continue;
        }
        if (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {
            return false;
        }
    }
    if (sourceType.sealed) {
        return true;
    }
    // Open record: the rest field type must also respect the map constraint.
    return checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);
}
// Source <: xml<T>.
private static boolean checkIsXMLType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.getTag();
    if (sourceTag == TypeTags.FINITE_TYPE_TAG) {
        return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
    }
    BXmlType target = ((BXmlType) targetType);
    if (sourceTag == TypeTags.XML_TAG) {
        // Unwrap nested xml<xml<...>> targets down to the first non-xml constraint.
        Type targetConstraint = target.constraint;
        while (target.constraint.getTag() == TypeTags.XML_TAG) {
            target = (BXmlType) target.constraint;
            targetConstraint = target.constraint;
        }
        BXmlType source = (BXmlType) sourceType;
        if (source.constraint.getTag() == TypeTags.NEVER_TAG) {
            // xml<never> — the empty XML sequence / text-only case.
            if (targetConstraint.getTag() == TypeTags.UNION_TAG) {
                return checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);
            }
            return targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||
                    targetConstraint.getTag() == TypeTags.NEVER_TAG;
        }
        return checkIsType(source.constraint, targetConstraint, unresolvedTypes);
    }
    if (TypeTags.isXMLTypeTag(sourceTag)) {
        // Singleton XML node types (element/comment/pi/text) match via the
        // target's constraint.
        return checkIsType(sourceType, target.constraint, unresolvedTypes);
    }
    return false;
}
// Collects the field types of a record (plus its rest type when the record is
// open) — i.e. the set of types a mapping value of this record type may hold.
private static List<Type> getWideTypeComponents(BRecordType recType) {
    List<Type> types = recType.getFields().values().stream()
            .map(Field::getFieldType)
            .collect(Collectors.toCollection(ArrayList::new));
    if (!recType.sealed) {
        types.add(recType.restFieldType);
    }
    return types;
}
// Source <: stream<T, C>: both the constrained type and the completion type of
// the source stream must match the target's.
private static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.STREAM_TAG) {
        return false;
    }
    BStreamType sourceStreamType = (BStreamType) sourceType;
    boolean constraintMatches = checkConstraints(sourceStreamType.getConstrainedType(),
            targetType.getConstrainedType(), unresolvedTypes);
    if (!constraintMatches) {
        return false;
    }
    return checkConstraints(sourceStreamType.getCompletionType(), targetType.getCompletionType(),
            unresolvedTypes);
}
// Source <: table<T> (optionally keyed). The row constraints must match; then the
// key specification (key type or key field names) is compared.
private static boolean checkIsTableType(Type sourceType, BTableType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.TABLE_TAG) {
        return false;
    }
    BTableType srcTableType = (BTableType) sourceType;
    if (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(),
            unresolvedTypes)) {
        return false;
    }
    // Target has no key requirement at all: the constraint match suffices.
    if (targetType.getKeyType() == null && targetType.getFieldNames() == null) {
        return true;
    }
    if (targetType.getKeyType() != null) {
        // Either the source's key type matches directly...
        if (srcTableType.getKeyType() != null &&
                (checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {
            return true;
        }
        if (srcTableType.getFieldNames() == null) {
            return false;
        }
        // ...or the source's key field names are resolved to field types in the row
        // type and matched (as a single type or a tuple) against the target key type.
        List<Type> fieldTypes = new ArrayList<>();
        Arrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes
                .add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))
                        .getFieldType()));
        if (fieldTypes.size() == 1) {
            return checkConstraints(fieldTypes.get(0), targetType.getKeyType(), unresolvedTypes);
        }
        BTupleType tupleType = new BTupleType(fieldTypes);
        return checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);
    }
    // Target is keyed by field names only: names must match exactly, in order.
    return Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());
}
// Resolves a key field of a table's row (constraint) type by name. For unions the
// field must exist in every member with the same type; returns null when the field
// cannot be resolved.
static BField getTableConstraintField(Type constraintType, String fieldName) {
    switch (constraintType.getTag()) {
        case TypeTags.RECORD_TYPE_TAG:
            Map<String, Field> fieldList = ((BRecordType) constraintType).getFields();
            return (BField) fieldList.get(fieldName);
        case TypeTags.INTERSECTION_TAG:
            Type effectiveType = ((BIntersectionType) constraintType).getEffectiveType();
            return getTableConstraintField(effectiveType, fieldName);
        case TypeTags.UNION_TAG:
            BUnionType unionType = (BUnionType) constraintType;
            List<Type> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());
            // Some member lacked the field entirely.
            if (fields.size() != memTypes.size()) {
                return null;
            }
            // All members agree on the field type: return any one representative.
            // NOTE(review): assumes a non-empty member list — fields.get(0) would
            // throw for an empty union.
            if (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {
                return fields.get(0);
            }
    }
    return null;
}
/**
 * Checks whether {@code sourceType} is a subtype of the built-in {@code json} type.
 *
 * @param sourceType      type to test
 * @param unresolvedTypes type pairs currently being checked; used to break cycles in recursive types
 * @return true if every value of {@code sourceType} is also a {@code json} value
 */
private static boolean checkIsJSONType(Type sourceType, List<TypePair> unresolvedTypes) {
    BJsonType jsonType = (BJsonType) TYPE_JSON;
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceType, jsonType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    switch (sourceType.getTag()) {
        // Simple basic types (and json itself) are json by definition.
        case TypeTags.STRING_TAG:
        case TypeTags.CHAR_STRING_TAG:
        case TypeTags.INT_TAG:
        case TypeTags.SIGNED32_INT_TAG:
        case TypeTags.SIGNED16_INT_TAG:
        case TypeTags.SIGNED8_INT_TAG:
        case TypeTags.UNSIGNED32_INT_TAG:
        case TypeTags.UNSIGNED16_INT_TAG:
        case TypeTags.UNSIGNED8_INT_TAG:
        case TypeTags.BYTE_TAG:
        case TypeTags.FLOAT_TAG:
        case TypeTags.DECIMAL_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.NULL_TAG:
        case TypeTags.JSON_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            // An array is json if its element type is json.
            return checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return isFiniteTypeMatch((BFiniteType) sourceType, jsonType);
        case TypeTags.MAP_TAG:
            // A map is json if its constraint type is json.
            return checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            // Every field type — and, for open records, the rest field type — must be json.
            BRecordType recordType = (BRecordType) sourceType;
            for (Field field : recordType.getFields().values()) {
                if (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            }
            if (!recordType.sealed) {
                return checkIsJSONType(recordType.restFieldType, unresolvedTypes);
            }
            return true;
        case TypeTags.TUPLE_TAG:
            // Every tuple member type — and the rest type, if present — must be json.
            BTupleType sourceTupleType = (BTupleType) sourceType;
            for (Type memberType : sourceTupleType.getTupleTypes()) {
                if (!checkIsJSONType(memberType, unresolvedTypes)) {
                    return false;
                }
            }
            Type tupleRestType = sourceTupleType.getRestType();
            if (tupleRestType != null) {
                return checkIsJSONType(tupleRestType, unresolvedTypes);
            }
            return true;
        case TypeTags.UNION_TAG:
            // Every member of the union must individually be json.
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsJSONType(memberType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether {@code sourceType} is a subtype of the given record type, dispatching to the
 * record- or map-specific overload. Only records and maps can be record subtypes.
 */
private static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List<TypePair> unresolvedTypes) {
    int tag = sourceType.getTag();
    if (tag == TypeTags.RECORD_TYPE_TAG) {
        return checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);
    }
    if (tag == TypeTags.MAP_TAG) {
        return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
    }
    return false;
}
/**
 * Checks whether one record type is a subtype of another, comparing field presence, optionality,
 * read-only flags, field types, sealedness, and rest field types.
 *
 * @param sourceRecordType candidate subtype
 * @param targetType       candidate supertype
 * @param unresolvedTypes  type pairs currently being checked; used to break cycles
 * @return true if {@code sourceRecordType} is a subtype of {@code targetType}
 */
private static boolean checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceRecordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    // An open record can hold extra fields, so it cannot be a subtype of a sealed one.
    if (targetType.sealed && !sourceRecordType.sealed) {
        return false;
    }
    // For two open records, the source's rest field type must be a subtype of the target's.
    if (!sourceRecordType.sealed &&
            !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
        return false;
    }
    Map<String, Field> sourceFields = sourceRecordType.getFields();
    Set<String> targetFieldNames = targetType.getFields().keySet();
    for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
        Field targetField = targetFieldEntry.getValue();
        Field sourceField = sourceFields.get(targetFieldEntry.getKey());
        // Every target field must exist in the source (at the type level).
        if (sourceField == null) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
            return false;
        }
        // A required target field cannot be satisfied by an optional source field.
        if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)
                && SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {
            return false;
        }
        if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    if (targetType.sealed) {
        // A sealed target admits no fields beyond its own.
        return targetFieldNames.containsAll(sourceFields.keySet());
    }
    // Extra source fields must fit into the target's rest field type.
    for (Map.Entry<String, Field> sourceFieldEntry : sourceFields.entrySet()) {
        if (targetFieldNames.contains(sourceFieldEntry.getKey())) {
            continue;
        }
        if (!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a map type is a subtype of the given record type. The map's constraint type must
 * fit each (necessarily optional) record field as well as the record's rest field type.
 */
private static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    // A map has an unbounded key set, so it can never match a sealed (closed) record.
    if (targetType.sealed) {
        return false;
    }
    Type constraintType = sourceType.getConstrainedType();
    for (Field field : targetType.getFields().values()) {
        long flags = field.getFlags();
        // Map entries may be absent, so every record field must be optional.
        if (!SymbolFlags.isFlagOn(flags, SymbolFlags.OPTIONAL)) {
            return false;
        }
        // A readonly field can only be satisfied by a readonly map.
        if (SymbolFlags.isFlagOn(flags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {
            return false;
        }
        if (!checkIsType(constraintType, field.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    // All remaining map entries land in the record's rest field.
    return checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);
}
/**
 * Checks whether a record value belongs to {@code anydata}. Read-only fields are checked against
 * the actual stored value (their effective type may be narrower than the declared one); mutable
 * fields are checked by declared type.
 *
 * @param sourceVal       the record value being checked
 * @param recordType      the record's declared type
 * @param unresolvedTypes type pairs currently being checked; used to break cycles
 * @return true if the record value belongs to {@code anydata}
 */
private static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,
                                                       List<TypePair> unresolvedTypes) {
    Type targetType = TYPE_ANYDATA;
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(recordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    Map<String, Field> fields = recordType.getFields();
    for (Map.Entry<String, Field> fieldEntry : fields.entrySet()) {
        String fieldName = fieldEntry.getKey();
        Field field = fieldEntry.getValue();
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
            BString fieldNameBString = StringUtils.fromString(fieldName);
            // An absent optional read-only field imposes no constraint.
            if (SymbolFlags
                    .isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(fieldNameBString)) {
                continue;
            }
            // Check the stored value itself, not the declared field type.
            if (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {
                return false;
            }
        } else {
            if (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {
                return false;
            }
        }
    }
    if (recordType.sealed) {
        return true;
    }
    // Open records: extra fields are governed by the rest field type.
    return checkIsType(recordType.restFieldType, targetType, unresolvedTypes);
}
/**
 * Value-aware variant of the record subtype check: dispatches on the source's type tag, passing
 * the concrete value through so read-only fields can be checked against their stored values.
 */
private static boolean checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,
                                         List<TypePair> unresolvedTypes) {
    int tag = sourceType.getTag();
    if (tag == TypeTags.RECORD_TYPE_TAG) {
        return checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType, unresolvedTypes);
    }
    if (tag == TypeTags.MAP_TAG) {
        return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
    }
    return false;
}
/**
 * Checks whether a record value (with its declared type) is a subtype of the target record type.
 * Read-only source fields are validated against their stored values, since their effective type
 * may be narrower than the declared field type; mutable fields are compared by declared type.
 *
 * @param sourceRecordValue the concrete record value
 * @param sourceRecordType  the value's declared record type
 * @param targetType        candidate supertype
 * @param unresolvedTypes   type pairs currently being checked; used to break cycles
 * @return true if the value conforms to {@code targetType}
 */
private static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,
                                         BRecordType targetType, List<TypePair> unresolvedTypes) {
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceRecordType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    // An open source record can hold extra fields, so it cannot match a sealed target.
    if (targetType.sealed && !sourceRecordType.sealed) {
        return false;
    }
    // For two open records, the source's rest field type must fit the target's.
    if (!sourceRecordType.sealed &&
            !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
        return false;
    }
    Map<String, Field> sourceFields = sourceRecordType.getFields();
    Set<String> targetFieldNames = targetType.getFields().keySet();
    for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
        String fieldName = targetFieldEntry.getKey();
        Field targetField = targetFieldEntry.getValue();
        Field sourceField = sourceFields.get(fieldName);
        if (sourceField == null) {
            // A missing source field is only acceptable if the target field is optional.
            if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {
                return false;
            }
            continue;
        }
        if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
            return false;
        }
        boolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);
        boolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);
        if (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {
            BString fieldNameBString = StringUtils.fromString(fieldName);
            // An absent optional read-only field only matches an optional target field.
            if (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {
                if (!optionalTargetField) {
                    return false;
                }
                continue;
            }
            // Read-only field: check the stored value itself, not the declared field type.
            if (!checkIsLikeType(sourceRecordValue.get(fieldNameBString), targetField.getFieldType())) {
                return false;
            }
        } else {
            // A required target field cannot be satisfied by an optional mutable source field.
            if (!optionalTargetField && optionalSourceField) {
                return false;
            }
            if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
    }
    if (targetType.sealed) {
        // Extra source fields are tolerated only if their type can hold no value at all
        // (never, or a structure with a required never member).
        for (String sourceFieldName : sourceFields.keySet()) {
            if (targetFieldNames.contains(sourceFieldName)) {
                continue;
            }
            if (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(
                    sourceFields.get(sourceFieldName).getFieldType())) {
                return false;
            }
        }
        return true;
    }
    // Open target: extra source fields must fit the target's rest field type
    // (read-only ones are checked by stored value).
    for (Map.Entry<String, Field> targetFieldEntry : sourceFields.entrySet()) {
        String fieldName = targetFieldEntry.getKey();
        Field field = targetFieldEntry.getValue();
        if (targetFieldNames.contains(fieldName)) {
            continue;
        }
        if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
            if (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),
                    targetType.restFieldType)) {
                return false;
            }
        } else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns true when the target field is read-only but the source field is not — a read-only
 * target field can never be satisfied by a mutable source field.
 */
private static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {
    boolean targetReadOnly = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY);
    boolean sourceReadOnly = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY);
    return targetReadOnly && !sourceReadOnly;
}
/**
 * Checks whether one array type is a subtype of another: lengths must be compatible
 * (open source needs an open target; two closed arrays must agree on size) and the
 * element types must be in a subtype relation.
 */
private static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,
                                        List<TypePair> unresolvedTypes) {
    ArrayState sourceState = sourceType.getState();
    ArrayState targetState = targetType.getState();
    // An open (unbounded-length) source array only matches an open target.
    if (sourceState == ArrayState.OPEN && targetState != ArrayState.OPEN) {
        return false;
    }
    // Two fixed-length arrays must agree on their length.
    if (sourceState == ArrayState.CLOSED && targetState == ArrayState.CLOSED
            && sourceType.getSize() != targetType.getSize()) {
        return false;
    }
    return checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);
}
/**
 * Checks whether a tuple type is a subtype of an array type. Every tuple member (and the tuple's
 * rest type, if any) must be a subtype of the array's element type; a fixed-length target array
 * additionally requires a rest-less tuple of exactly matching length.
 */
private static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,
                                        List<TypePair> unresolvedTypes) {
    List<Type> tupleTypes = sourceType.getTupleTypes();
    Type sourceRestType = sourceType.getRestType();
    Type targetElementType = targetType.getElementType();
    boolean fixedLengthTarget = targetType.getState() != ArrayState.OPEN;
    if (fixedLengthTarget) {
        // A tuple with a rest portion has unbounded length; otherwise lengths must match exactly.
        if (sourceRestType != null || tupleTypes.size() != targetType.getSize()) {
            return false;
        }
    }
    for (Type memberType : tupleTypes) {
        if (!checkIsType(memberType, targetElementType, unresolvedTypes)) {
            return false;
        }
    }
    if (!fixedLengthTarget && sourceRestType != null) {
        return checkIsType(sourceRestType, targetElementType, unresolvedTypes);
    }
    return true;
}
/**
 * Checks whether {@code sourceType} is a subtype of the given array type, dispatching on its tag.
 * Unions require every member to match; only arrays and tuples can otherwise be array subtypes.
 */
private static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List<TypePair> unresolvedTypes) {
    switch (sourceType.getTag()) {
        case TypeTags.UNION_TAG:
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.ARRAY_TAG:
            return checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            return checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);
        default:
            return false;
    }
}
/**
 * Checks whether an array type is a subtype of a tuple type. An open array can only match a
 * members-free tuple with a rest type; a closed array must cover all tuple members and, if it is
 * longer than the member list, fit its element type into the tuple's rest type.
 *
 * @param sourceType      candidate array subtype
 * @param targetType      candidate tuple supertype
 * @param unresolvedTypes type pairs currently being checked; used to break cycles
 * @return true if {@code sourceType} is a subtype of {@code targetType}
 */
private static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,
                                        List<TypePair> unresolvedTypes) {
    Type sourceElementType = sourceType.getElementType();
    List<Type> targetTypes = targetType.getTupleTypes();
    Type targetRestType = targetType.getRestType();
    switch (sourceType.getState()) {
        case OPEN:
            // An unbounded array can only match a tuple that is itself unbounded
            // (no fixed members, rest type present).
            if (targetRestType == null) {
                return false;
            }
            if (targetTypes.isEmpty()) {
                return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
            }
            return false;
        case CLOSED:
            // The array must be long enough to populate every fixed tuple member.
            if (sourceType.getSize() < targetTypes.size()) {
                return false;
            }
            if (targetTypes.isEmpty()) {
                if (targetRestType != null) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                // Only an empty array matches the empty tuple [].
                return sourceType.getSize() == 0;
            }
            // The element type must fit every fixed tuple member type.
            for (Type targetElementType : targetTypes) {
                if (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {
                    return false;
                }
            }
            if (sourceType.getSize() == targetTypes.size()) {
                return true;
            }
            // Surplus array elements must fit the tuple's rest type.
            if (targetRestType != null) {
                return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
            }
            return false;
        default:
            return false;
    }
}
/**
 * Checks whether one tuple type is a subtype of another: member types are compared pairwise,
 * surplus source members must fit the target's rest type, and a source rest type must itself
 * be a subtype of the target's rest type.
 *
 * @param sourceType      candidate subtype
 * @param targetType      candidate supertype
 * @param unresolvedTypes type pairs currently being checked; used to break cycles
 * @return true if {@code sourceType} is a subtype of {@code targetType}
 */
private static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,
                                        List<TypePair> unresolvedTypes) {
    List<Type> sourceTypes = sourceType.getTupleTypes();
    Type sourceRestType = sourceType.getRestType();
    List<Type> targetTypes = targetType.getTupleTypes();
    Type targetRestType = targetType.getRestType();
    // An unbounded source cannot match a bounded target.
    if (sourceRestType != null && targetRestType == null) {
        return false;
    }
    int sourceTypeSize = sourceTypes.size();
    int targetTypeSize = targetTypes.size();
    // Two bounded tuples must agree on length.
    if (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {
        return false;
    }
    // The source must populate every fixed target member.
    if (sourceTypeSize < targetTypeSize) {
        return false;
    }
    // Pairwise member comparison over the target's fixed members.
    for (int i = 0; i < targetTypeSize; i++) {
        if (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {
            return false;
        }
    }
    if (sourceTypeSize == targetTypeSize) {
        if (sourceRestType != null) {
            return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
        }
        return true;
    }
    // Surplus source members must fit the target's rest type.
    for (int i = targetTypeSize; i < sourceTypeSize; i++) {
        if (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {
            return false;
        }
    }
    if (sourceRestType != null) {
        return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
    }
    return true;
}
/**
 * Checks whether {@code sourceType} is a subtype of the given tuple type, dispatching on its tag.
 * Unions require every member to match; only arrays and tuples can otherwise be tuple subtypes.
 */
private static boolean checkIsTupleType(Type sourceType, BTupleType targetType, List<TypePair> unresolvedTypes) {
    switch (sourceType.getTag()) {
        case TypeTags.UNION_TAG:
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.ARRAY_TAG:
            return checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            return checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);
        default:
            return false;
    }
}
/**
 * Checks whether {@code sourceType} is a subtype of {@code any}: everything except {@code error},
 * {@code readonly}, and unions containing either of them.
 */
private static boolean checkIsAnyType(Type sourceType) {
    int tag = sourceType.getTag();
    if (tag == TypeTags.ERROR_TAG || tag == TypeTags.READONLY_TAG) {
        return false;
    }
    if (tag == TypeTags.UNION_TAG || tag == TypeTags.ANYDATA_TAG || tag == TypeTags.JSON_TAG) {
        // anydata and json are union-backed types here; every member must itself be `any`.
        for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
            if (!checkIsAnyType(memberType)) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether {@code sourceType} is the same finite type as {@code targetType}: both must be
 * finite with identical value spaces (equal size plus full containment).
 */
private static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {
    if (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {
        return false;
    }
    BFiniteType sourceFiniteType = (BFiniteType) sourceType;
    // Equal cardinality plus containment implies the two value spaces are identical.
    return sourceFiniteType.valueSpace.size() == targetType.valueSpace.size()
            && targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);
}
/**
 * Checks whether {@code sourceType} is a subtype of the given future type:
 * {@code future<S>} is a subtype of {@code future<T>} iff {@code S} is a subtype of {@code T}.
 */
private static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.FUTURE_TAG) {
        return false;
    }
    Type sourceConstraint = ((BFutureType) sourceType).getConstrainedType();
    return checkConstraints(sourceConstraint, targetType.getConstrainedType(), unresolvedTypes);
}
/**
 * Checks whether {@code sourceType} is structurally a subtype of the given object type without a
 * concrete value available. Delegates to the value-aware overload with a {@code null} value.
 */
private static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,
                                              List<TypePair> unresolvedTypes) {
    return checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);
}
/**
 * Checks whether an object type (optionally backed by a concrete value) is a subtype of the
 * target object type: isolation flags, fields, methods, visibility, and type-id sets must all
 * be compatible.
 *
 * @param sourceVal       the object value, or {@code null} for a purely type-level check
 * @param sourceType      the candidate subtype (must be an object or service type)
 * @param targetType      the candidate supertype
 * @param unresolvedTypes type pairs currently being checked; used to break cycles
 * @return true if {@code sourceType} is a subtype of {@code targetType}
 */
private static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,
                                              List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {
        return false;
    }
    // Cycle guard: a pair already under evaluation is optimistically assumed to hold.
    TypePair pair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    BObjectType sourceObjectType = (BObjectType) sourceType;
    // An isolated target can only be satisfied by an isolated source.
    if (SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&
            !SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {
        return false;
    }
    Map<String, Field> targetFields = targetType.getFields();
    Map<String, Field> sourceFields = sourceObjectType.getFields();
    MethodType[] targetFuncs = targetType.getMethods();
    MethodType[] sourceFuncs = sourceObjectType.getMethods();
    // A target with any private member cannot be structurally matched from outside.
    if (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags
            .isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))
            || Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),
                                                                            SymbolFlags.PRIVATE))) {
        return false;
    }
    // The source must provide at least as many fields and methods as the target requires.
    if (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {
        return false;
    }
    String targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse("");
    String sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse("");
    if (sourceVal == null) {
        // Type-level check only (no value available).
        if (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                                         unresolvedTypes)) {
            return false;
        }
    } else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                                                   (BObject) sourceVal, unresolvedTypes)) {
        return false;
    }
    return checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,
                                        sourceTypeModule, sourceObjectType, targetType);
}
/**
 * Type-level field check for object subtyping: each target field must exist in the source,
 * be visible from the target's module, have compatible read-only flags, and have a source
 * field type that is a subtype of the target field type.
 */
private static boolean checkObjectSubTypeForFields(Map<String, Field> targetFields,
                                                   Map<String, Field> sourceFields, String targetTypeModule,
                                                   String sourceTypeModule, List<TypePair> unresolvedTypes) {
    for (Field lhsField : targetFields.values()) {
        Field rhsField = sourceFields.get(lhsField.getFieldName());
        if (rhsField == null) {
            return false;
        }
        if (!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                                      rhsField.getFlags())) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(lhsField, rhsField)) {
            return false;
        }
        if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Value-aware field check for object subtyping. Like the type-level variant, but for
 * {@code final} source fields the stored value is inspected: a read-only value is checked
 * directly against the target field type, otherwise the value's runtime type is used.
 *
 * @param targetFields     fields required by the target object type
 * @param sourceFields     fields declared by the source object type
 * @param targetTypeModule module of the target type (for visibility checks)
 * @param sourceTypeModule module of the source type (for visibility checks)
 * @param sourceObjVal     the concrete object value whose field values may be read
 * @param unresolvedTypes  type pairs currently being checked; used to break cycles
 * @return true if every target field is satisfied by the source
 */
private static boolean checkObjectSubTypeForFieldsByValue(Map<String, Field> targetFields,
                                                          Map<String, Field> sourceFields, String targetTypeModule,
                                                          String sourceTypeModule, BObject sourceObjVal,
                                                          List<TypePair> unresolvedTypes) {
    for (Field lhsField : targetFields.values()) {
        String name = lhsField.getFieldName();
        Field rhsField = sourceFields.get(name);
        // Field must exist, be visible, and have compatible read-only flags.
        if (rhsField == null ||
                !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                                          rhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,
                                                                                               rhsField)) {
            return false;
        }
        if (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {
            // Final fields cannot be reassigned, so the stored value's type is authoritative.
            Object fieldValue = sourceObjVal.get(StringUtils.fromString(name));
            Type fieldValueType = getType(fieldValue);
            if (fieldValueType.isReadOnly()) {
                if (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {
                    return false;
                }
                continue;
            }
            if (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        } else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Method check for object subtyping: every non-resource target method must have a matching,
 * visible source method with the same remote-ness, and the source's type-id set must cover
 * the target's (for distinct object types).
 *
 * @return true if the source's methods and type-ids satisfy the target's
 */
private static boolean checkObjectSubTypeForMethods(List<TypePair> unresolvedTypes,
                                                    MethodType[] targetFuncs,
                                                    MethodType[] sourceFuncs,
                                                    String targetTypeModule, String sourceTypeModule,
                                                    BObjectType sourceType, BObjectType targetType) {
    for (MethodType lhsFunc : targetFuncs) {
        // Resource methods are not part of this structural comparison.
        if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {
            continue;
        }
        MethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);
        if (rhsFunc == null ||
                !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),
                                          rhsFunc.getFlags())) {
            return false;
        }
        // remote methods only match remote methods (and non-remote only non-remote).
        if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags
                .isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {
            return false;
        }
    }
    // Distinct-type check: the source must carry every type-id the target requires.
    BTypeIdSet targetTypeIdSet = targetType.typeIdSet;
    if (targetTypeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;
    if (sourceTypeIdSet == null) {
        return false;
    }
    return sourceTypeIdSet.containsAll(targetTypeIdSet);
}
/**
 * Checks whether two members have compatible visibility for structural matching:
 * private matches only within the same module, public matches public, and default
 * (module-private) matches default visibility within the same module.
 */
private static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,
                                                long rhsFlags) {
    if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PRIVATE)) {
        // Private symbols are only visible within their own module.
        return lhsTypePkg.equals(rhsTypePkg);
    }
    if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {
        // A public symbol can only be matched by another public symbol.
        return SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
    }
    // Default visibility: the counterpart must also be default-visible and in the same module.
    boolean rhsDefaultVisibility = !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE)
            && !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
    return rhsDefaultVisibility && lhsTypePkg.equals(rhsTypePkg);
}
/**
 * Finds the first source method that has the same name as {@code lhsFunc} and a function type
 * compatible with it, or {@code null} if none exists.
 */
private static MethodType getMatchingInvokableType(MethodType[] rhsFuncs,
                                                   MethodType lhsFunc,
                                                   List<TypePair> unresolvedTypes) {
    for (MethodType rhsFunc : rhsFuncs) {
        if (!lhsFunc.getName().equals(rhsFunc.getName())) {
            continue;
        }
        if (checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(), unresolvedTypes)) {
            return rhsFunc;
        }
    }
    return null;
}
/**
 * Compares two method signatures for object subtyping: isolation flags must be compatible,
 * parameter counts must match with contravariant parameter types, and return types (when both
 * are present) must be covariant.
 */
private static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,
                                                              List<TypePair> unresolvedTypes) {
    if (hasIncompatibleIsolatedFlags(target, source)) {
        return false;
    }
    var sourceParams = source.getParameters();
    var targetParams = target.getParameters();
    if (sourceParams.length != targetParams.length) {
        return false;
    }
    // Parameters are contravariant: each target parameter type must fit the source's.
    for (int i = 0; i < sourceParams.length; i++) {
        if (!checkIsType(targetParams[i].type, sourceParams[i].type, unresolvedTypes)) {
            return false;
        }
    }
    Type sourceReturn = source.getReturnType();
    Type targetReturn = target.getReturnType();
    if (sourceReturn == null && targetReturn == null) {
        return true;
    }
    if (sourceReturn == null || targetReturn == null) {
        return false;
    }
    // Return types are covariant.
    return checkIsType(sourceReturn, targetReturn, unresolvedTypes);
}
/**
 * Checks whether {@code sourceType} is a subtype of the given function type: compatible
 * isolation/transactional flags, contravariant parameters, covariant return type. The special
 * {@code function} (any-function) target accepts every function with compatible flags.
 */
private static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {
    if (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {
        return false;
    }
    BFunctionType source = (BFunctionType) sourceType;
    if (hasIncompatibleIsolatedFlags(targetType, source)
            || hasIncompatibleTransactionalFlags(targetType, source)) {
        return false;
    }
    // `function` (any function) accepts every function value once the flags are compatible.
    if (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) {
        return true;
    }
    if (source.parameters.length != targetType.parameters.length) {
        return false;
    }
    // Parameters are contravariant: each target parameter type must fit the source's.
    for (int i = 0; i < targetType.parameters.length; i++) {
        if (!checkIsType(targetType.parameters[i].type, source.parameters[i].type, new ArrayList<>())) {
            return false;
        }
    }
    // Return types are covariant.
    return checkIsType(source.retType, targetType.retType, new ArrayList<>());
}
/**
 * Returns true when the target is {@code isolated} but the source is not — an isolated
 * target can never be satisfied by a non-isolated source.
 */
private static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) {
    boolean targetIsolated = SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED);
    boolean sourceIsolated = SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.ISOLATED);
    return targetIsolated && !sourceIsolated;
}
/**
 * Returns true when the source is {@code transactional} but the target is not — a
 * transactional source function requires a transactional target.
 */
private static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) {
    boolean sourceTransactional = SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL);
    boolean targetTransactional = SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL);
    return sourceTransactional && !targetTransactional;
}
/**
 * Checks whether {@code sourceType} is a subtype of a service type: a service type is compared
 * structurally as an object; a plain object type matches only if it carries the SERVICE flag.
 */
private static boolean checkIsServiceType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
    int tag = sourceType.getTag();
    if (tag == TypeTags.SERVICE_TAG) {
        return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
    }
    if (tag == TypeTags.OBJECT_TYPE_TAG) {
        var flags = ((BObjectType) sourceType).flags;
        return (flags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE;
    }
    return false;
}
/**
 * Checks whether every value of {@code sourceType} is immutable by construction (simple basic
 * types, xml text, finite types, errors, functions, handles, etc.). {@code xml} itself only
 * qualifies when constrained to {@code never}.
 */
public static boolean isInherentlyImmutableType(Type sourceType) {
    if (isSimpleBasicType(sourceType)) {
        return true;
    }
    int tag = sourceType.getTag();
    if (tag == TypeTags.XML_TAG) {
        // xml<never> can only describe the empty xml sequence, which is immutable.
        return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
    }
    switch (tag) {
        case TypeTags.XML_TEXT_TAG:
        case TypeTags.FINITE_TYPE_TAG:
        case TypeTags.READONLY_TAG:
        case TypeTags.NULL_TAG:
        case TypeTags.ERROR_TAG:
        case TypeTags.INVOKABLE_TAG:
        case TypeTags.SERVICE_TAG:
        case TypeTags.TYPEDESC_TAG:
        case TypeTags.FUNCTION_POINTER_TAG:
        case TypeTags.HANDLE_TAG:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether {@code type} has a read-only intersection — i.e. whether an immutable value of
 * this type can exist. Structured types qualify when their member/field/constraint types are
 * inherently or selectively immutable; unions need only one such member.
 *
 * @param type            type to test
 * @param unresolvedTypes types currently being checked; used to break cycles in recursive types
 * @return true if an immutable value of {@code type} can exist
 */
public static boolean isSelectivelyImmutableType(Type type, Set<Type> unresolvedTypes) {
    // Cycle guard: a type already under evaluation is optimistically assumed to qualify.
    if (!unresolvedTypes.add(type)) {
        return true;
    }
    switch (type.getTag()) {
        case TypeTags.ANY_TAG:
        case TypeTags.ANYDATA_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.XML_TAG:
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_ELEMENT_TAG:
        case TypeTags.XML_PI_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            Type elementType = ((BArrayType) type).getElementType();
            return isInherentlyImmutableType(elementType) ||
                    isSelectivelyImmutableType(elementType, unresolvedTypes);
        case TypeTags.TUPLE_TAG:
            // Every member type — and the rest type, if present — must admit immutability.
            BTupleType tupleType = (BTupleType) type;
            for (Type tupMemType : tupleType.getTupleTypes()) {
                if (!isInherentlyImmutableType(tupMemType) &&
                        !isSelectivelyImmutableType(tupMemType, unresolvedTypes)) {
                    return false;
                }
            }
            Type tupRestType = tupleType.getRestType();
            if (tupRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(tupRestType) ||
                    isSelectivelyImmutableType(tupRestType, unresolvedTypes);
        case TypeTags.RECORD_TYPE_TAG:
            // Every field type — and the rest field type, if present — must admit immutability.
            BRecordType recordType = (BRecordType) type;
            for (Field field : recordType.getFields().values()) {
                Type fieldType = field.getFieldType();
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                    return false;
                }
            }
            Type recordRestType = recordType.restFieldType;
            if (recordRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(recordRestType) ||
                    isSelectivelyImmutableType(recordRestType, unresolvedTypes);
        case TypeTags.OBJECT_TYPE_TAG:
            BObjectType objectType = (BObjectType) type;
            // A non-readonly class has no read-only intersection; abstract object types may.
            if (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) &&
                    !SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) {
                return false;
            }
            for (Field field : objectType.getFields().values()) {
                Type fieldType = field.getFieldType();
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        case TypeTags.MAP_TAG:
            Type constraintType = ((BMapType) type).getConstrainedType();
            return isInherentlyImmutableType(constraintType) ||
                    isSelectivelyImmutableType(constraintType, unresolvedTypes);
        case TypeTags.TABLE_TAG:
            Type tableConstraintType = ((BTableType) type).getConstrainedType();
            return isInherentlyImmutableType(tableConstraintType) ||
                    isSelectivelyImmutableType(tableConstraintType, unresolvedTypes);
        case TypeTags.UNION_TAG:
            // A union qualifies if at least one member admits immutability.
            boolean readonlyIntersectionExists = false;
            for (Type memberType : ((BUnionType) type).getMemberTypes()) {
                if (isInherentlyImmutableType(memberType) ||
                        isSelectivelyImmutableType(memberType, unresolvedTypes)) {
                    readonlyIntersectionExists = true;
                    break;
                }
            }
            return readonlyIntersectionExists;
        case TypeTags.INTERSECTION_TAG:
            return isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes);
    }
    return false;
}
/**
 * Subtype check for constraint types, where a {@code null} constraint stands for {@code any}.
 */
private static boolean checkConstraints(Type sourceConstraint, Type targetConstraint,
                                        List<TypePair> unresolvedTypes) {
    Type source = sourceConstraint == null ? TYPE_ANY : sourceConstraint;
    Type target = targetConstraint == null ? TYPE_ANY : targetConstraint;
    return checkIsType(source, target, unresolvedTypes);
}
/**
 * Checks whether a value is mutable. Nil, simple basic values (tags below NULL_TAG), and
 * finite-type values are never mutable; other values are mutable unless frozen.
 */
private static boolean isMutable(Object value, Type sourceType) {
    int tag = sourceType.getTag();
    if (value == null || tag < TypeTags.NULL_TAG || tag == TypeTags.FINITE_TYPE_TAG) {
        return false;
    }
    return !((RefValue) value).isFrozen();
}
/**
 * Checks array-type compatibility: two array types are compared structurally; any other
 * combination falls back to reference equality.
 */
private static boolean checkArrayEquivalent(Type actualType, Type expType) {
    boolean bothArrays = expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG;
    if (!bothArrays) {
        return expType == actualType;
    }
    return checkIsArrayType((BArrayType) actualType, (BArrayType) expType, new ArrayList<>());
}
/**
 * Checks whether {@code type} is {@code never}, or a structure that requires a {@code never}
 * member (and therefore can hold no value). Entry point that seeds the visited-name set.
 */
private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) {
    return checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, new HashSet<>());
}
/**
 * Checks whether {@code type} is {@code never}, or a structure (record, tuple, fixed-length
 * array) that requires a {@code never} member and therefore can hold no value.
 *
 * @param type           type to test
 * @param visitedTypeSet names of types already visited, used to break cycles in recursive types
 * @return true if no value of {@code type} can exist
 */
private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type,
                                                                               Set<String> visitedTypeSet) {
    switch (type.getTag()) {
        case TypeTags.NEVER_TAG:
            return true;
        case TypeTags.RECORD_TYPE_TAG:
            BRecordType recordType = (BRecordType) type;
            visitedTypeSet.add(recordType.getName());
            for (Field field : recordType.getFields().values()) {
                // A record can hold no value if any non-optional field is itself never-like.
                // Fix: guard against revisiting by the field type's NAME — the previous code
                // passed the Type object to a Set<String>, so `contains` was always false and
                // the cycle guard never pruned recursive records.
                if ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) ||
                        !SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) &&
                        !visitedTypeSet.contains(field.getFieldType().getName()) &&
                        checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(),
                                                                                visitedTypeSet)) {
                    return true;
                }
            }
            return false;
        case TypeTags.TUPLE_TAG:
            BTupleType tupleType = (BTupleType) type;
            visitedTypeSet.add(tupleType.getName());
            List<Type> tupleTypes = tupleType.getTupleTypes();
            for (Type mem : tupleTypes) {
                // Skip member types already visited (recursive tuples).
                if (!visitedTypeSet.add(mem.getName())) {
                    continue;
                }
                if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) {
                    return true;
                }
            }
            return false;
        case TypeTags.ARRAY_TAG:
            BArrayType arrayType = (BArrayType) type;
            visitedTypeSet.add(arrayType.getName());
            Type elemType = arrayType.getElementType();
            visitedTypeSet.add(elemType.getName());
            // Only a fixed-length array of a never-like element type can hold no value;
            // an open array can always be empty.
            return arrayType.getState() != ArrayState.OPEN &&
                    checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet);
        default:
            return false;
    }
}
/**
 * Checks whether the given value conforms to the given target type. The value's runtime type is
 * checked first; only if that fails does the check fall back to inspecting the value itself.
 *
 * @param sourceValue            value to check
 * @param targetType             target type
 * @param unresolvedValues       values that are unresolved so far
 * @param allowNumericConversion flag indicating whether to perform numeric conversions
 * @return true if the value conforms to the provided type; false otherwise
 */
private static boolean checkIsLikeType(Object sourceValue, Type targetType, List<TypeValuePair> unresolvedValues,
                                       boolean allowNumericConversion) {
    Type sourceType = getType(sourceValue);
    return checkIsType(sourceType, targetType, new ArrayList<>())
            || checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues, allowNumericConversion);
}
/**
 * Checks whether a given value conforms to a given type. This strictly checks the value itself,
 * without taking the value's declared type into consideration.
 *
 * @param sourceValue Value to check
 * @param sourceType Type of the value
 * @param targetType Target type
 * @param unresolvedValues Values that are unresolved so far
 * @param allowNumericConversion Flag indicating whether to perform numeric conversions
 * @return True if the value conforms to the provided type. False, otherwise.
 */
private static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,
                                          List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    int sourceTypeTag = sourceType.getTag();
    int targetTypeTag = targetType.getTag();
    // Intersection types are transparent for value checks: recurse on the effective type(s).
    if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),
                targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
                        ((BIntersectionType) targetType).getEffectiveType(),
                unresolvedValues, allowNumericConversion);
    }
    if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
        return checkIsLikeOnValue(sourceValue, sourceType, ((BIntersectionType) targetType).getEffectiveType(),
                unresolvedValues, allowNumericConversion);
    }
    // Parameterized types are likewise unwrapped to their parameter value types before comparison.
    if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
        if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
            return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),
                    targetType, unresolvedValues, allowNumericConversion);
        }
        return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),
                ((BParameterizedType) targetType).getParamValueType(), unresolvedValues,
                allowNumericConversion);
    }
    // Dispatch on the target type; each branch decides how the raw value must look.
    switch (targetTypeTag) {
        case TypeTags.READONLY_TAG:
            return true;
        case TypeTags.BYTE_TAG:
            // An integer source only needs a range check; other sources need an explicit
            // numeric conversion, which must be enabled by the caller.
            if (TypeTags.isIntegerTypeTag(sourceTypeTag)) {
                return isByteLiteral((Long) sourceValue);
            }
            return allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue);
        case TypeTags.INT_TAG:
            return allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue);
        case TypeTags.SIGNED32_INT_TAG:
        case TypeTags.SIGNED16_INT_TAG:
        case TypeTags.SIGNED8_INT_TAG:
        case TypeTags.UNSIGNED32_INT_TAG:
        case TypeTags.UNSIGNED16_INT_TAG:
        case TypeTags.UNSIGNED8_INT_TAG:
            // Integer sources (and byte targets) get a direct sub-range check; everything
            // else again requires numeric conversion to be allowed.
            if (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) {
                return TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);
            }
            return allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);
        case TypeTags.FLOAT_TAG:
        case TypeTags.DECIMAL_TAG:
            return allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue);
        case TypeTags.CHAR_STRING_TAG:
            return TypeConverter.isConvertibleToChar(sourceValue);
        case TypeTags.RECORD_TYPE_TAG:
            return checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.TABLE_TAG:
            return checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.JSON_TAG:
            return checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.MAP_TAG:
            return checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues, allowNumericConversion);
        case TypeTags.STREAM_TAG:
            return checkIsLikeStreamType(sourceValue, (BStreamType) targetType);
        case TypeTags.ARRAY_TAG:
            return checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.TUPLE_TAG:
            return checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ERROR_TAG:
            return checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ANYDATA_TAG:
            return checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues, allowNumericConversion);
        case TypeTags.FINITE_TYPE_TAG:
            return checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType);
        case TypeTags.XML_ELEMENT_TAG:
            // An xml value matches xml:Element only when it is a singleton.
            if (sourceTypeTag == TypeTags.XML_TAG) {
                XmlValue xmlSource = (XmlValue) sourceValue;
                return xmlSource.isSingleton();
            }
            return false;
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_PI_TAG:
        case TypeTags.XML_TEXT_TAG:
            if (sourceTypeTag == TypeTags.XML_TAG) {
                return checkIsLikeNonElementSingleton((XmlValue) sourceValue, targetType);
            }
            return false;
        case TypeTags.XML_TAG:
            if (sourceTypeTag == TypeTags.XML_TAG) {
                return checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType);
            }
            return false;
        case TypeTags.UNION_TAG:
            if (allowNumericConversion) {
                // With conversions enabled, the value must match at least one member, and
                // conversion may only account for at most one member beyond those matched
                // without conversion (otherwise the conversion target is ambiguous).
                List<Type> compatibleTypesWithNumConversion = new ArrayList<>();
                List<Type> compatibleTypesWithoutNumConversion = new ArrayList<>();
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    // A copy of the unresolved list is used for the no-conversion probe so
                    // that probe does not pollute the shared cycle-tracking state.
                    List<TypeValuePair> tempList = new ArrayList<>(unresolvedValues.size());
                    tempList.addAll(unresolvedValues);
                    if (checkIsLikeType(sourceValue, type, tempList, false)) {
                        compatibleTypesWithoutNumConversion.add(type);
                    }
                    if (checkIsLikeType(sourceValue, type, unresolvedValues, true)) {
                        compatibleTypesWithNumConversion.add(type);
                    }
                }
                return compatibleTypesWithNumConversion.size() != 0 &&
                        compatibleTypesWithNumConversion.size() - compatibleTypesWithoutNumConversion.size() <= 1;
            } else {
                // Without conversions, any single matching member suffices.
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    if (checkIsLikeType(sourceValue, type, unresolvedValues, false)) {
                        return true;
                    }
                }
            }
            return false;
        default:
            return false;
    }
}
private static XmlNodeType getXmlNodeType(Type type) {
    // Map an XML singleton type tag onto the corresponding node kind.
    // Returns null for any tag that is not a singleton XML type.
    switch (type.getTag()) {
        case TypeTags.XML_ELEMENT_TAG:
            return XmlNodeType.ELEMENT;
        case TypeTags.XML_COMMENT_TAG:
            return XmlNodeType.COMMENT;
        case TypeTags.XML_PI_TAG:
            return XmlNodeType.PI;
        case TypeTags.XML_TEXT_TAG:
            return XmlNodeType.TEXT;
        default:
            return null;
    }
}
private static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) {
    // Resolve the node kind that the target type stands for; non-singleton targets never match.
    XmlNodeType expectedNodeType = getXmlNodeType(targetType);
    if (expectedNodeType == null) {
        return false;
    }
    if (xmlSource.getNodeType() == expectedNodeType) {
        return true;
    }
    if (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {
        return false;
    }
    // A sequence qualifies when it wraps exactly one node of the expected kind,
    // or when it is empty and the expected kind is text.
    XmlSequence sequence = (XmlSequence) xmlSource;
    if (sequence.size() == 1 && sequence.getChildrenList().get(0).getNodeType() == expectedNodeType) {
        return true;
    }
    return expectedNodeType == XmlNodeType.TEXT && sequence.isEmpty();
}
private static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) {
    // Only an XML sequence can match a sequence-typed target.
    if (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {
        return false;
    }
    // Collect the node kinds permitted by the target's constraint type.
    Set<XmlNodeType> allowedNodeTypes = new HashSet<>();
    Type constraint = ((BXmlType) targetType).constraint;
    if (constraint.getTag() != TypeTags.UNION_TAG) {
        allowedNodeTypes.add(getXmlNodeType(constraint));
    } else {
        getXMLNodeOnUnion((BUnionType) constraint, allowedNodeTypes);
    }
    // Every child of the source sequence must be of a permitted kind.
    for (BXml child : ((XmlSequence) xmlSource).getChildrenList()) {
        if (!allowedNodeTypes.contains(child.getNodeType())) {
            return false;
        }
    }
    return true;
}
private static void getXMLNodeOnUnion(BUnionType unionType, Set<XmlNodeType> nodeTypes) {
    // Short-circuit: all four XML node kinds are already collected, nothing new can appear.
    if (nodeTypes.size() == 4) {
        return;
    }
    for (Type memberType : unionType.getMemberTypes()) {
        if (memberType.getTag() != TypeTags.UNION_TAG) {
            nodeTypes.add(getXmlNodeType(memberType));
        } else {
            // Nested unions are flattened recursively.
            getXMLNodeOnUnion((BUnionType) memberType, nodeTypes);
        }
    }
}
public static boolean isNumericType(Type type) {
    // Numeric types are the tags ordered before STRING_TAG, plus the integer sub-types.
    int tag = type.getTag();
    return tag < TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(tag);
}
// Checks whether the given value looks like anydata by recursively inspecting the
// members of structured values. Non-structured source types fall through to false
// (plain values of anydata-compatible basic types are handled by the type check
// before this method is reached — see checkIsLikeType).
private static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType,
                                              List<TypeValuePair> unresolvedValues,
                                              boolean allowNumericConversion) {
    switch (sourceType.getTag()) {
        case TypeTags.RECORD_TYPE_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.MAP_TAG:
            // Mapping values: every stored value must itself be anydata-like.
            return isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA,
                    unresolvedValues, allowNumericConversion);
        case TypeTags.ARRAY_TAG:
            ArrayValue arr = (ArrayValue) sourceValue;
            BArrayType arrayType = (BArrayType) arr.getType();
            switch (arrayType.getElementType().getTag()) {
                // Arrays of these basic element types are always anydata; skip the
                // per-element walk.
                case TypeTags.INT_TAG:
                case TypeTags.FLOAT_TAG:
                case TypeTags.DECIMAL_TAG:
                case TypeTags.STRING_TAG:
                case TypeTags.BOOLEAN_TAG:
                case TypeTags.BYTE_TAG:
                    return true;
                default:
                    return isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues,
                            allowNumericConversion);
            }
        case TypeTags.TUPLE_TAG:
            // Tuples: each member value must be anydata-like.
            return isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues,
                    allowNumericConversion);
        case TypeTags.ANYDATA_TAG:
            return true;
        case TypeTags.FINITE_TYPE_TAG:
        case TypeTags.UNION_TAG:
            // Re-enter the general check so the value's effective member type is used.
            return checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion);
        default:
            return false;
    }
}
private static boolean isLikeType(Object[] objects, Type targetType, List<TypeValuePair> unresolvedValues,
                                  boolean allowNumericConversion) {
    // Every element must individually look like the target type.
    boolean allMatch = true;
    for (Object element : objects) {
        if (!checkIsLikeType(element, targetType, unresolvedValues, allowNumericConversion)) {
            allMatch = false;
            break;
        }
    }
    return allMatch;
}
private static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof ArrayValue)) {
        return false;
    }
    ArrayValue sourceList = (ArrayValue) sourceValue;
    List<Type> memberTypes = targetType.getTupleTypes();
    Type restType = targetType.getRestType();
    int sourceSize = sourceList.size();
    int memberCount = memberTypes.size();
    // The source must supply every fixed member; extra members are only
    // acceptable when the tuple has a rest type.
    if (sourceSize < memberCount || (restType == null && sourceSize > memberCount)) {
        return false;
    }
    // Fixed members are checked against their declared types, trailing members
    // against the rest type.
    for (int i = 0; i < sourceSize; i++) {
        Type expectedType = i < memberCount ? memberTypes.get(i) : restType;
        if (!checkIsLikeType(sourceList.getRefValue(i), expectedType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
static boolean isByteLiteral(long longValue) {
    // A byte literal lies within [BBYTE_MIN_VALUE, BBYTE_MAX_VALUE].
    boolean notBelowMin = longValue >= BBYTE_MIN_VALUE;
    boolean notAboveMax = longValue <= BBYTE_MAX_VALUE;
    return notBelowMin && notAboveMax;
}
static boolean isSigned32LiteralValue(Long longObject) {
    // Within the signed 32-bit range, compared on the full long value.
    long value = longObject;
    return SIGNED32_MIN_VALUE <= value && value <= SIGNED32_MAX_VALUE;
}
// Checks whether the long value fits the signed 16-bit range.
// Compares the full long value; the previous intValue() call narrowed the long first,
// so values outside the 32-bit range were truncated (e.g. (1L << 32) + 1 narrows to 1)
// and could be wrongly accepted. This matches the direct comparison used by
// isSigned32LiteralValue.
static boolean isSigned16LiteralValue(Long longObject) {
    return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}
// Checks whether the long value fits the signed 8-bit range.
// Compares the full long value instead of intValue(), which truncated longs outside
// the 32-bit range and could wrongly accept them (see isSigned16LiteralValue).
static boolean isSigned8LiteralValue(Long longObject) {
    return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}
static boolean isUnsigned32LiteralValue(Long longObject) {
    // Non-negative and within the unsigned 32-bit maximum.
    long value = longObject;
    return 0 <= value && value <= UNSIGNED32_MAX_VALUE;
}
// Checks whether the long value fits the unsigned 16-bit range.
// Compares the full long value instead of intValue(), which truncated longs outside
// the 32-bit range and could wrongly accept them (see isSigned16LiteralValue).
static boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}
// Checks whether the long value fits the unsigned 8-bit range.
// Compares the full long value instead of intValue(), which truncated longs outside
// the 32-bit range and could wrongly accept them (see isSigned16LiteralValue).
static boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}
static boolean isCharLiteralValue(Object object) {
    // A char literal is a string value consisting of exactly one Unicode code point.
    String stringValue;
    if (object instanceof BString) {
        stringValue = ((BString) object).getValue();
    } else if (object instanceof String) {
        stringValue = (String) object;
    } else {
        // Non-string values can never be char literals.
        return false;
    }
    return stringValue.codePoints().count() == 1;
}
// Checks whether an array value looks like the given array type. For arrays whose
// element type is a simple value type, a type-level check (plus numeric-conversion
// reasoning) is attempted first before falling back to a per-element walk.
private static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof ArrayValue)) {
        return false;
    }
    ArrayValue source = (ArrayValue) sourceValue;
    Type targetTypeElementType = targetType.getElementType();
    if (source.getType().getTag() == TypeTags.ARRAY_TAG) {
        Type sourceElementType = ((BArrayType) source.getType()).getElementType();
        if (isValueType(sourceElementType)) {
            // Element types are simple values: a direct subtype check suffices.
            if (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) {
                return true;
            }
            if (allowNumericConversion && isNumericType(sourceElementType)) {
                if (isNumericType(targetTypeElementType)) {
                    return true;
                }
                if (targetTypeElementType.getTag() != TypeTags.UNION_TAG) {
                    return false;
                }
                // Conversion into a union element type is only unambiguous when the
                // union contains exactly one distinct numeric member.
                List<Type> targetNumericTypes = new ArrayList<>();
                for (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) {
                    if (isNumericType(memType) && !targetNumericTypes.contains(memType)) {
                        targetNumericTypes.add(memType);
                    }
                }
                return targetNumericTypes.size() == 1;
            }
            // Without conversion, value-typed elements cannot become float/decimal,
            // so reject early rather than walking the elements.
            if (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG ||
                    targetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) {
                return false;
            }
        }
    }
    // Fallback: every element must individually look like the target element type.
    for (int i = 0; i < source.size(); i++) {
        if (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
private static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType,
                                          List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof MapValueImpl)) {
        return false;
    }
    // Every stored value must look like the map's constraint type.
    Type constraintType = targetType.getConstrainedType();
    for (Object storedValue : ((MapValueImpl) sourceValue).values()) {
        if (!checkIsLikeType(storedValue, constraintType, unresolvedValues, allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
private static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) {
    if (!(sourceValue instanceof StreamValue)) {
        return false;
    }
    // Streams match only when the constraint types are the same type instance
    // (reference comparison, as in the original implementation).
    BStreamType sourceStreamType = (BStreamType) ((StreamValue) sourceValue).getType();
    return sourceStreamType.getConstrainedType() == targetType.getConstrainedType();
}
// Checks whether a structured value looks like json by walking its members.
// Only array, map, record, and tuple sources are considered; anything else is false
// (plain json-compatible values are handled by the preceding type check).
private static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType,
                                           List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (sourceType.getTag() == TypeTags.ARRAY_TAG) {
        ArrayValue source = (ArrayValue) sourceValue;
        Type elementType = ((BArrayType) source.getType()).getElementType();
        // Value-typed elements can be decided at the type level without a walk.
        if (isValueType(elementType)) {
            return checkIsType(elementType, targetType, new ArrayList<>());
        }
        Object[] arrayValues = source.getValues();
        for (int i = 0; i < ((ArrayValue) sourceValue).size(); i++) {
            if (!checkIsLikeType(arrayValues[i], targetType, unresolvedValues, allowNumericConversion)) {
                return false;
            }
        }
        return true;
    } else if (sourceType.getTag() == TypeTags.MAP_TAG) {
        for (Object value : ((MapValueImpl) sourceValue).values()) {
            if (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {
                return false;
            }
        }
        return true;
    } else if (sourceType.getTag() == TypeTags.RECORD_TYPE_TAG) {
        // Records may reference themselves; treat a pair already under examination
        // as a match so cyclic structures terminate.
        TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
        if (unresolvedValues.contains(typeValuePair)) {
            return true;
        }
        unresolvedValues.add(typeValuePair);
        for (Object object : ((MapValueImpl) sourceValue).values()) {
            if (!checkIsLikeType(object, targetType, unresolvedValues, allowNumericConversion)) {
                return false;
            }
        }
        return true;
    } else if (sourceType.getTag() == TypeTags.TUPLE_TAG) {
        for (Object obj : ((TupleValueImpl) sourceValue).getValues()) {
            if (!checkIsLikeType(obj, targetType, unresolvedValues, allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
// Checks whether a mapping value looks like the given record type: all non-optional
// target fields must be present, present fields must match their declared types, and
// extra fields are only allowed (against the rest field type) when the record is open.
private static boolean checkIsLikeRecordType(Object sourceValue, BRecordType targetType,
                                             List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof MapValueImpl)) {
        return false;
    }
    // Cycle guard: a (value, type) pair already being examined is assumed to match.
    TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
    if (unresolvedValues.contains(typeValuePair)) {
        return true;
    }
    unresolvedValues.add(typeValuePair);
    // Index the target's declared fields by name for the per-entry checks below.
    Map<String, Type> targetTypeField = new HashMap<>();
    Type restFieldType = targetType.restFieldType;
    for (Field field : targetType.getFields().values()) {
        targetTypeField.put(field.getFieldName(), field.getFieldType());
    }
    // Every target field that is not flagged OPTIONAL must be present in the source.
    for (Map.Entry targetTypeEntry : targetTypeField.entrySet()) {
        Object fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString());
        if (!(((MapValueImpl) sourceValue).containsKey(fieldName)) &&
                !SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(),
                        SymbolFlags.OPTIONAL)) {
            return false;
        }
    }
    // Every source entry must match its declared field type, or — for fields the
    // record does not declare — the rest field type when the record is open.
    for (Object object : ((MapValueImpl) sourceValue).entrySet()) {
        Map.Entry valueEntry = (Map.Entry) object;
        String fieldName = valueEntry.getKey().toString();
        if (targetTypeField.containsKey(fieldName)) {
            if (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName),
                    unresolvedValues, allowNumericConversion)) {
                return false;
            }
        } else {
            if (!targetType.sealed) {
                if (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues,
                        allowNumericConversion)) {
                    return false;
                }
            } else {
                // Sealed (closed) records reject any undeclared field.
                return false;
            }
        }
    }
    return true;
}
// Checks whether a table value looks like the given table type: key compatibility
// first, then each member row against the table's constraint type.
private static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    if (!(sourceValue instanceof TableValueImpl)) {
        return false;
    }
    TableValueImpl tableValue = (TableValueImpl) sourceValue;
    BTableType sourceType = (BTableType) tableValue.getType();
    // A keyed target cannot be satisfied by a source table without key field names.
    if (targetType.getKeyType() != null && sourceType.getFieldNames() == null) {
        return false;
    }
    // If the source is keyed, its key type must be a subtype of the target key type.
    if (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(), targetType.getKeyType())) {
        return false;
    }
    // Cycle guard: record the in-progress pair before walking members. Previously the
    // pair was looked up but never added, so the contains() check above could never
    // fire and cyclic member references were not terminated by this guard.
    TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
    if (unresolvedValues.contains(typeValuePair)) {
        return true;
    }
    unresolvedValues.add(typeValuePair);
    Object[] objects = tableValue.values().toArray();
    for (Object object : objects) {
        // Pass the shared unresolved-pair list through (the previous 3-arg overload
        // discarded it), consistent with the record/JSON handlers.
        if (!checkIsLikeType(object, targetType.getConstrainedType(), unresolvedValues,
                allowNumericConversion)) {
            return false;
        }
    }
    return true;
}
private static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType, BFiniteType targetType) {
    // The value is assignable when it matches any member of the finite type's value space.
    boolean matched = false;
    for (Object candidate : targetType.valueSpace) {
        if (isFiniteTypeValue(sourceValue, sourceType, candidate)) {
            matched = true;
            break;
        }
    }
    return matched;
}
// Checks whether the source value equals a single member of a finite type's value space.
protected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) {
    Type valueSpaceItemType = getType(valueSpaceItem);
    // For value-space items whose type tag is above FLOAT_TAG, the source must have the
    // exact same tag and be equal (by identity or equals).
    if (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) {
        return valueSpaceItemType.getTag() == sourceType.getTag() &&
                (valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue));
    }
    switch (sourceType.getTag()) {
        case TypeTags.BYTE_TAG:
        case TypeTags.INT_TAG:
            // Integer-like values are compared numerically, regardless of byte/int tag.
            return ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue();
        case TypeTags.FLOAT_TAG:
            // Floats only match float-tagged items, compared as doubles.
            if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                return false;
            }
            return ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue();
        case TypeTags.DECIMAL_TAG:
        default:
            // All remaining tags (including decimal) require matching tags and equals().
            if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                return false;
            }
            return valueSpaceItem.equals(sourceValue);
    }
}
private static boolean checkIsErrorType(Type sourceType, BErrorType targetType, List<TypePair> unresolvedTypes) {
    if (sourceType.getTag() != TypeTags.ERROR_TAG) {
        return false;
    }
    // Assume success for type pairs already under examination so recursive error
    // type definitions terminate.
    TypePair currentPair = new TypePair(sourceType, targetType);
    if (unresolvedTypes.contains(currentPair)) {
        return true;
    }
    unresolvedTypes.add(currentPair);
    BErrorType sourceErrorType = (BErrorType) sourceType;
    // The source detail type must be a subtype of the target detail type.
    if (!checkIsType(sourceErrorType.detailType, targetType.detailType, unresolvedTypes)) {
        return false;
    }
    // Without target type-ids there is nothing further to satisfy.
    if (targetType.typeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceTypeIdSet = sourceErrorType.typeIdSet;
    return sourceTypeIdSet != null && sourceTypeIdSet.containsAll(targetType.typeIdSet);
}
private static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType,
                                            List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
    Type sourceType = getType(sourceValue);
    if (sourceValue == null || sourceType.getTag() != TypeTags.ERROR_TAG) {
        return false;
    }
    // The error's detail value must look like the target's detail type.
    Object details = ((ErrorValue) sourceValue).getDetails();
    if (!checkIsLikeType(details, targetType.detailType, unresolvedValues, allowNumericConversion)) {
        return false;
    }
    // Without target type-ids, the detail check alone decides.
    if (targetType.typeIdSet == null) {
        return true;
    }
    BTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet;
    return sourceIdSet != null && sourceIdSet.containsAll(targetType.typeIdSet);
}
private static boolean isSimpleBasicType(Type type) {
    // Simple basic types are the tags ordered strictly before the nil tag.
    int tag = type.getTag();
    return tag < TypeTags.NULL_TAG;
}
private static boolean isHandleType(Type type) {
    // True only for the handle basic type.
    int tag = type.getTag();
    return tag == TypeTags.HANDLE_TAG;
}
/**
* Deep value equality check for anydata.
*
* @param lhsValue The value on the left hand side
* @param rhsValue The value on the right hand side
* @param checkedValues Structured value pairs already compared or being compared
* @return True if values are equal, else false.
*/
private static boolean isEqual(Object lhsValue, Object rhsValue, List<ValuePair> checkedValues) {
    // Reference-identical values (including both null) are trivially equal.
    if (lhsValue == rhsValue) {
        return true;
    }
    if (null == lhsValue || null == rhsValue) {
        return false;
    }
    int lhsValTypeTag = getType(lhsValue).getTag();
    int rhsValTypeTag = getType(rhsValue).getTag();
    // Dispatch on the left-hand value's type tag; each branch first verifies the
    // right-hand tag is compatible before comparing.
    switch (lhsValTypeTag) {
        case TypeTags.STRING_TAG:
        case TypeTags.BOOLEAN_TAG:
            return lhsValue.equals(rhsValue);
        case TypeTags.INT_TAG:
            // ints and bytes are mutually comparable, numerically.
            if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).longValue());
        case TypeTags.BYTE_TAG:
            if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                return false;
            }
            return ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();
        case TypeTags.FLOAT_TAG:
            if (rhsValTypeTag != TypeTags.FLOAT_TAG) {
                return false;
            }
            // NaN is treated as equal to NaN here, unlike IEEE-754 == semantics.
            if (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {
                return true;
            }
            return ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();
        case TypeTags.DECIMAL_TAG:
            if (rhsValTypeTag != TypeTags.DECIMAL_TAG) {
                return false;
            }
            return checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.XML_TAG:
            // An XML-tagged value may concretely be a text node or a sequence; pick
            // the overload matching the concrete representation.
            if (lhsValue instanceof XmlText) {
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
            }
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_ELEMENT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_COMMENT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_TEXT_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
        case TypeTags.XML_PI_TAG:
            return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue, (XmlValue) rhsValue);
        case TypeTags.MAP_TAG:
        case TypeTags.JSON_TAG:
        case TypeTags.RECORD_TYPE_TAG:
            // Structured values recurse with checkedValues to terminate cycles.
            return isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue, (MapValueImpl) rhsValue,
                    checkedValues);
        case TypeTags.TUPLE_TAG:
        case TypeTags.ARRAY_TAG:
            return isListType(rhsValTypeTag) &&
                    isEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue, checkedValues);
        case TypeTags.ERROR_TAG:
            return rhsValTypeTag == TypeTags.ERROR_TAG &&
                    isEqual((ErrorValue) lhsValue, (ErrorValue) rhsValue, checkedValues);
        case TypeTags.SERVICE_TAG:
            // Services fall through to false: only reference equality (handled above) applies.
            break;
        case TypeTags.TABLE_TAG:
            return rhsValTypeTag == TypeTags.TABLE_TAG &&
                    isEqual((TableValueImpl) lhsValue, (TableValueImpl) rhsValue, checkedValues);
    }
    return false;
}
private static boolean isListType(int typeTag) {
    // List values are arrays and tuples.
    switch (typeTag) {
        case TypeTags.ARRAY_TAG:
        case TypeTags.TUPLE_TAG:
            return true;
        default:
            return false;
    }
}
private static boolean isMappingType(int typeTag) {
    // Mapping values are maps, records, and json mappings.
    switch (typeTag) {
        case TypeTags.MAP_TAG:
        case TypeTags.RECORD_TYPE_TAG:
        case TypeTags.JSON_TAG:
            return true;
        default:
            return false;
    }
}
/**
* Deep equality check for an array/tuple.
*
* @param lhsList The array/tuple on the left hand side
* @param rhsList The array/tuple on the right hand side
* @param checkedValues Structured value pairs already compared or being compared
* @return True if the array/tuple values are equal, else false.
*/
private static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List<ValuePair> checkedValues) {
    // Treat a pair already being compared as equal so cyclic structures terminate.
    ValuePair pair = new ValuePair(lhsList, rhsList);
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);
    if (lhsList.size() != rhsList.size()) {
        return false;
    }
    // Members are compared pairwise, in order.
    for (int index = 0; index < lhsList.size(); index++) {
        if (!isEqual(lhsList.get(index), rhsList.get(index), checkedValues)) {
            return false;
        }
    }
    return true;
}
/**
* Deep equality check for a map.
*
* @param lhsMap Map on the left hand side
* @param rhsMap Map on the right hand side
* @param checkedValues Structured value pairs already compared or being compared
* @return True if the map values are equal, else false.
*/
private static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List<ValuePair> checkedValues) {
    // Treat a pair already being compared as equal so cyclic structures terminate.
    ValuePair pair = new ValuePair(lhsMap, rhsMap);
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);
    // The maps must agree on size and key set before values are compared.
    if (lhsMap.size() != rhsMap.size()) {
        return false;
    }
    if (!lhsMap.keySet().containsAll(rhsMap.keySet())) {
        return false;
    }
    // Every value must be deep-equal to the value under the same key on the right.
    for (Object entryObject : lhsMap.entrySet()) {
        Map.Entry<BString, Object> entry = (Map.Entry<BString, Object>) entryObject;
        if (!isEqual(entry.getValue(), rhsMap.get(entry.getKey()), checkedValues)) {
            return false;
        }
    }
    return true;
}
/**
* Deep equality check for a table.
*
* @param lhsTable Table on the left hand side
* @param rhsTable Table on the right hand side
* @param checkedValues Structured value pairs already compared or being compared
* @return True if the table values are equal, else false.
*/
private static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List<ValuePair> checkedValues) {
    // Treat a pair already being compared as equal so cyclic structures terminate.
    ValuePair pair = new ValuePair(lhsTable, rhsTable);
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);
    if (lhsTable.size() != rhsTable.size()) {
        return false;
    }
    // Tables are comparable only when both are keyed or both are unkeyed.
    BTableType lhsTableType = (BTableType) lhsTable.getType();
    BTableType rhsTableType = (BTableType) rhsTable.getType();
    boolean lhsKeyed = lhsTableType.getFieldNames() != null && lhsTableType.getFieldNames().length > 0;
    boolean rhsKeyed = rhsTableType.getFieldNames() != null && rhsTableType.getFieldNames().length > 0;
    if (lhsKeyed != rhsKeyed) {
        return false;
    }
    // Rows are compared pairwise, in iteration order.
    Object[] lhsTableValues = lhsTable.values().toArray();
    Object[] rhsTableValues = rhsTable.values().toArray();
    for (int i = 0; i < lhsTableValues.length; i++) {
        if (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) {
            return false;
        }
    }
    return true;
}
/**
* Deep equality check for error.
*
* @param lhsError The error on the left hand side
* @param rhsError The error on the right hand side
* @param checkedValues Errors already compared or being compared
* @return True if the error values are equal, else false.
*/
private static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List<ValuePair> checkedValues) {
    // Treat a pair already being compared as equal so cyclic structures terminate.
    ValuePair pair = new ValuePair(lhsError, rhsError);
    if (checkedValues.contains(pair)) {
        return true;
    }
    checkedValues.add(pair);
    // Errors are equal when message, detail mapping, and cause are all deep-equal.
    if (!isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues)) {
        return false;
    }
    if (!isEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues)) {
        return false;
    }
    return isEqual(lhsError.getCause(), rhsError.getCause(), checkedValues);
}
/**
* Deep equality check for XML Sequence.
*
* @param lhsXMLSequence The XML sequence on the left hand side
* @param rhsXml The XML on the right hand side
* @return True if the XML values are equal, else false.
*/
private static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) {
    if (rhsXml instanceof XmlSequence) {
        // Two sequences are equal when their children match pairwise.
        return isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(),
                ((XmlSequence) rhsXml).getChildrenList());
    }
    if (rhsXml instanceof XmlItem) {
        // A singleton sequence equals the item it wraps.
        return lhsXMLSequence.getChildrenList().size() == 1 &&
                isEqual(lhsXMLSequence.getChildrenList().get(0), rhsXml);
    }
    // Otherwise only an empty sequence can equal a value of type xml:Never.
    return lhsXMLSequence.getChildrenList().isEmpty() &&
            TypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER;
}
/**
* Deep equality check for XML item.
*
* @param lhsXMLItem The XML item on the left hand side
* @param rhsXml The XML on the right hand side
* @return True if the XML values are equal, else false.
*/
private static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) {
    if (rhsXml instanceof XmlItem) {
        XmlItem rhsXMLItem = (XmlItem) rhsXml;
        // Element name, attribute map, and children sequence must all match.
        boolean sameName = rhsXMLItem.getQName().equals(lhsXMLItem.getQName());
        if (!sameName) {
            return false;
        }
        boolean sameAttributes =
                rhsXMLItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet());
        if (!sameAttributes) {
            return false;
        }
        return isEqual(rhsXMLItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq());
    }
    if (rhsXml instanceof XmlSequence) {
        // An item equals a sequence only when the sequence wraps exactly that item.
        XmlSequence rhsXMLSequence = (XmlSequence) rhsXml;
        return rhsXMLSequence.getChildrenList().size() == 1 &&
                isEqual(lhsXMLItem, rhsXMLSequence.getChildrenList().get(0));
    }
    return false;
}
/**
* Deep equality check for XML Text.
*
* @param lhsXMLText The XML text on the left hand side
* @param rhsXml The XML on the right hand side
* @return True if the XML values are equal, else false.
*/
private static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) {
    // Text nodes compare by their string content.
    if (rhsXml instanceof XmlText) {
        return lhsXMLText.getTextValue().equals(((XmlText) rhsXml).getTextValue());
    }
    // An xml:Never text value also equals an empty sequence.
    boolean lhsIsNever = lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER;
    return lhsIsNever && rhsXml instanceof XmlSequence &&
            ((XmlSequence) rhsXml).getChildrenList().isEmpty();
}
/**
* Deep equality check for XML Comment.
*
* @param lhsXMLComment The XML comment on the left hand side
* @param rhsXml The XML on the right hand side
* @return True if the XML values are equal, else false.
*/
private static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) {
    // Comments compare by their text content only.
    return rhsXml instanceof XmlComment
            && lhsXMLComment.getTextValue().equals(((XmlComment) rhsXml).getTextValue());
}
/**
* Deep equality check for XML Processing Instruction.
*
* @param lhsXMLPi The XML processing instruction on the left hand side
* @param rhsXml The XML on the right hand side
* @return True if the XML values are equal, else false.
*/
private static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) {
    if (!(rhsXml instanceof XmlPi)) {
        return false;
    }
    // Processing instructions compare by both data and target.
    XmlPi rhsXMLPi = (XmlPi) rhsXml;
    boolean sameData = lhsXMLPi.getData().equals(rhsXMLPi.getData());
    boolean sameTarget = lhsXMLPi.getTarget().equals(rhsXMLPi.getTarget());
    return sameData && sameTarget;
}
private static boolean isXMLSequenceChildrenEqual(List<BXml> lhsList, List<BXml> rhsList) {
    int childCount = lhsList.size();
    if (childCount != rhsList.size()) {
        return false;
    }
    // Children are compared pairwise, in order.
    for (int index = 0; index < childCount; index++) {
        if (!isEqual(lhsList.get(index), rhsList.get(index))) {
            return false;
        }
    }
    return true;
}
/**
* Type vector of size two, to hold the source and the target types.
*
* @since 0.995.0
*/
private static class TypePair {
    Type sourceType;
    Type targetType;

    public TypePair(Type sourceType, Type targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypePair)) {
            return false;
        }
        TypePair other = (TypePair) obj;
        return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);
    }

    // hashCode is overridden alongside equals to honor the Object contract: equal
    // pairs must hash identically. Within this class instances are only stored in
    // Lists, but the contract must hold for any hash-based use.
    @Override
    public int hashCode() {
        return 31 * sourceType.hashCode() + targetType.hashCode();
    }
}
/**
* Check the reference equality of handle values.
*
* @param lhsValue The value on the left hand side
* @param rhsValue The value on the right hand side
* @return True if values are equal, else false.
*/
private static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) {
    // Handles are equal only when they wrap the very same referent (reference identity).
    HandleValue leftHandle = (HandleValue) lhsValue;
    HandleValue rightHandle = (HandleValue) rhsValue;
    return leftHandle.getValue() == rightHandle.getValue();
}
/**
 * Value vector of size two, to hold two values being compared. Equality is positional:
 * elements are compared pairwise by index.
 *
 * @since 0.995.0
 */
private static class ValuePair {
    ArrayList<Object> valueList = new ArrayList<>(2);

    ValuePair(Object valueOne, Object valueTwo) {
        valueList.add(valueOne);
        valueList.add(valueTwo);
    }

    @Override
    public boolean equals(Object otherPair) {
        if (!(otherPair instanceof ValuePair)) {
            return false;
        }
        ArrayList otherList = ((ValuePair) otherPair).valueList;
        ArrayList currentList = valueList;
        if (otherList.size() != currentList.size()) {
            return false;
        }
        // Positional comparison: element i on one side against element i on the other.
        for (int i = 0; i < otherList.size(); i++) {
            if (!otherList.get(i).equals(currentList.get(i))) {
                return false;
            }
        }
        return true;
    }

    // hashCode is overridden alongside equals to honor the Object contract (equal
    // pairs hash identically). ArrayList.hashCode folds the element hash codes, which
    // is consistent with the element-wise equals above. NOTE(review): within this
    // class pairs are only stored in Lists, so hashCode is not exercised on the hot
    // path; avoid placing pairs of cyclic values in hash-based collections.
    @Override
    public int hashCode() {
        return valueList.hashCode();
    }
}
/**
* Checks whether a given {@link BType} has an implicit initial value or not.
* @param type {@link BType} to be analyzed.
* @return whether there's an implicit initial value or not.
*/
public static boolean hasFillerValue(Type type) {
    // Delegate with a fresh list used for cycle tracking of composite types.
    List<Type> unanalyzedTypes = new ArrayList<>();
    return hasFillerValue(type, unanalyzedTypes);
}
// Recursive worker for hasFillerValue; unanalyzedTypes tracks composite types already
// being analyzed so cyclic type definitions terminate.
private static boolean hasFillerValue(Type type, List<Type> unanalyzedTypes) {
    // A null type is treated as having a filler value.
    if (type == null) {
        return true;
    }
    // Tags ordered before RECORD_TYPE_TAG are the basic types, which all have filler
    // values except char-string and never.
    if (type.getTag() < TypeTags.RECORD_TYPE_TAG &&
            !(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) {
        return true;
    }
    switch (type.getTag()) {
        case TypeTags.STREAM_TAG:
        case TypeTags.MAP_TAG:
        case TypeTags.ANY_TAG:
            return true;
        case TypeTags.ARRAY_TAG:
            return checkFillerValue((BArrayType) type, unanalyzedTypes);
        case TypeTags.FINITE_TYPE_TAG:
            return checkFillerValue((BFiniteType) type);
        case TypeTags.OBJECT_TYPE_TAG:
            return checkFillerValue((BObjectType) type);
        case TypeTags.RECORD_TYPE_TAG:
            return checkFillerValue((BRecordType) type, unanalyzedTypes);
        case TypeTags.TUPLE_TAG:
            return checkFillerValue((BTupleType) type, unanalyzedTypes);
        case TypeTags.UNION_TAG:
            return checkFillerValue((BUnionType) type, unanalyzedTypes);
        default:
            return false;
    }
}
private static boolean checkFillerValue(BTupleType tupleType, List<Type> unAnalyzedTypes) {
if (unAnalyzedTypes.contains(tupleType)) {
return true;
}
unAnalyzedTypes.add(tupleType);
for (Type member : tupleType.getTupleTypes()) {
if (!hasFillerValue(member, unAnalyzedTypes)) {
return false;
}
}
return true;
}
    // A union has a filler value when it is nil-able (fill with ()), or when all
    // members are the same value type and that type itself has a filler value.
    private static boolean checkFillerValue(BUnionType type, List<Type> unAnalyzedTypes) {
        // Break cycles for recursive union definitions.
        if (unAnalyzedTypes.contains(type)) {
            return true;
        }
        unAnalyzedTypes.add(type);
        if (type.isNullable()) {
            return true;
        }
        // NOTE(review): assumes getMemberTypes() is non-empty — the first next()
        // would throw NoSuchElementException otherwise; confirm unions always
        // carry at least one member by construction.
        Iterator<Type> iterator = type.getMemberTypes().iterator();
        Type firstMember;
        for (firstMember = iterator.next(); iterator.hasNext(); ) {
            // Every subsequent member must be the same type as the first.
            if (!isSameType(firstMember, iterator.next())) {
                return false;
            }
        }
        return isValueType(firstMember) && hasFillerValue(firstMember);
    }
    // A record has a filler value unless it declares a required field: optional
    // fields never block filling, and fields that are neither optional nor
    // required presumably carry a default value (TODO confirm) so they are
    // fillable too.
    private static boolean checkFillerValue(BRecordType type, List<Type> unAnalyzedTypes) {
        // Break cycles for recursive record definitions.
        if (unAnalyzedTypes.contains(type)) {
            return true;
        }
        unAnalyzedTypes.add(type);
        for (Field field : type.getFields().values()) {
            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) {
                continue;
            }
            if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) {
                continue;
            }
            // First required field encountered makes the record non-fillable.
            return false;
        }
        return true;
    }
private static boolean checkFillerValue(BArrayType type, List<Type> unAnalyzedTypes) {
return type.getState() == ArrayState.OPEN || hasFillerValue(type.getElementType(), unAnalyzedTypes);
}
private static boolean checkFillerValue(BObjectType type) {
if (type.getTag() == TypeTags.SERVICE_TAG) {
return false;
} else {
MethodType generatedInitializer = type.generatedInitializer;
if (generatedInitializer == null) {
return false;
}
FunctionType initFuncType = generatedInitializer.getType();
boolean noParams = initFuncType.getParameters().length == 0;
boolean nilReturn = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG;
return noParams && nilReturn;
}
}
    // A finite type has a filler value when it contains nil, is a singleton, or
    // when all members share one basic type and the space contains that type's
    // zero value.
    private static boolean checkFillerValue(BFiniteType type) {
        // Nil member: fill with ().
        for (Object value: type.valueSpace) {
            if (value == null) {
                return true;
            }
        }
        // Singleton: fill with the single member.
        if (type.valueSpace.size() == 1) {
            return true;
        }
        Object firstElement = type.valueSpace.iterator().next();
        // All members must share the same runtime class.
        for (Object value : type.valueSpace) {
            if (value.getClass() != firstElement.getClass()) {
                return false;
            }
        }
        // Zero-value membership is checked via toString() comparison.
        if (firstElement instanceof String) {
            // NOTE(review): compares against the literal "\"\"" — assumes string
            // members stringify in quoted form here; confirm.
            return containsElement(type.valueSpace, "\"\"");
        } else if (firstElement instanceof Byte
                || firstElement instanceof Integer
                || firstElement instanceof Long) {
            return containsElement(type.valueSpace, "0");
        } else if (firstElement instanceof Float
                || firstElement instanceof Double
                || firstElement instanceof BigDecimal) {
            // NOTE(review): BigDecimal.toString() depends on scale ("0" vs "0.0"),
            // so a zero with a different scale may be missed — confirm.
            return containsElement(type.valueSpace, "0.0");
        } else if (firstElement instanceof Boolean) {
            return containsElement(type.valueSpace, "false");
        } else {
            return false;
        }
    }
private static boolean containsElement(Set<Object> valueSpace, String e) {
for (Object value : valueSpace) {
if (value != null && value.toString().equals(e)) {
return true;
}
}
return false;
}
private static boolean containsType(Set<Object> valueSpace, Type type) {
for (Object value : valueSpace) {
if (!isSameType(type, getType(value))) {
return false;
}
}
return true;
}
    /**
     * Validates that the given value is representable as a Ballerina value.
     * NOTE(review): presumably used at the Java interop boundary — confirm callers.
     *
     * @param sourceVal  value to validate; null, Number, Boolean, {@link BString}
     *                   and {@link BValue} instances pass through unchanged
     * @param targetType Ballerina type used when constructing the cast error
     * @return the value unchanged when acceptable
     */
    public static Object handleAnydataValues(Object sourceVal, Type targetType) {
        if (sourceVal != null && !(sourceVal instanceof Number) && !(sourceVal instanceof BString) &&
                !(sourceVal instanceof Boolean) && !(sourceVal instanceof BValue)) {
            // Any other Java object cannot be represented; raise a typed cast error.
            throw ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType);
        }
        return sourceVal;
    }
    // Utility class: private constructor prevents instantiation.
    private TypeChecker() {
    }
}
|
We can definitively say that an if-else statement causes a function to return only when every branch — the if block, each else-if block, and the else block — contains a return statement. If at least one branch lacks a return statement, the whole if-else statement is marked as one that does not guarantee a return. In particular, the absence of an else block implies the absence of a guaranteed return.
|
    /**
     * Semantically analyzes an if-else statement and records whether it guarantees a return.
     * The statement returns only when the then block, every else-if block, and the else
     * block all return; an absent else block means a return cannot be guaranteed.
     */
    public void visit(IfElseStmt ifElseStmt) {
        boolean stmtReturns = true;
        Expression expr = ifElseStmt.getCondition();
        visitSingleValueExpr(expr);
        // The condition must evaluate to a boolean.
        if (expr.getType() != BTypes.typeBoolean) {
            BLangExceptionHelper
                    .throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType());
        }
        Statement thenBody = ifElseStmt.getThenBody();
        thenBody.accept(this);
        stmtReturns &= thenBody.resultsInAReturn();
        for (IfElseStmt.ElseIfBlock elseIfBlock : ifElseStmt.getElseIfBlocks()) {
            Expression elseIfCondition = elseIfBlock.getElseIfCondition();
            visitSingleValueExpr(elseIfCondition);
            if (elseIfCondition.getType() != BTypes.typeBoolean) {
                BLangExceptionHelper.throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED,
                        elseIfCondition.getType());
            }
            Statement elseIfBody = elseIfBlock.getElseIfBody();
            elseIfBody.accept(this);
            stmtReturns &= elseIfBody.resultsInAReturn();
        }
        Statement elseBody = ifElseStmt.getElseBody();
        if (elseBody != null) {
            elseBody.accept(this);
            stmtReturns &= elseBody.resultsInAReturn();
        } else {
            // No else block: some execution path skips every branch, so a return
            // cannot be guaranteed.
            stmtReturns = false;
        }
        ifElseStmt.setReturns(stmtReturns);
    }
|
stmtReturns = false;
|
    /**
     * Semantically analyzes an if-else statement and records whether it always returns.
     * The statement always returns only when the then block, every else-if block, and
     * the else block all always return; an absent else block means it may fall through.
     */
    public void visit(IfElseStmt ifElseStmt) {
        boolean stmtReturns = true;
        Expression expr = ifElseStmt.getCondition();
        visitSingleValueExpr(expr);
        // The condition must evaluate to a boolean.
        if (expr.getType() != BTypes.typeBoolean) {
            BLangExceptionHelper
                    .throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType());
        }
        Statement thenBody = ifElseStmt.getThenBody();
        thenBody.accept(this);
        stmtReturns &= thenBody.isAlwaysReturns();
        for (IfElseStmt.ElseIfBlock elseIfBlock : ifElseStmt.getElseIfBlocks()) {
            Expression elseIfCondition = elseIfBlock.getElseIfCondition();
            visitSingleValueExpr(elseIfCondition);
            if (elseIfCondition.getType() != BTypes.typeBoolean) {
                BLangExceptionHelper.throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED,
                        elseIfCondition.getType());
            }
            Statement elseIfBody = elseIfBlock.getElseIfBody();
            elseIfBody.accept(this);
            stmtReturns &= elseIfBody.isAlwaysReturns();
        }
        Statement elseBody = ifElseStmt.getElseBody();
        if (elseBody != null) {
            elseBody.accept(this);
            stmtReturns &= elseBody.isAlwaysReturns();
        } else {
            // No else block: some execution path skips every branch, so the
            // statement cannot always return.
            stmtReturns = false;
        }
        ifElseStmt.setAlwaysReturns(stmtReturns);
    }
|
class SemanticAnalyzer implements NodeVisitor {
    // Next free slot offsets for the different memory areas; -1 means no slot
    // allocated yet (first ++offset yields 0).
    private int stackFrameOffset = -1;
    private int staticMemAddrOffset = -1;
    private int connectorMemAddrOffset = -1;
    private int structMemAddrOffset = -1;
    private int workerMemAddrOffset = -1;
    // Package path of the package currently being analyzed.
    private String currentPkg;
    private TypeLattice packageTypeLattice;
    // Callable unit under analysis; parentCallableUnit backs it up while a worker
    // is visited (see visit(Worker)).
    private CallableUnit currentCallableUnit = null;
    private CallableUnit parentCallableUnit = null;
    // NOTE(review): presumably matches template placeholders like ${var},
    // ${var[0]} and ${var["key"]} — confirm against callers of compiledPattern.
    private static final String patternString = "\\$\\{((\\w+)(\\[(\\d+|\\\"(\\w+)\\\")\\])?)\\}";
    private static final Pattern compiledPattern = Pattern.compile(patternString);
    // Nesting depth of while statements; used to reject break outside a loop.
    private int whileStmtCount = 0;
    private SymbolScope currentScope;
    // Collects statements for the synthesized package <init> function.
    private BlockStmt.BlockStmtBuilder pkgInitFuncStmtBuilder;
    /**
     * Creates a semantic analyzer rooted at the program's global scope.
     *
     * @param programScope the {@link BLangProgram} acting as the outermost symbol scope
     */
    public SemanticAnalyzer(BLangProgram programScope) {
        currentScope = programScope;
    }
    /**
     * Analyzes the program's packages according to its category (main, service or
     * library), then records the total static memory size consumed during analysis.
     */
    @Override
    public void visit(BLangProgram bLangProgram) {
        BLangPackage mainPkg = bLangProgram.getMainPackage();
        if (bLangProgram.getProgramCategory() == BLangProgram.Category.MAIN_PROGRAM) {
            mainPkg.accept(this);
        } else if (bLangProgram.getProgramCategory() == BLangProgram.Category.SERVICE_PROGRAM) {
            BLangPackage[] servicePackages = bLangProgram.getServicePackages();
            for (BLangPackage servicePkg : servicePackages) {
                servicePkg.accept(this);
            }
        } else {
            BLangPackage[] libraryPackages = bLangProgram.getLibraryPackages();
            for (BLangPackage libraryPkg : libraryPackages) {
                libraryPkg.accept(this);
            }
        }
        // staticMemAddrOffset holds the highest used slot; size is offset + 1.
        int setSizeOfStaticMem = staticMemAddrOffset + 1;
        bLangProgram.setSizeOfStaticMem(setSizeOfStaticMem);
        staticMemAddrOffset = -1;
    }
    /**
     * Analyzes a package: first its not-yet-analyzed dependencies, then its own
     * top-level constructs, while synthesizing the package init function that
     * chains dependency init calls, constant/global initializations and a final
     * return.
     */
    @Override
    public void visit(BLangPackage bLangPackage) {
        BLangPackage[] dependentPackages = bLangPackage.getDependentPackages();
        List<BallerinaFunction> initFunctionList = new ArrayList<>();
        for (int i = 0; i < dependentPackages.length; i++) {
            BLangPackage dependentPkg = dependentPackages[i];
            if (dependentPkg.isSymbolsDefined()) {
                // Dependency already analyzed in an earlier pass; skip it.
                continue;
            }
            dependentPkg.accept(this);
            initFunctionList.add(dependentPkg.getInitFunction());
        }
        currentScope = bLangPackage;
        currentPkg = bLangPackage.getPackagePath();
        if (packageTypeLattice != null) {
            // Merge previously collected conversion edges into this package's lattice.
            TypeLattice currentLattice = bLangPackage.getTypeLattice();
            currentLattice.merge(packageTypeLattice, currentPkg);
            packageTypeLattice = currentLattice;
        } else {
            packageTypeLattice = bLangPackage.getTypeLattice();
        }
        NodeLocation pkgLocation = bLangPackage.getNodeLocation();
        if (pkgLocation == null) {
            // Fall back to the first source file's name for error reporting.
            BallerinaFile[] ballerinaFiles = bLangPackage.getBallerinaFiles();
            String filename = ballerinaFiles.length == 0 ? "" :
                    ballerinaFiles[0].getFileName();
            pkgLocation = new NodeLocation("", filename, 0);
        }
        BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
                new BallerinaFunction.BallerinaFunctionBuilder(bLangPackage);
        functionBuilder.setNodeLocation(pkgLocation);
        functionBuilder.setName(bLangPackage.getPackagePath() + ".<init>");
        functionBuilder.setPkgPath(bLangPackage.getPackagePath());
        pkgInitFuncStmtBuilder = new BlockStmt.BlockStmtBuilder(bLangPackage.getNodeLocation(),
                bLangPackage);
        // Dependency init functions run before this package's own initializers.
        addDependentPkgInitCalls(initFunctionList, pkgInitFuncStmtBuilder, pkgLocation);
        // Define all top-level symbols before visiting bodies so forward references resolve.
        defineStructs(bLangPackage.getStructDefs());
        defineConnectors(bLangPackage.getConnectors());
        resolveStructFieldTypes(bLangPackage.getStructDefs());
        defineFunctions(bLangPackage.getFunctions());
        defineTypeMappers(bLangPackage.getTypeMappers());
        defineServices(bLangPackage.getServices());
        defineAnnotations(bLangPackage.getAnnotationDefs());
        for (CompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) {
            compilationUnit.accept(this);
        }
        ReturnStmt returnStmt = new ReturnStmt(pkgLocation, new Expression[0]);
        pkgInitFuncStmtBuilder.addStmt(returnStmt);
        functionBuilder.setBody(pkgInitFuncStmtBuilder.build());
        BallerinaFunction initFunction = functionBuilder.buildFunction();
        initFunction.setReturnParamTypes(new BType[0]);
        bLangPackage.setInitFunction(initFunction);
        bLangPackage.setSymbolsDefined(true);
    }
    // No-op: file-level constructs are handled via the package's compilation units.
    @Override
    public void visit(BallerinaFile bFile) {
    }
    // No-op: imports are resolved elsewhere; nothing to analyze here.
    @Override
    public void visit(ImportPackage importPkg) {
    }
    /**
     * Analyzes a constant definition: resolves and validates its (value) type,
     * defines its symbol, assigns a static memory slot, and appends its
     * initialization to the package init function.
     */
    @Override
    public void visit(ConstDef constDef) {
        SimpleTypeName typeName = constDef.getTypeName();
        BType bType = BTypes.resolveType(typeName, currentScope, constDef.getNodeLocation());
        constDef.setType(bType);
        // Constants must be of a value type.
        if (!BTypes.isValueType(bType)) {
            BLangExceptionHelper.throwSemanticError(constDef, SemanticErrors.INVALID_TYPE, typeName);
        }
        SymbolName symbolName = new SymbolName(constDef.getName());
        if (currentScope.resolve(symbolName) != null) {
            BLangExceptionHelper.throwSemanticError(constDef,
                    SemanticErrors.REDECLARED_SYMBOL, constDef.getName());
        }
        currentScope.define(symbolName, constDef);
        for (AnnotationAttachment annotationAttachment : constDef.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.CONSTANT);
            annotationAttachment.accept(this);
        }
        // Constants live in static memory.
        ConstantLocation memLocation = new ConstantLocation(++staticMemAddrOffset);
        constDef.setMemoryLocation(memLocation);
        // Synthesize `const = rhs` inside the package init function.
        VariableRefExpr varRefExpr = new VariableRefExpr(constDef.getNodeLocation(), constDef.getName());
        varRefExpr.setVariableDef(constDef);
        VariableDefStmt varDefStmt = new VariableDefStmt(constDef.getNodeLocation(),
                constDef, varRefExpr, constDef.getRhsExpr());
        pkgInitFuncStmtBuilder.addStmt(varDefStmt);
    }
    /**
     * Analyzes a global variable definition and appends the corresponding
     * assignment to the package init function so it is initialized at startup.
     */
    @Override
    public void visit(GlobalVariableDef globalVarDef) {
        VariableDefStmt variableDefStmt = globalVarDef.getVariableDefStmt();
        variableDefStmt.accept(this);
        AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(),
                new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr());
        pkgInitFuncStmtBuilder.addStmt(assignStmt);
    }
    /**
     * Analyzes a service inside its own scope: service-level annotations,
     * service-level variables, then each resource.
     */
    @Override
    public void visit(Service service) {
        openScope(service);
        for (AnnotationAttachment annotationAttachment : service.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.SERVICE);
            annotationAttachment.accept(this);
        }
        for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) {
            variableDefStmt.accept(this);
        }
        for (Resource resource : service.getResources()) {
            resource.accept(this);
        }
        closeScope();
    }
    /**
     * Analyzes a connector definition inside its own scope, allocating connector
     * memory slots for its parameters and recording the total connector memory size.
     */
    @Override
    public void visit(BallerinaConnectorDef connector) {
        openScope(connector);
        for (AnnotationAttachment annotationAttachment : connector.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.CONNECTOR);
            annotationAttachment.accept(this);
        }
        for (ParameterDef parameterDef : connector.getParameterDefs()) {
            parameterDef.setMemoryLocation(new ConnectorVarLocation(++connectorMemAddrOffset));
            parameterDef.accept(this);
        }
        for (VariableDefStmt variableDefStmt : connector.getVariableDefStmts()) {
            variableDefStmt.accept(this);
        }
        for (BallerinaAction action : connector.getActions()) {
            action.accept(this);
        }
        // connectorMemAddrOffset holds the highest used slot; size is offset + 1.
        int sizeOfConnectorMem = connectorMemAddrOffset + 1;
        connector.setSizeOfConnectorMem(sizeOfConnectorMem);
        connectorMemAddrOffset = -1;
        closeScope();
    }
@Override
public void visit(Resource resource) {
openScope(resource);
currentCallableUnit = resource;
for (AnnotationAttachment annotationAttachment : resource.getAnnotations()) {
annotationAttachment.setAttachedPoint(AttachmentPoint.RESOURCE);
annotationAttachment.accept(this);
}
for (ParameterDef parameterDef : resource.getParameterDefs()) {
parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
parameterDef.accept(this);
}
for (Worker worker : resource.getWorkers()) {
visit(worker);
addWorkerSymbol(worker);
}
BlockStmt blockStmt = resource.getResourceBody();
blockStmt.accept(this);
int sizeOfStackFrame = stackFrameOffset + 1;
resource.setStackFrameSize(sizeOfStackFrame);
stackFrameOffset = -1;
currentCallableUnit = null;
closeScope();
}
    /**
     * Analyzes a function inside its own scope: annotations, parameters and named
     * return parameters (each given a stack slot), workers and the body; for a
     * non-native function with return parameters it also verifies that the body
     * guarantees a return and flags unreachable statements after a returning one.
     */
    @Override
    public void visit(BallerinaFunction function) {
        openScope(function);
        currentCallableUnit = function;
        for (AnnotationAttachment annotationAttachment : function.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.FUNCTION);
            annotationAttachment.accept(this);
        }
        for (ParameterDef parameterDef : function.getParameterDefs()) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            parameterDef.accept(this);
        }
        for (ParameterDef parameterDef : function.getReturnParameters()) {
            // Only named return parameters need a stack slot.
            if (parameterDef.getName() != null) {
                parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            }
            parameterDef.accept(this);
        }
        if (!function.isNative()) {
            for (Worker worker : function.getWorkers()) {
                worker.accept(this);
                addWorkerSymbol(worker);
            }
            BlockStmt blockStmt = function.getCallableUnitBody();
            blockStmt.accept(this);
            if (function.getReturnParameters().length > 0) {
                Statement[] stmts = blockStmt.getStatements();
                for (int i = 0; i < stmts.length; i++) {
                    if (stmts[i].resultsInAReturn()) {
                        // Anything after a statement that always returns is unreachable.
                        checkUnreachableStmt(stmts, i + 1);
                    }
                }
                if (!blockStmt.resultsInAReturn()) {
                    throw new SemanticException(function.getNodeLocation().getFileName() + ":" +
                            function.getNodeLocation().getLineNumber() +
                            ": missing return statement");
                }
            }
        }
        // stackFrameOffset holds the highest used slot; size is offset + 1.
        int sizeOfStackFrame = stackFrameOffset + 1;
        function.setStackFrameSize(sizeOfStackFrame);
        stackFrameOffset = -1;
        currentCallableUnit = null;
        closeScope();
    }
    /**
     * Analyzes a type mapper inside its own scope: annotations, parameters and
     * named return parameters (each given a stack slot), and the body for
     * non-native mappers; then records the stack frame size.
     */
    @Override
    public void visit(BTypeMapper typeMapper) {
        openScope(typeMapper);
        currentCallableUnit = typeMapper;
        for (AnnotationAttachment annotationAttachment : typeMapper.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.TYPEMAPPER);
            annotationAttachment.accept(this);
        }
        for (ParameterDef parameterDef : typeMapper.getParameterDefs()) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            parameterDef.accept(this);
        }
        for (ParameterDef parameterDef : typeMapper.getReturnParameters()) {
            // Only named return parameters need a stack slot.
            if (parameterDef.getName() != null) {
                parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            }
            parameterDef.accept(this);
        }
        if (!typeMapper.isNative()) {
            // NOTE(review): manipulates currentScope directly instead of using
            // openScope/closeScope like the other visit methods — confirm intent.
            BlockStmt blockStmt = typeMapper.getCallableUnitBody();
            currentScope = blockStmt;
            blockStmt.accept(this);
            currentScope = blockStmt.getEnclosingScope();
        }
        // stackFrameOffset holds the highest used slot; size is offset + 1.
        int sizeOfStackFrame = stackFrameOffset + 1;
        typeMapper.setStackFrameSize(sizeOfStackFrame);
        stackFrameOffset = -1;
        currentCallableUnit = null;
        closeScope();
    }
    /**
     * Analyzes a connector action inside its own scope: annotations, parameters
     * and named return parameters (each given a stack slot), workers and the body;
     * verifies the first parameter is of the owning connector's type, and for a
     * non-native action with return parameters checks return-path completeness.
     */
    @Override
    public void visit(BallerinaAction action) {
        openScope(action);
        currentCallableUnit = action;
        for (AnnotationAttachment annotationAttachment : action.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.ACTION);
            annotationAttachment.accept(this);
        }
        for (ParameterDef parameterDef : action.getParameterDefs()) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            parameterDef.accept(this);
        }
        // NOTE(review): unguarded [0] — assumes every action has at least one
        // parameter (the connector receiver); confirm the parser guarantees this.
        ParameterDef firstParamDef = action.getParameterDefs()[0];
        if (firstParamDef.getType() != action.getConnectorDef()) {
            BLangExceptionHelper.throwSemanticError(action, SemanticErrors.INCOMPATIBLE_TYPES,
                    action.getConnectorDef(), firstParamDef.getType());
        }
        for (ParameterDef parameterDef : action.getReturnParameters()) {
            // Only named return parameters need a stack slot.
            if (parameterDef.getName() != null) {
                parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            }
            parameterDef.accept(this);
        }
        if (!action.isNative()) {
            for (Worker worker : action.getWorkers()) {
                worker.accept(this);
                addWorkerSymbol(worker);
            }
            BlockStmt blockStmt = action.getCallableUnitBody();
            blockStmt.accept(this);
            if (action.getReturnParameters().length > 0) {
                Statement[] stmts = blockStmt.getStatements();
                for (int i = 0; i < stmts.length; i++) {
                    if (stmts[i].resultsInAReturn()) {
                        // Anything after a statement that always returns is unreachable.
                        checkUnreachableStmt(stmts, i + 1);
                    }
                }
                if (!blockStmt.resultsInAReturn()) {
                    throw new SemanticException(action.getNodeLocation().getFileName() + ":" +
                            action.getNodeLocation().getLineNumber() +
                            ": missing return statement");
                }
            }
        }
        // stackFrameOffset holds the highest used slot; size is offset + 1.
        int sizeOfStackFrame = stackFrameOffset + 1;
        action.setStackFrameSize(sizeOfStackFrame);
        stackFrameOffset = -1;
        currentCallableUnit = null;
        closeScope();
    }
    /**
     * Analyzes a worker in its own scope, allocating worker memory slots for its
     * parameters and named return parameters; saves and restores the enclosing
     * callable unit and scope around the visit.
     */
    @Override
    public void visit(Worker worker) {
        SymbolScope parentScope = currentScope;
        currentScope = worker;
        parentCallableUnit = currentCallableUnit;
        currentCallableUnit = worker;
        for (ParameterDef parameterDef : worker.getParameterDefs()) {
            parameterDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
            parameterDef.accept(this);
        }
        for (ParameterDef parameterDef : worker.getReturnParameters()) {
            // Only named return parameters need a memory slot.
            if (parameterDef.getName() != null) {
                parameterDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
            }
            parameterDef.accept(this);
        }
        BlockStmt blockStmt = worker.getCallableUnitBody();
        blockStmt.accept(this);
        // workerMemAddrOffset holds the highest used slot; size is offset + 1.
        int sizeOfStackFrame = workerMemAddrOffset + 1;
        worker.setStackFrameSize(sizeOfStackFrame);
        workerMemAddrOffset = -1;
        currentCallableUnit = parentCallableUnit;
        currentScope = parentScope;
    }
private void addWorkerSymbol(Worker worker) {
SymbolName symbolName = worker.getSymbolName();
BLangSymbol varSymbol = currentScope.resolve(symbolName);
if (varSymbol != null) {
BLangExceptionHelper.throwSemanticError(worker,
SemanticErrors.REDECLARED_SYMBOL, worker.getName());
}
currentScope.define(symbolName, worker);
}
    // Struct symbols/fields are defined earlier (defineStructs/resolveStructFieldTypes);
    // only the attached annotations remain to be analyzed here.
    @Override
    public void visit(StructDef structDef) {
        for (AnnotationAttachment annotationAttachment : structDef.getAnnotations()) {
            annotationAttachment.setAttachedPoint(AttachmentPoint.STRUCT);
            annotationAttachment.accept(this);
        }
    }
    /**
     * Analyzes an annotation attachment: resolves the annotation definition,
     * verifies the attachment point is permitted, validates the supplied
     * attributes, and fills in default attribute values.
     */
    @Override
    public void visit(AnnotationAttachment annotation) {
        AttachmentPoint attachedPoint = annotation.getAttachedPoint();
        SymbolName annotationSymName = new SymbolName(annotation.getName(), annotation.getPkgPath());
        BLangSymbol annotationSymbol = currentScope.resolve(annotationSymName);
        if (!(annotationSymbol instanceof AnnotationDef)) {
            BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.UNDEFINED_ANNOTATION,
                    annotationSymName);
        }
        AnnotationDef annotationDef = (AnnotationDef) annotationSymbol;
        // An empty attachment-point list means the annotation may attach anywhere.
        if (annotationDef.getAttachmentPoints() != null && annotationDef.getAttachmentPoints().length > 0) {
            Optional<String> matchingAttachmentPoint = Arrays.stream(annotationDef.getAttachmentPoints())
                    .filter(attachmentPoint -> attachmentPoint.equals(attachedPoint.getValue()))
                    .findAny();
            if (!matchingAttachmentPoint.isPresent()) {
                BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.ANNOTATION_NOT_ALLOWED,
                        annotationSymName, attachedPoint);
            }
        }
        validateAttributes(annotation, annotationDef);
        populateDefaultValues(annotation, annotationDef);
    }
    /**
     * Visit and validate attributes of an annotation attachment: each supplied
     * attribute must exist on the annotation definition and its value type must
     * match the declared attribute type (element-wise for array-typed attributes);
     * nested annotation values are validated recursively.
     *
     * @param annotation Annotation attachment to validate attributes
     * @param annotationDef Definition of the annotation
     */
    private void validateAttributes(AnnotationAttachment annotation, AnnotationDef annotationDef) {
        annotation.getAttributeNameValuePairs().forEach((attributeName, attributeValue) -> {
            // The attribute must be declared on the annotation definition.
            BLangSymbol attributeSymbol = annotationDef.resolveMembers(new SymbolName(attributeName));
            if (attributeSymbol == null || !(attributeSymbol instanceof AnnotationAttributeDef)) {
                BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.NO_SUCH_ATTRIBUTE,
                        attributeName, annotation.getName());
            }
            AnnotationAttributeDef attributeDef = ((AnnotationAttributeDef) attributeSymbol);
            SimpleTypeName attributeType = attributeDef.getTypeName();
            SimpleTypeName valueType = attributeValue.getType();
            BLangSymbol valueTypeSymbol = currentScope.resolve(valueType.getSymbolName());
            BLangSymbol attributeTypeSymbol = annotationDef.resolve(new SymbolName(attributeType.getName(),
                    attributeType.getPackagePath()));
            if (attributeType.isArrayType()) {
                if (!valueType.isArrayType()) {
                    BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                            attributeTypeSymbol.getSymbolName() + TypeConstants.ARRAY_TNAME,
                            valueTypeSymbol.getSymbolName());
                }
                // Check each element of the array value against the element type.
                AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray();
                for (AnnotationAttributeValue value : valuesArray) {
                    valueTypeSymbol = currentScope.resolve(value.getType().getSymbolName());
                    if (attributeTypeSymbol != valueTypeSymbol) {
                        BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                                attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName());
                    }
                    // Recurse into nested annotation values.
                    AnnotationAttachment childAnnotation = value.getAnnotationValue();
                    if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) {
                        validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol);
                    }
                }
            } else {
                if (valueType.isArrayType()) {
                    BLangExceptionHelper.throwSemanticError(attributeValue,
                            SemanticErrors.INCOMPATIBLE_TYPES_ARRAY_FOUND, attributeTypeSymbol.getName());
                }
                if (attributeTypeSymbol != valueTypeSymbol) {
                    BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                            attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName());
                }
                // Recurse into nested annotation values.
                AnnotationAttachment childAnnotation = attributeValue.getAnnotationValue();
                if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) {
                    validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol);
                }
            }
        });
    }
    /**
     * Populate default values to the annotation attributes: attributes missing
     * from the attachment receive the definition's default (when one exists),
     * and nested annotation-typed values are populated recursively.
     *
     * @param annotation Annotation attachment to populate default values
     * @param annotationDef Definition of the annotation corresponds to the provided annotation attachment
     */
    private void populateDefaultValues(AnnotationAttachment annotation, AnnotationDef annotationDef) {
        Map<String, AnnotationAttributeValue> attributeValPairs = annotation.getAttributeNameValuePairs();
        for (AnnotationAttributeDef attributeDef : annotationDef.getAttributeDefs()) {
            String attributeName = attributeDef.getName();
            // Attribute not supplied: fall back to the declared default, if any.
            if (!attributeValPairs.containsKey(attributeName)) {
                BasicLiteral defaultValue = attributeDef.getAttributeValue();
                if (defaultValue != null) {
                    annotation.addAttributeNameValuePair(attributeName,
                            new AnnotationAttributeValue(defaultValue.getBValue(), defaultValue.getTypeName(), null));
                }
                continue;
            }
            // Attribute supplied: recurse into nested annotation values to fill
            // their defaults too.
            AnnotationAttributeValue attributeValue = attributeValPairs.get(attributeName);
            SimpleTypeName valueType = attributeValue.getType();
            if (valueType.isArrayType()) {
                AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray();
                for (AnnotationAttributeValue value : valuesArray) {
                    AnnotationAttachment annotationTypeVal = value.getAnnotationValue();
                    if (annotationTypeVal == null) {
                        continue;
                    }
                    SimpleTypeName attributeType = attributeDef.getTypeName();
                    BLangSymbol attributeTypeSymbol = annotationDef.resolve(
                            new SymbolName(attributeType.getName(), attributeType.getPackagePath()));
                    if (attributeTypeSymbol instanceof AnnotationDef) {
                        populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol);
                    }
                }
            } else {
                AnnotationAttachment annotationTypeVal = attributeValue.getAnnotationValue();
                if (annotationTypeVal == null) {
                    continue;
                }
                BLangSymbol attributeTypeSymbol = annotationDef.resolve(attributeDef.getTypeName().getSymbolName());
                if (attributeTypeSymbol instanceof AnnotationDef) {
                    populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol);
                }
            }
        }
    }
    /**
     * Analyzes an annotation attribute definition: when a default value is given,
     * the attribute type must be a built-in value type matching the literal;
     * otherwise the attribute type must be a value type or another annotation.
     */
    @Override
    public void visit(AnnotationAttributeDef annotationAttributeDef) {
        SimpleTypeName fieldType = annotationAttributeDef.getTypeName();
        BasicLiteral fieldVal = annotationAttributeDef.getAttributeValue();
        if (fieldVal != null) {
            fieldVal.accept(this);
            BType valueType = fieldVal.getType();
            // Defaults are only allowed for built-in value-typed attributes.
            if (!BTypes.isBuiltInTypeName(fieldType.getName())) {
                BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE);
            }
            BLangSymbol typeSymbol = currentScope.resolve(fieldType.getSymbolName());
            BType fieldBType = (BType) typeSymbol;
            if (!BTypes.isValueType(fieldBType)) {
                BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE);
            }
            // The literal's type must match the declared attribute type exactly.
            if (fieldBType != valueType) {
                BLangExceptionHelper.throwSemanticError(annotationAttributeDef,
                        SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, fieldType, fieldVal.getTypeName());
            }
        } else {
            BLangSymbol typeSymbol;
            if (fieldType.isArrayType()) {
                typeSymbol = currentScope.resolve(new SymbolName(fieldType.getName(), fieldType.getPackagePath()));
            } else {
                typeSymbol = currentScope.resolve(fieldType.getSymbolName());
            }
            // Without a default, the type must be a value type or an annotation.
            if (((typeSymbol instanceof BType) && !BTypes.isValueType((BType) typeSymbol)) ||
                    (!(typeSymbol instanceof BType) && !(typeSymbol instanceof AnnotationDef))) {
                BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_ATTRIBUTE_TYPE,
                        fieldType);
            }
            // Annotation-typed attributes carry their declaring package's path.
            if (!(typeSymbol instanceof BType)) {
                fieldType.setPkgPath(annotationAttributeDef.getPackagePath());
            }
        }
    }
@Override
public void visit(AnnotationDef annotationDef) {
for (AnnotationAttributeDef fields : annotationDef.getAttributeDefs()) {
fields.accept(this);
}
for (AnnotationAttachment annotationAttachment : annotationDef.getAnnotations()) {
annotationAttachment.setAttachedPoint(AttachmentPoint.ANNOTATION);
annotationAttachment.accept(this);
}
}
@Override
public void visit(ParameterDef paramDef) {
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
if (paramDef.getAnnotations() == null) {
return;
}
for (AnnotationAttachment annotationAttachment : paramDef.getAnnotations()) {
annotationAttachment.setAttachedPoint(AttachmentPoint.PARAMETER);
annotationAttachment.accept(this);
}
}
    // No-op: variable definitions are analyzed through visit(VariableDefStmt).
    @Override
    public void visit(VariableDef varDef) {
    }
    /**
     * Analyzes a local variable definition statement: resolves the declared type,
     * checks for redeclaration within the same scope, defines the symbol and
     * assigns a memory slot, then type-checks the optional initializer, inserting
     * an implicit widening cast where one is possible.
     */
    @Override
    public void visit(VariableDefStmt varDefStmt) {
        VariableDef varDef = varDefStmt.getVariableDef();
        BType varBType = BTypes.resolveType(varDef.getTypeName(), currentScope, varDef.getNodeLocation());
        varDef.setType(varBType);
        SymbolName symbolName = new SymbolName(varDef.getName());
        BLangSymbol varSymbol = currentScope.resolve(symbolName);
        // NOTE(review): scope names are compared with == — presumably interned
        // strings or shared instances; confirm.
        if (varSymbol != null && varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) {
            BLangExceptionHelper.throwSemanticError(varDef, SemanticErrors.REDECLARED_SYMBOL, varDef.getName());
        }
        currentScope.define(symbolName, varDef);
        setMemoryLocation(varDef);
        Expression rExpr = varDefStmt.getRExpr();
        if (rExpr == null) {
            // Declaration without initializer: nothing more to check.
            return;
        }
        if (rExpr instanceof NullLiteral) {
            // null can only initialize reference types.
            if (BTypes.isValueType(varBType)) {
                BLangExceptionHelper.throwSemanticError(rExpr, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rExpr.getType(),
                        varBType);
            }
            rExpr.setType(varBType);
            return;
        }
        if (rExpr instanceof RefTypeInitExpr) {
            // Rewrite the generic init expression to the concrete form for varBType.
            RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, varBType);
            varDefStmt.setRExpr(refTypeInitExpr);
            refTypeInitExpr.accept(this);
            return;
        }
        if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) {
            rExpr.accept(this);
            CallableUnitInvocationExpr invocationExpr = (CallableUnitInvocationExpr) rExpr;
            BType[] returnTypes = invocationExpr.getTypes();
            if (returnTypes.length != 1) {
                // A single-variable definition needs exactly one return value.
                BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, "1",
                        returnTypes.length);
            } else if (varBType == BTypes.typeAny) {
                return;
            } else if ((varBType != BTypes.typeMap) && (returnTypes[0] != BTypes.typeMap) &&
                    (!varBType.equals(returnTypes[0]))) {
                // Try an implicit widening cast before reporting an error.
                TypeCastExpression newExpr = checkWideningPossible(varBType, rExpr);
                if (newExpr != null) {
                    newExpr.accept(this);
                    varDefStmt.setRExpr(newExpr);
                } else {
                    BLangExceptionHelper.throwSemanticError(rExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                            returnTypes[0], varBType);
                }
            }
            return;
        }
        visitSingleValueExpr(rExpr);
        if (varBType == BTypes.typeAny) {
            // any accepts a value of any type without further checks.
            return;
        }
        BType rType = rExpr.getType();
        if (rExpr instanceof TypeCastExpression && rType == null) {
            rType = BTypes.resolveType(((TypeCastExpression) rExpr).getTypeName(), currentScope, null);
        }
        if (!varBType.equals(rType)) {
            // Try an implicit widening cast before reporting an error.
            TypeCastExpression newExpr = checkWideningPossible(varBType, rExpr);
            if (newExpr != null) {
                newExpr.accept(this);
                varDefStmt.setRExpr(newExpr);
            } else {
                BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                        rExpr.getType(), varBType);
            }
        }
    }
    /**
     * Analyzes an assignment statement: validates the left-hand expressions, then
     * type-checks the right-hand side (multi-return invocations, null literals,
     * ref-type init expressions or plain single-value expressions), inserting an
     * implicit widening cast where one is possible.
     */
    @Override
    public void visit(AssignStmt assignStmt) {
        Expression[] lExprs = assignStmt.getLExprs();
        visitLExprsOfAssignment(assignStmt, lExprs);
        Expression rExpr = assignStmt.getRExpr();
        if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) {
            // Multi-return invocations are matched against all left-hand expressions.
            rExpr.accept(this);
            checkForMultiAssignmentErrors(assignStmt, lExprs, (CallableUnitInvocationExpr) rExpr);
            return;
        }
        // Single-value forms below only consider the first (sole) left expression.
        Expression lExpr = assignStmt.getLExprs()[0];
        BType lExprType = lExpr.getType();
        if (rExpr instanceof NullLiteral) {
            // null can only be assigned to reference types.
            if (BTypes.isValueType(lExprType)) {
                BLangExceptionHelper.throwSemanticError(lExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                        rExpr.getType(), lExpr.getType());
            }
            rExpr.setType(lExprType);
            return;
        }
        if (rExpr instanceof RefTypeInitExpr) {
            // Rewrite the generic init expression to the concrete form for lExprType.
            RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lExprType);
            assignStmt.setRExpr(refTypeInitExpr);
            refTypeInitExpr.accept(this);
            return;
        }
        visitSingleValueExpr(rExpr);
        if (lExprType == BTypes.typeAny) {
            // any accepts a value of any type without further checks.
            return;
        }
        BType rType = rExpr.getType();
        if (rExpr instanceof TypeCastExpression && rType == null) {
            rType = BTypes.resolveType(((TypeCastExpression) rExpr).getTypeName(), currentScope, null);
        }
        if (!lExprType.equals(rType)) {
            // Try an implicit widening cast before reporting an error.
            TypeCastExpression newExpr = checkWideningPossible(lExpr.getType(), rExpr);
            if (newExpr != null) {
                newExpr.accept(this);
                // NOTE(review): uses setRhsExpr here but setRExpr above — confirm
                // both setters update the same expression.
                assignStmt.setRhsExpr(newExpr);
            } else {
                BLangExceptionHelper.throwSemanticError(lExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                        rExpr.getType(), lExpr.getType());
            }
        }
    }
    /**
     * Analyzes a block statement in its own scope: rejects break outside a while
     * loop, flags statements that follow a terminating statement as unreachable,
     * and marks the block as returning when any statement guarantees a return.
     */
    @Override
    public void visit(BlockStmt blockStmt) {
        openScope(blockStmt);
        for (int stmtIndex = 0; stmtIndex < blockStmt.getStatements().length; stmtIndex++) {
            Statement stmt = blockStmt.getStatements()[stmtIndex];
            // break is only valid inside a while loop (whileStmtCount > 0).
            if (stmt instanceof BreakStmt && whileStmtCount < 1) {
                BLangExceptionHelper.throwSemanticError(stmt,
                        SemanticErrors.BREAK_STMT_NOT_ALLOWED_HERE);
            }
            // Statements after an unconditional terminator are unreachable.
            if (stmt instanceof ReturnStmt || stmt instanceof ReplyStmt || stmt instanceof BreakStmt
                    || stmt instanceof ThrowStmt) {
                checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
            }
            stmt.accept(this);
            if (stmt.resultsInAReturn()) {
                checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
                blockStmt.setReturns(true);
            }
        }
        closeScope();
    }
    // No-op: comments carry no semantics.
    @Override
    public void visit(CommentStmt commentStmt) {
    }
/**
 * Analyzes a while statement: the condition must be boolean and the body
 * non-empty. Loop nesting depth is tracked via {@code whileStmtCount} so that
 * break statements can be validated by the block visitor.
 *
 * <p>Fix: the original carried a duplicated {@code @Override} annotation;
 * {@code @Override} is not a repeatable annotation, so the duplicate is a
 * compile-time error and has been removed.
 */
@Override
public void visit(WhileStmt whileStmt) {
    whileStmtCount++;
    Expression expr = whileStmt.getCondition();
    visitSingleValueExpr(expr);
    if (expr.getType() != BTypes.typeBoolean) {
        BLangExceptionHelper
                .throwSemanticError(whileStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType());
    }
    BlockStmt blockStmt = whileStmt.getBody();
    if (blockStmt.getStatements().length == 0) {
        // An empty loop body is a semantic error.
        BLangExceptionHelper.throwSemanticError(blockStmt, SemanticErrors.NO_STATEMENTS_WHILE_LOOP);
    }
    blockStmt.accept(this);
    whileStmtCount--;
}
/**
 * Nothing to analyze for a break statement itself; its placement inside a loop
 * is validated by the block visitor using {@code whileStmtCount}.
 */
@Override
public void visit(BreakStmt breakStmt) {
}
/**
 * Analyzes a try-catch statement: visits the try block, allocates a stack-frame
 * slot for the catch parameter, then analyzes the catch parameter and body.
 */
@Override
public void visit(TryCatchStmt tryCatchStmt) {
    tryCatchStmt.getTryBlock().accept(this);
    // The catch parameter occupies the next slot in the current stack frame.
    tryCatchStmt.getCatchBlock().getParameterDef().setMemoryLocation(new StackVarLocation(++stackFrameOffset));
    tryCatchStmt.getCatchBlock().getParameterDef().accept(this);
    tryCatchStmt.getCatchBlock().getCatchBlockStmt().accept(this);
}
/**
 * Analyzes a throw statement. Only an expression of the built-in 'exception'
 * type may be thrown: either a variable reference of exception type, or a
 * single-return function invocation whose first return type is exception.
 * Anything else is reported as a semantic error.
 *
 * <p>Fix: the original unconditionally cast any non-VariableRefExpr to
 * FunctionInvocationExpr, so other expression kinds crashed with a
 * ClassCastException instead of producing the intended SemanticException;
 * the cast is now guarded with {@code instanceof}.
 */
@Override
public void visit(ThrowStmt throwStmt) {
    throwStmt.getExpr().accept(this);
    if (throwStmt.getExpr() instanceof VariableRefExpr) {
        if (throwStmt.getExpr().getType() instanceof BExceptionType) {
            throwStmt.setReturns(true);
            return;
        }
    } else if (throwStmt.getExpr() instanceof FunctionInvocationExpr) {
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) throwStmt.getExpr();
        if (!funcIExpr.isMultiReturnExpr() && funcIExpr.getTypes().length > 0
                && funcIExpr.getTypes()[0] instanceof BExceptionType) {
            throwStmt.setReturns(true);
            return;
        }
    }
    throw new SemanticException(throwStmt.getNodeLocation().getFileName() + ":" +
            throwStmt.getNodeLocation().getLineNumber() +
            ": only a variable reference of type 'exception' is allowed in throw statement");
}
/** Analyzes a standalone function-invocation statement by delegating to its expression. */
@Override
public void visit(FunctionInvocationStmt functionInvocationStmt) {
    functionInvocationStmt.getFunctionInvocationExpr().accept(this);
}

/** Analyzes a standalone action-invocation statement by delegating to its expression. */
@Override
public void visit(ActionInvocationStmt actionInvocationStmt) {
    actionInvocationStmt.getActionInvocationExpr().accept(this);
}
/**
 * Analyzes a worker invocation: validates the message expression being handed
 * to the worker, links the statement to its worker definition, and records the
 * worker's declared return types on the statement node.
 */
@Override
public void visit(WorkerInvocationStmt workerInvocationStmt) {
    VariableRefExpr inMsgRef = workerInvocationStmt.getInMsg();
    inMsgRef.accept(this);

    linkWorker(workerInvocationStmt);

    ParameterDef[] retParams = workerInvocationStmt.getCallableUnit().getReturnParameters();
    BType[] retTypes = new BType[retParams.length];
    int idx = 0;
    for (ParameterDef retParam : retParams) {
        retTypes[idx++] = retParam.getType();
    }
    workerInvocationStmt.setTypes(retTypes);
}
/**
 * Analyzes a worker reply: validates the receiving variable reference, resolves
 * the named worker in the current scope, and binds it to the statement.
 * Reports an error if the name does not resolve to a worker.
 */
@Override
public void visit(WorkerReplyStmt workerReplyStmt) {
    SymbolName workerSymbolName = new SymbolName(workerReplyStmt.getWorkerName());
    VariableRefExpr receiveExpr = workerReplyStmt.getReceiveExpr();
    receiveExpr.accept(this);

    BLangSymbol resolved = currentScope.resolve(workerSymbolName);
    if (!(resolved instanceof Worker)) {
        BLangExceptionHelper.throwSemanticError(receiveExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                workerSymbolName);
    }
    workerReplyStmt.setWorker((Worker) resolved);
}
/**
 * Analyzes a fork-join statement: validates the forked message reference,
 * analyzes each worker, then the join and timeout branches — each in its own
 * scope with a freshly allocated stack slot for its result parameter. The
 * statement counts as returning only if both branches always return.
 */
@Override
public void visit(ForkJoinStmt forkJoinStmt) {
    boolean stmtReturns = true;
    openScope(forkJoinStmt);
    // The forked expression must be of type message.
    VariableRefExpr messageReference = forkJoinStmt.getMessageReference();
    messageReference.accept(this);
    if (!messageReference.getType().equals(BTypes.typeMessage)) {
        throw new SemanticException("Incompatible types: expected a message in " +
                messageReference.getNodeLocation().getFileName() + ":" +
                messageReference.getNodeLocation().getLineNumber());
    }
    for (Worker worker: forkJoinStmt.getWorkers()) {
        worker.accept(this);
    }
    // Join branch: its result parameter must be a message[].
    ForkJoinStmt.Join join = forkJoinStmt.getJoin();
    openScope(join);
    ParameterDef parameter = join.getJoinResult();
    parameter.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
    parameter.accept(this);
    join.define(parameter.getSymbolName(), parameter);
    if (!(parameter.getType() instanceof BArrayType &&
            (((BArrayType) parameter.getType()).getElementType() == BTypes.typeMessage))) {
        throw new SemanticException("Incompatible types: expected a message[] in " +
                parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation().getLineNumber());
    }
    Statement joinBody = join.getJoinBlock();
    joinBody.accept(this);
    stmtReturns &= joinBody.resultsInAReturn();
    closeScope();
    // Timeout branch: same message[] constraint on its result parameter.
    ForkJoinStmt.Timeout timeout = forkJoinStmt.getTimeout();
    openScope(timeout);
    Expression timeoutExpr = timeout.getTimeoutExpression();
    timeoutExpr.accept(this);
    ParameterDef timeoutParam = timeout.getTimeoutResult();
    timeoutParam.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
    timeoutParam.accept(this);
    timeout.define(timeoutParam.getSymbolName(), timeoutParam);
    if (!(timeoutParam.getType() instanceof BArrayType &&
            (((BArrayType) timeoutParam.getType()).getElementType() == BTypes.typeMessage))) {
        throw new SemanticException("Incompatible types: expected a message[] in " +
                timeoutParam.getNodeLocation().getFileName() + ":" +
                timeoutParam.getNodeLocation().getLineNumber());
    }
    Statement timeoutBody = timeout.getTimeoutBlock();
    timeoutBody.accept(this);
    stmtReturns &= timeoutBody.resultsInAReturn();
    closeScope();
    forkJoinStmt.setReturns(stmtReturns);
    closeScope();
}
/**
 * Analyzes a reply statement. reply is only legal inside a resource (not in a
 * function or action), may not carry an action invocation, and its expression
 * must evaluate to a message.
 */
@Override
public void visit(ReplyStmt replyStmt) {
    if (currentCallableUnit instanceof Function) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_FUNCTION);
    } else if (currentCallableUnit instanceof Action) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_ACTION);
    }

    Expression replyExpr = replyStmt.getReplyExpr();
    if (replyExpr instanceof ActionInvocationExpr) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_REPLY);
    }

    visitSingleValueExpr(replyExpr);
    if (BTypes.typeMessage != replyExpr.getType()) {
        BLangExceptionHelper.throwSemanticError(replyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                BTypes.typeMessage, replyExpr.getType());
    }
}
/**
 * Analyzes a return statement against the enclosing callable unit's declared
 * return parameters: rejects returns in resources, synthesizes implicit
 * returns of named return parameters, validates argument counts, and inserts
 * implicit widening casts where the lattice allows them.
 */
@Override
public void visit(ReturnStmt returnStmt) {
    if (currentCallableUnit instanceof Resource) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.RETURN_CANNOT_USED_IN_RESOURCE);
    }
    Expression[] returnArgExprs = returnStmt.getExprs();
    ParameterDef[] returnParamsOfCU = currentCallableUnit.getReturnParameters();
    // Bare 'return' with no declared return parameters: nothing to check.
    if (returnArgExprs.length == 0 && returnParamsOfCU.length == 0) {
        returnStmt.setReturns(true);
        return;
    }
    // Bare 'return' with NAMED return parameters: synthesize references to them.
    if (returnArgExprs.length == 0 && returnParamsOfCU[0].getName() != null) {
        Expression[] returnExprs = new Expression[returnParamsOfCU.length];
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            VariableRefExpr variableRefExpr = new VariableRefExpr(returnStmt.getNodeLocation(),
                    returnParamsOfCU[i].getSymbolName());
            visit(variableRefExpr);
            returnExprs[i] = variableRefExpr;
        }
        returnStmt.setExprs(returnExprs);
        returnStmt.setReturns(true);
        return;
    } else if (returnArgExprs.length == 0) {
        // Unnamed return parameters require explicit return values.
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
    }
    BType[] typesOfReturnExprs = new BType[returnArgExprs.length];
    for (int i = 0; i < returnArgExprs.length; i++) {
        Expression returnArgExpr = returnArgExprs[i];
        returnArgExpr.accept(this);
        typesOfReturnExprs[i] = returnArgExpr.getType();
    }
    // Special case: a single function invocation may supply all return values.
    if (returnArgExprs.length == 1 && returnArgExprs[0] instanceof FunctionInvocationExpr) {
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) returnArgExprs[0];
        BType[] funcIExprReturnTypes = funcIExpr.getTypes();
        if (funcIExprReturnTypes.length > returnParamsOfCU.length) {
            BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
        } else if (funcIExprReturnTypes.length < returnParamsOfCU.length) {
            BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
        }
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            // 'any' accepts every type; otherwise types must match exactly.
            if (returnParamsOfCU[i].getType() != BTypes.typeAny &&
                    !funcIExprReturnTypes[i].equals(returnParamsOfCU[i].getType())) {
                BLangExceptionHelper.throwSemanticError(returnStmt,
                        SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, returnParamsOfCU[i].getType(),
                        funcIExprReturnTypes[i]);
            }
        }
        returnStmt.setReturns(true);
        return;
    }
    if (typesOfReturnExprs.length > returnParamsOfCU.length) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
    } else if (typesOfReturnExprs.length < returnParamsOfCU.length) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
    } else {
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            if (returnArgExprs[i] instanceof ActionInvocationExpr) {
                BLangExceptionHelper.throwSemanticError(returnStmt,
                        SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_RETURN);
            }
            // A multi-return function invocation cannot feed a single slot.
            if (returnArgExprs[i] instanceof FunctionInvocationExpr) {
                FunctionInvocationExpr funcIExpr = ((FunctionInvocationExpr) returnArgExprs[i]);
                if (funcIExpr.getTypes().length > 1) {
                    BLangExceptionHelper.throwSemanticError(returnStmt,
                            SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT,
                            funcIExpr.getCallableUnit().getName());
                }
            }
            BType targetType = returnParamsOfCU[i].getType();
            if (NativeCastMapper.isCompatible(returnParamsOfCU[i].getType(), typesOfReturnExprs[i])) {
                continue;
            }
            // Try an implicit widening cast before rejecting the value.
            TypeCastExpression newExpr = checkWideningPossible(targetType, returnArgExprs[i]);
            if (newExpr != null) {
                newExpr.accept(this);
                returnArgExprs[i] = newExpr;
                continue;
            }
            BLangExceptionHelper.throwSemanticError(returnStmt,
                    SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, returnParamsOfCU[i].getType(),
                    typesOfReturnExprs[i]);
        }
    }
    returnStmt.setReturns(true);
}
/**
 * Analyzes an instance creation ('create') expression; value types cannot be
 * instantiated this way.
 */
@Override
public void visit(InstanceCreationExpr instanceCreationExpr) {
    visitSingleValueExpr(instanceCreationExpr);
    BType createdType = instanceCreationExpr.getType();
    if (BTypes.isValueType(createdType)) {
        BLangExceptionHelper.throwSemanticError(instanceCreationExpr,
                SemanticErrors.CANNOT_USE_CREATE_FOR_VALUE_TYPES, createdType);
    }
}
/**
 * Analyzes a function invocation: validates every argument expression, links
 * the call to its function definition, and records the declared return types.
 */
@Override
public void visit(FunctionInvocationExpr funcIExpr) {
    for (Expression argExpr : funcIExpr.getArgExprs()) {
        visitSingleValueExpr(argExpr);
    }
    linkFunction(funcIExpr);
    funcIExpr.setTypes(funcIExpr.getCallableUnit().getReturnParamTypes());
}

/**
 * Analyzes an action invocation: validates every argument expression, links
 * the call to its action definition, and records the declared return types.
 */
@Override
public void visit(ActionInvocationExpr actionIExpr) {
    for (Expression argExpr : actionIExpr.getArgExprs()) {
        visitSingleValueExpr(argExpr);
    }
    linkAction(actionIExpr);
    actionIExpr.setTypes(actionIExpr.getCallableUnit().getReturnParamTypes());
}
/** Resolves and assigns the declared type of a basic literal. */
@Override
public void visit(BasicLiteral basicLiteral) {
    basicLiteral.setType(
            BTypes.resolveType(basicLiteral.getTypeName(), currentScope, basicLiteral.getNodeLocation()));
}
/** Analyzes '/': defined for int and float operands only. */
@Override
public void visit(DivideExpr divideExpr) {
    BType resultType = verifyBinaryArithmeticExprType(divideExpr);
    if (resultType == BTypes.typeFloat) {
        divideExpr.setEvalFunc(DivideExpr.DIV_FLOAT_FUNC);
    } else if (resultType == BTypes.typeInt) {
        divideExpr.setEvalFunc(DivideExpr.DIV_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(divideExpr);
    }
}

/** Analyzes '%': defined for int and float operands only. */
@Override
public void visit(ModExpression modExpression) {
    BType resultType = verifyBinaryArithmeticExprType(modExpression);
    if (resultType == BTypes.typeFloat) {
        modExpression.setEvalFunc(ModExpression.MOD_FLOAT_FUNC);
    } else if (resultType == BTypes.typeInt) {
        modExpression.setEvalFunc(ModExpression.MOD_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(modExpression);
    }
}
/**
 * Analyzes a unary expression: '-' and '+' are defined for int/float operands,
 * '!' for boolean. The result type is the operand's type; an eval function is
 * selected per operator/type combination.
 */
@Override
public void visit(UnaryExpression unaryExpr) {
    visitSingleValueExpr(unaryExpr.getRExpr());
    unaryExpr.setType(unaryExpr.getRExpr().getType());
    if (Operator.SUB.equals(unaryExpr.getOperator())) {
        if (unaryExpr.getType() == BTypes.typeInt) {
            unaryExpr.setEvalFunc(UnaryExpression.NEGATIVE_INT_FUNC);
        } else if (unaryExpr.getType() == BTypes.typeFloat) {
            unaryExpr.setEvalFunc(UnaryExpression.NEGATIVE_FLOAT_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
    } else if (Operator.ADD.equals(unaryExpr.getOperator())) {
        if (unaryExpr.getType() == BTypes.typeInt) {
            unaryExpr.setEvalFunc(UnaryExpression.POSITIVE_INT_FUNC);
        } else if (unaryExpr.getType() == BTypes.typeFloat) {
            unaryExpr.setEvalFunc(UnaryExpression.POSITIVE_FLOAT_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
    } else if (Operator.NOT.equals(unaryExpr.getOperator())) {
        if (unaryExpr.getType() == BTypes.typeBoolean) {
            unaryExpr.setEvalFunc(UnaryExpression.NOT_BOOLEAN_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
    } else {
        // Any other operator is not a valid unary operator.
        BLangExceptionHelper.throwSemanticError(unaryExpr, SemanticErrors.UNKNOWN_OPERATOR_IN_UNARY,
                unaryExpr.getOperator());
    }
}
/** Analyzes '+': defined for int, float, and string (concatenation) operands. */
@Override
public void visit(AddExpression addExpr) {
    BType resultType = verifyBinaryArithmeticExprType(addExpr);
    if (resultType == BTypes.typeString) {
        addExpr.setEvalFunc(AddExpression.ADD_STRING_FUNC);
    } else if (resultType == BTypes.typeFloat) {
        addExpr.setEvalFunc(AddExpression.ADD_FLOAT_FUNC);
    } else if (resultType == BTypes.typeInt) {
        addExpr.setEvalFunc(AddExpression.ADD_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(addExpr);
    }
}

/** Analyzes '*': defined for int and float operands only. */
@Override
public void visit(MultExpression multExpr) {
    BType resultType = verifyBinaryArithmeticExprType(multExpr);
    if (resultType == BTypes.typeFloat) {
        multExpr.setEvalFunc(MultExpression.MULT_FLOAT_FUNC);
    } else if (resultType == BTypes.typeInt) {
        multExpr.setEvalFunc(MultExpression.MULT_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(multExpr);
    }
}

/** Analyzes '-': defined for int and float operands only. */
@Override
public void visit(SubtractExpression subtractExpr) {
    BType resultType = verifyBinaryArithmeticExprType(subtractExpr);
    if (resultType == BTypes.typeFloat) {
        subtractExpr.setEvalFunc(SubtractExpression.SUB_FLOAT_FUNC);
    } else if (resultType == BTypes.typeInt) {
        subtractExpr.setEvalFunc(SubtractExpression.SUB_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(subtractExpr);
    }
}
/** Analyzes '&&': operand types are checked in visitBinaryLogicalExpr. */
@Override
public void visit(AndExpression andExpr) {
    visitBinaryLogicalExpr(andExpr);
    andExpr.setEvalFunc(AndExpression.AND_FUNC);
}

/** Analyzes '||': operand types are checked in visitBinaryLogicalExpr. */
@Override
public void visit(OrExpression orExpr) {
    visitBinaryLogicalExpr(orExpr);
    orExpr.setEvalFunc(OrExpression.OR_FUNC);
}
/**
 * Analyzes '==': operands must share one of the comparable value types
 * (int, float, boolean, string), or one side must be the null literal.
 */
@Override
public void visit(EqualExpression equalExpr) {
    BType operandType = verifyBinaryEqualityExprType(equalExpr);
    if (operandType == BTypes.typeNull) {
        equalExpr.setRefTypeEvalFunc(EqualExpression.EQUAL_NULL_FUNC);
    } else if (operandType == BTypes.typeString) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_STRING_FUNC);
    } else if (operandType == BTypes.typeBoolean) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_BOOLEAN_FUNC);
    } else if (operandType == BTypes.typeFloat) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(equalExpr);
    }
}

/**
 * Analyzes '!=': same operand rules as '==', with the negated eval functions.
 */
@Override
public void visit(NotEqualExpression notEqualExpr) {
    BType operandType = verifyBinaryEqualityExprType(notEqualExpr);
    if (operandType == BTypes.typeNull) {
        notEqualExpr.setRefTypeEvalFunc(NotEqualExpression.NOT_EQUAL_NULL_FUNC);
    } else if (operandType == BTypes.typeString) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_STRING_FUNC);
    } else if (operandType == BTypes.typeBoolean) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_BOOLEAN_FUNC);
    } else if (operandType == BTypes.typeFloat) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(notEqualExpr);
    }
}
/** Analyzes '>=': defined for int and float operands only. */
@Override
public void visit(GreaterEqualExpression greaterEqualExpr) {
    BType operandType = verifyBinaryCompareExprType(greaterEqualExpr);
    if (operandType == BTypes.typeFloat) {
        greaterEqualExpr.setEvalFunc(GreaterEqualExpression.GREATER_EQUAL_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        greaterEqualExpr.setEvalFunc(GreaterEqualExpression.GREATER_EQUAL_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(greaterEqualExpr);
    }
}

/** Analyzes '>': defined for int and float operands only. */
@Override
public void visit(GreaterThanExpression greaterThanExpr) {
    BType operandType = verifyBinaryCompareExprType(greaterThanExpr);
    if (operandType == BTypes.typeFloat) {
        greaterThanExpr.setEvalFunc(GreaterThanExpression.GREATER_THAN_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        greaterThanExpr.setEvalFunc(GreaterThanExpression.GREATER_THAN_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(greaterThanExpr);
    }
}

/** Analyzes '<=': defined for int and float operands only. */
@Override
public void visit(LessEqualExpression lessEqualExpr) {
    BType operandType = verifyBinaryCompareExprType(lessEqualExpr);
    if (operandType == BTypes.typeFloat) {
        lessEqualExpr.setEvalFunc(LessEqualExpression.LESS_EQUAL_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        lessEqualExpr.setEvalFunc(LessEqualExpression.LESS_EQUAL_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(lessEqualExpr);
    }
}

/** Analyzes '<': defined for int and float operands only. */
@Override
public void visit(LessThanExpression lessThanExpr) {
    BType operandType = verifyBinaryCompareExprType(lessThanExpr);
    if (operandType == BTypes.typeFloat) {
        lessThanExpr.setEvalFunc(LessThanExpression.LESS_THAN_FLOAT_FUNC);
    } else if (operandType == BTypes.typeInt) {
        lessThanExpr.setEvalFunc(LessThanExpression.LESS_THAN_INT_FUNC);
    } else {
        throwInvalidBinaryOpError(lessThanExpr);
    }
}
/**
 * Analyzes an array/map access: validates the referenced variable, then
 * delegates index and element-type checking to handleArrayType.
 */
@Override
public void visit(ArrayMapAccessExpr arrayMapAccessExpr) {
    ((VariableRefExpr) arrayMapAccessExpr.getRExpr()).accept(this);
    handleArrayType(arrayMapAccessExpr);
}

/** Analyzes a field access expression starting from the current scope. */
@Override
public void visit(FieldAccessExpr fieldAccessExpr) {
    visitField(fieldAccessExpr, currentScope);
}
/** No additional semantic analysis is performed for JSON field access expressions. */
@Override
public void visit(JSONFieldAccessExpr jsonFieldExpr) {
}

/** Analyzes a generic reference-type init expression via the shared map/JSON logic. */
@Override
public void visit(RefTypeInitExpr refTypeInitExpr) {
    visitMapJsonInitExpr(refTypeInitExpr);
}

/** Analyzes a map literal via the shared map/JSON logic. */
@Override
public void visit(MapInitExpr mapInitExpr) {
    visitMapJsonInitExpr(mapInitExpr);
}

/** Analyzes a JSON object literal via the shared map/JSON logic. */
@Override
public void visit(JSONInitExpr jsonInitExpr) {
    visitMapJsonInitExpr(jsonInitExpr);
}
/**
 * Analyzes a JSON array literal: each element must be a value type, already
 * JSON-compatible, or widenable to JSON via an inserted cast.
 */
@Override
public void visit(JSONArrayInitExpr jsonArrayInitExpr) {
    BType inheritedType = jsonArrayInitExpr.getInheritedType();
    jsonArrayInitExpr.setType(inheritedType);
    Expression[] argExprs = jsonArrayInitExpr.getArgExprs();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        // Nested literals inherit the JSON type before analysis.
        if (argExpr instanceof RefTypeInitExpr) {
            argExpr = getNestedInitExpr(argExpr, inheritedType);
            argExprs[i] = argExpr;
        }
        visitSingleValueExpr(argExpr);
        BType argType = argExpr.getType();
        if (BTypes.isValueType(argType) || NativeCastMapper.isCompatible(BTypes.typeJSON, argType)) {
            continue;
        }
        // If no widening cast exists, throwSemanticError below is expected to
        // throw; otherwise the cast replaces the original element expression.
        TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, argExpr);
        if (typeCastExpr == null) {
            BLangExceptionHelper.throwSemanticError(jsonArrayInitExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExpr.getType(), BTypes.typeJSON);
        }
        argExprs[i] = typeCastExpr;
    }
}
/**
 * Analyzes a connector initialization: the inherited type must be a Ballerina
 * or native connector definition, and every constructor argument must match
 * the corresponding declared parameter type exactly.
 */
@Override
public void visit(ConnectorInitExpr connectorInitExpr) {
    BType inheritedType = connectorInitExpr.getInheritedType();
    if (!(inheritedType instanceof BallerinaConnectorDef) && !(inheritedType instanceof AbstractNativeConnector)) {
        BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.CONNECTOR_INIT_NOT_ALLOWED);
    }
    connectorInitExpr.setType(inheritedType);
    for (Expression argExpr : connectorInitExpr.getArgExprs()) {
        visitSingleValueExpr(argExpr);
    }
    // Native connectors: check arguments against the declared native type names.
    if (inheritedType instanceof AbstractNativeConnector) {
        AbstractNativeConnector nativeConnector = (AbstractNativeConnector) inheritedType;
        for (int i = 0; i < nativeConnector.getArgumentTypeNames().length; i++) {
            SimpleTypeName simpleTypeName = nativeConnector.getArgumentTypeNames()[i];
            BType argType = BTypes.resolveType(simpleTypeName, currentScope, connectorInitExpr.getNodeLocation());
            if (argType != connectorInitExpr.getArgExprs()[i].getType()) {
                BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                        argType, connectorInitExpr.getArgExprs()[i].getType());
            }
        }
        return;
    }
    // Ballerina connectors: resolve each parameter type and compare.
    Expression[] argExprs = connectorInitExpr.getArgExprs();
    ParameterDef[] parameterDefs = ((BallerinaConnectorDef) inheritedType).getParameterDefs();
    for (int i = 0; i < argExprs.length; i++) {
        SimpleTypeName simpleTypeName = parameterDefs[i].getTypeName();
        BType paramType = BTypes.resolveType(simpleTypeName, currentScope, connectorInitExpr.getNodeLocation());
        parameterDefs[i].setType(paramType);
        Expression argExpr = argExprs[i];
        if (parameterDefs[i].getType() != argExpr.getType()) {
            BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    parameterDefs[i].getType(), argExpr.getType());
        }
    }
}
/**
 * Analyzes an array literal; the context it appears in must have provided an
 * array type, otherwise the literal is not allowed here.
 */
@Override
public void visit(ArrayInitExpr arrayInitExpr) {
    BType inheritedType = arrayInitExpr.getInheritedType();
    if (!(inheritedType instanceof BArrayType)) {
        BLangExceptionHelper.throwSemanticError(arrayInitExpr, SemanticErrors.ARRAY_INIT_NOT_ALLOWED_HERE);
    }
    visitArrayInitExpr(arrayInitExpr);
}
/**
 * Validates the elements of an array literal against the element type of the
 * inherited array type, inserting widening casts where the lattice allows.
 */
private void visitArrayInitExpr(ArrayInitExpr arrayInitExpr) {
    BType inheritedType = arrayInitExpr.getInheritedType();
    arrayInitExpr.setType(inheritedType);
    Expression[] argExprs = arrayInitExpr.getArgExprs();
    if (argExprs.length == 0) {
        return;
    }
    BType expectedElementType = ((BArrayType) inheritedType).getElementType();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        // Nested literals inherit the element type before analysis.
        if (argExpr instanceof RefTypeInitExpr) {
            ((RefTypeInitExpr) argExpr).setInheritedType(expectedElementType);
            argExpr = getNestedInitExpr(argExpr, expectedElementType);
            argExprs[i] = argExpr;
        }
        visitSingleValueExpr(argExpr);
        if (NativeCastMapper.isCompatible(expectedElementType, argExpr.getType())) {
            continue;
        }
        // If no widening cast exists, throwSemanticError below is expected to
        // throw; otherwise the cast replaces the original element expression.
        TypeCastExpression typeCastExpr = checkWideningPossible(expectedElementType, argExpr);
        if (typeCastExpr == null) {
            BLangExceptionHelper.throwSemanticError(arrayInitExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExpr.getType(), expectedElementType);
        }
        argExprs[i] = typeCastExpr;
    }
}
/**
 * Analyzes a struct initializing expression: every key must be a plain field
 * name declared on the struct, and every value must be compatible with (or a
 * nested literal of) the corresponding field type.
 */
@Override
public void visit(StructInitExpr structInitExpr) {
    BType inheritedType = structInitExpr.getInheritedType();
    structInitExpr.setType(inheritedType);
    Expression[] argExprs = structInitExpr.getArgExprs();
    if (argExprs.length == 0) {
        return;
    }
    StructDef structDef = (StructDef) inheritedType;
    for (Expression argExpr : argExprs) {
        KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr;
        Expression keyExpr = keyValueExpr.getKeyExpr();
        // Keys must be bare identifiers, not arbitrary expressions.
        if (!(keyExpr instanceof VariableRefExpr)) {
            BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INVALID_FIELD_NAME_STRUCT_INIT);
        }
        VariableRefExpr varRefExpr = (VariableRefExpr) keyExpr;
        // Resolve the field against the struct's member scope.
        BLangSymbol varDefSymbol = structDef.resolveMembers(varRefExpr.getSymbolName());
        if (varDefSymbol == null) {
            BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.UNKNOWN_FIELD_IN_STRUCT,
                    varRefExpr.getVarName(), structDef.getName());
        }
        if (!(varDefSymbol instanceof VariableDef)) {
            BLangExceptionHelper.throwSemanticError(varRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                    varDefSymbol.getSymbolName());
        }
        VariableDef varDef = (VariableDef) varDefSymbol;
        varRefExpr.setVariableDef(varDef);
        Expression valueExpr = keyValueExpr.getValueExpr();
        BType structFieldType = varDef.getType();
        // Nested literals inherit the field's declared type.
        if (valueExpr instanceof RefTypeInitExpr) {
            valueExpr = getNestedInitExpr(valueExpr, structFieldType);
            keyValueExpr.setValueExpr(valueExpr);
        }
        valueExpr.accept(this);
        if (!NativeCastMapper.isCompatible(structFieldType, valueExpr.getType())) {
            BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    varDef.getType(), valueExpr.getType());
        }
    }
}
/**
 * Analyzes an XML backtick template: splits the template string into literal
 * segments and embedded variable / indexed-access placeholders, building the
 * interleaved argument-expression list for later evaluation.
 *
 * NOTE(review): relies on the fields patternString and compiledPattern, which
 * are declared outside this chunk — presumably the placeholder regex; the
 * group numbers below (1..5) are tied to that pattern. Confirm against the
 * field declarations.
 */
@Override
public void visit(BacktickExpr backtickExpr) {
    // Backtick templates are only valid where an XML value is expected.
    BType inheritedType = backtickExpr.getInheritedType();
    if (inheritedType != BTypes.typeXML) {
        BLangExceptionHelper.throwSemanticError(backtickExpr, SemanticErrors.INCOMPATIBLE_TYPES_EXPECTED_XML);
    }
    backtickExpr.setType(inheritedType);
    String[] literals = backtickExpr.getTemplateStr().split(patternString);
    List<Expression> argExprList = new ArrayList<>();
    int i = 0;
    // Leading literal segment, if any.
    if (literals.length > i) {
        BasicLiteral basicLiteral = new BasicLiteral(backtickExpr.getNodeLocation(),
                new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(literals[i]));
        visit(basicLiteral);
        argExprList.add(basicLiteral);
        i++;
    }
    Matcher m = compiledPattern.matcher(backtickExpr.getTemplateStr());
    while (m.find()) {
        // group(3) present => indexed access placeholder like ${x[..]}.
        if (m.group(3) != null) {
            BasicLiteral indexExpr;
            // group(5): string key (map access); group(4): integer index (array access).
            if (m.group(5) != null) {
                indexExpr = new BasicLiteral(backtickExpr.getNodeLocation(),
                        new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(m.group(5)));
                indexExpr.setType(BTypes.typeString);
            } else {
                indexExpr = new BasicLiteral(backtickExpr.getNodeLocation(),
                        new SimpleTypeName(TypeConstants.INT_TNAME), new BInteger(Integer.parseInt(m.group(4))));
                indexExpr.setType(BTypes.typeInt);
            }
            SymbolName mapOrArrName = new SymbolName(m.group(2));
            ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder =
                    new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
            VariableRefExpr arrayMapVarRefExpr = new VariableRefExpr(backtickExpr.getNodeLocation(), mapOrArrName);
            visit(arrayMapVarRefExpr);
            builder.setArrayMapVarRefExpr(arrayMapVarRefExpr);
            builder.setSymbolName(mapOrArrName);
            Expression[] exprs = {indexExpr};
            builder.setIndexExprs(exprs);
            ArrayMapAccessExpr arrayMapAccessExpr = builder.buildWithSymbol();
            visit(arrayMapAccessExpr);
            argExprList.add(arrayMapAccessExpr);
        } else {
            // Simple variable placeholder like ${x}.
            VariableRefExpr variableRefExpr = new VariableRefExpr(backtickExpr.getNodeLocation(),
                    new SymbolName(m.group(1)));
            visit(variableRefExpr);
            argExprList.add(variableRefExpr);
        }
        // Literal segment following this placeholder, if any.
        if (literals.length > i) {
            BasicLiteral basicLiteral = new BasicLiteral(backtickExpr.getNodeLocation(),
                    new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(literals[i]));
            visit(basicLiteral);
            argExprList.add(basicLiteral);
            i++;
        }
    }
    backtickExpr.setArgsExprs(argExprList.toArray(new Expression[argExprList.size()]));
}
/**
 * Key/value pairs are validated by their enclosing init expression; nothing to
 * analyze here.
 */
@Override
public void visit(KeyValueExpr keyValueExpr) {
}

/**
 * Analyzes a variable reference: the name must resolve in the current scope to
 * a variable definition, which is then bound to the expression.
 */
@Override
public void visit(VariableRefExpr variableRefExpr) {
    SymbolName name = variableRefExpr.getSymbolName();
    BLangSymbol symbol = currentScope.resolve(name);
    if (symbol == null) {
        BLangExceptionHelper.throwSemanticError(variableRefExpr, SemanticErrors.UNDEFINED_SYMBOL, name);
    }
    if (!(symbol instanceof VariableDef)) {
        BLangExceptionHelper.throwSemanticError(variableRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                name);
    }
    variableRefExpr.setVariableDef((VariableDef) symbol);
}
/**
 * Analyzes an explicit type cast: resolves the target type if needed, rejects
 * casts of the null literal, and selects either a built-in cast function from
 * the explicit-cast lattice or a user-defined type mapper.
 */
@Override
public void visit(TypeCastExpression typeCastExpression) {
    Expression sourceExpr = typeCastExpression.getRExpr();
    visitSingleValueExpr(sourceExpr);

    BType sourceType = sourceExpr.getType();
    BType targetType = typeCastExpression.getTargetType();
    if (targetType == null) {
        targetType = BTypes.resolveType(typeCastExpression.getTypeName(), currentScope, null);
        typeCastExpression.setTargetType(targetType);
    }

    // null cannot be cast to anything.
    if (sourceExpr instanceof NullLiteral) {
        BLangExceptionHelper.throwSemanticError(typeCastExpression, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST,
                sourceType, targetType);
    }

    TypeEdge castEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
    if (castEdge == null) {
        // No built-in cast: fall back to a user-defined type mapper.
        linkTypeMapper(typeCastExpression, sourceType, targetType);
    } else {
        typeCastExpression.setEvalFunc(castEdge.getTypeMapperFunction());
    }
}
/** Assigns the built-in null type to a null literal. */
@Override
public void visit(NullLiteral nullLiteral) {
    nullLiteral.setType(BTypes.typeNull);
}
// The memory-location visitors below are intentionally no-ops: locations are
// assigned during analysis and require no further checking here.
@Override
public void visit(StackVarLocation stackVarLocation) {
}
@Override
public void visit(ServiceVarLocation serviceVarLocation) {
}
@Override
public void visit(GlobalVarLocation globalVarLocation) {
}
@Override
public void visit(ConnectorVarLocation connectorVarLocation) {
}
@Override
public void visit(ConstantLocation constantLocation) {
}
@Override
public void visit(StructVarLocation structVarLocation) {
}
@Override
public void visit(WorkerVarLocation workerVarLocation) {
}
// NOTE(review): unlike the visitors above, these two are not annotated with
// @Override — confirm whether they are declared on the visitor interface.
public void visit(ResourceInvocationExpr resourceIExpr) {
}
public void visit(MainInvoker mainInvoker) {
}
/** Makes the given scope the current symbol-resolution scope. */
private void openScope(SymbolScope symbolScope) {
    currentScope = symbolScope;
}

/** Restores the enclosing scope as the current symbol-resolution scope. */
private void closeScope() {
    currentScope = currentScope.getEnclosingScope();
}
/**
 * Type-checks an array or map access expression: arrays require integer
 * indices (one per dimension) and yield the element type; maps require a
 * single string index and yield the map's element type. Any other base type
 * is not indexable.
 */
private void handleArrayType(ArrayMapAccessExpr arrayMapAccessExpr) {
    ReferenceExpr arrayMapVarRefExpr = (ReferenceExpr) arrayMapAccessExpr.getRExpr();
    if (arrayMapVarRefExpr.getType() instanceof BArrayType) {
        // All index expressions must be int.
        for (Expression indexExpr : arrayMapAccessExpr.getIndexExprs()) {
            visitSingleValueExpr(indexExpr);
            if (indexExpr.getType() != BTypes.typeInt) {
                BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr, SemanticErrors.NON_INTEGER_ARRAY_INDEX,
                        indexExpr.getType());
            }
        }
        // Peel one array dimension per index expression.
        BType expectedType = arrayMapVarRefExpr.getType();
        for (int i = 0; i < arrayMapAccessExpr.getIndexExprs().length; i++) {
            expectedType = ((BArrayType) expectedType).getElementType();
        }
        arrayMapAccessExpr.setType(expectedType);
    } else if (arrayMapVarRefExpr.getType() instanceof BMapType) {
        // Maps take exactly one string-typed index.
        Expression indexExpr = arrayMapAccessExpr.getIndexExprs()[0];
        visitSingleValueExpr(indexExpr);
        if (indexExpr.getType() != BTypes.typeString) {
            BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr, SemanticErrors.NON_STRING_MAP_INDEX,
                    indexExpr.getType());
        }
        BMapType typeOfMap = (BMapType) arrayMapVarRefExpr.getType();
        arrayMapAccessExpr.setType(typeOfMap.getElementType());
    } else {
        BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr,
                SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, arrayMapVarRefExpr.getType());
    }
}
/** Analyzes both operands of a binary expression as single-valued expressions. */
private void visitBinaryExpr(BinaryExpression expr) {
    visitSingleValueExpr(expr.getLExpr());
    visitSingleValueExpr(expr.getRExpr());
}

/**
 * Analyzes an expression that must produce exactly one value; a multi-return
 * function invocation in this position is a semantic error.
 */
private void visitSingleValueExpr(Expression expr) {
    expr.accept(this);
    if (!expr.isMultiReturnExpr()) {
        return;
    }
    FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) expr;
    String qualifiedName = (funcIExpr.getPackageName() == null)
            ? funcIExpr.getName()
            : funcIExpr.getPackageName() + ":" + funcIExpr.getName();
    BLangExceptionHelper.throwSemanticError(expr, SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT,
            qualifiedName);
}
/**
 * Verifies operand types of an arithmetic expression; the expression's result
 * type is the (possibly widened) common operand type, which is also returned.
 */
private BType verifyBinaryArithmeticExprType(BinaryArithmeticExpression binaryArithmeticExpr) {
    visitBinaryExpr(binaryArithmeticExpr);
    BType resultType = verifyBinaryExprType(binaryArithmeticExpr);
    binaryArithmeticExpr.setType(resultType);
    return resultType;
}

/**
 * Verifies operand types of a comparison expression; the expression itself is
 * boolean-typed, while the common operand type is returned for eval-function
 * selection.
 */
private BType verifyBinaryCompareExprType(BinaryExpression binaryExpression) {
    visitBinaryExpr(binaryExpression);
    BType operandType = verifyBinaryExprType(binaryExpression);
    binaryExpression.setType(BTypes.typeBoolean);
    return operandType;
}
/**
 * Verifies operand types of an equality expression. A null literal may be
 * compared against any reference-typed operand (but not a value type);
 * otherwise the usual binary type rules apply. The expression itself is
 * boolean-typed; the common operand type is returned.
 */
private BType verifyBinaryEqualityExprType(BinaryExpression binaryExpression) {
    visitBinaryExpr(binaryExpression);
    BType lType = binaryExpression.getLExpr().getType();
    BType rType = binaryExpression.getRExpr().getType();
    BType comparedType;
    if (rType == BTypes.typeNull) {
        // Value types can never equal null.
        if (BTypes.isValueType(lType)) {
            BLangExceptionHelper.throwSemanticError(binaryExpression,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
        }
        comparedType = rType;
    } else if (lType == BTypes.typeNull) {
        if (BTypes.isValueType(rType)) {
            BLangExceptionHelper.throwSemanticError(binaryExpression,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
        }
        comparedType = lType;
    } else {
        comparedType = verifyBinaryExprType(binaryExpression);
    }
    binaryExpression.setType(BTypes.typeBoolean);
    return comparedType;
}
/**
 * Verifies that both operands of a binary expression have the same type,
 * inserting an implicit widening cast on one side when the implicit-cast
 * lattice provides an edge. String operands only participate in implicit
 * casting for '+' (concatenation). Returns the common operand type.
 */
private BType verifyBinaryExprType(BinaryExpression binaryExpr) {
    Expression rExpr = binaryExpr.getRExpr();
    Expression lExpr = binaryExpr.getLExpr();
    BType rType = rExpr.getType();
    BType lType = lExpr.getType();
    if (!(rType.equals(lType))) {
        TypeCastExpression newExpr;
        TypeEdge newEdge;
        // Implicit casts involving string are only attempted for '+'.
        if (((rType.equals(BTypes.typeString) || lType.equals(BTypes.typeString))
                && binaryExpr.getOperator().equals(Operator.ADD)) || (!(rType.equals(BTypes.typeString)) &&
                !(lType.equals(BTypes.typeString)))) {
            // Prefer widening the right operand to the left operand's type.
            newEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rType, lType, null);
            if (newEdge != null) {
                newExpr = new TypeCastExpression(rExpr.getNodeLocation(), rExpr, lType);
                newExpr.setEvalFunc(newEdge.getTypeMapperFunction());
                newExpr.accept(this);
                binaryExpr.setRExpr(newExpr);
                return lType;
            } else {
                // Otherwise try widening the left operand to the right's type.
                newEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(lType, rType, null);
                if (newEdge != null) {
                    newExpr = new TypeCastExpression(lExpr.getNodeLocation(), lExpr, rType);
                    newExpr.setEvalFunc(newEdge.getTypeMapperFunction());
                    newExpr.accept(this);
                    binaryExpr.setLExpr(newExpr);
                    return rType;
                }
            }
        }
        throwInvalidBinaryOpError(binaryExpr);
    }
    return rType;
}
private void visitBinaryLogicalExpr(BinaryLogicalExpression expr) {
visitBinaryExpr(expr);
Expression rExpr = expr.getRExpr();
Expression lExpr = expr.getLExpr();
if (lExpr.getType() == BTypes.typeBoolean && rExpr.getType() == BTypes.typeBoolean) {
expr.setType(BTypes.typeBoolean);
} else {
throwInvalidBinaryOpError(expr);
}
}
private String getVarNameFromExpression(Expression expr) {
if (expr instanceof ArrayMapAccessExpr) {
return ((ArrayMapAccessExpr) expr).getSymbolName().getName();
} else if (expr instanceof FieldAccessExpr) {
return getVarNameFromExpression(((FieldAccessExpr) expr).getVarRef());
} else {
return ((VariableRefExpr) expr).getSymbolName().getName();
}
}
private void checkForConstAssignment(AssignStmt assignStmt, Expression lExpr) {
if (lExpr instanceof VariableRefExpr &&
((VariableRefExpr) lExpr).getMemoryLocation() instanceof ConstantLocation) {
BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.CANNOT_ASSIGN_VALUE_CONSTANT,
((VariableRefExpr) lExpr).getSymbolName());
}
}
private void checkForMultiAssignmentErrors(AssignStmt assignStmt, Expression[] lExprs,
CallableUnitInvocationExpr rExpr) {
BType[] returnTypes = rExpr.getTypes();
if (lExprs.length != returnTypes.length) {
BLangExceptionHelper.throwSemanticError(assignStmt,
SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length);
}
for (int i = 0; i < lExprs.length; i++) {
Expression lExpr = lExprs[i];
BType returnType = returnTypes[i];
if ((lExpr.getType() != BTypes.typeAny) && (!lExpr.getType().equals(returnType))) {
String varName = getVarNameFromExpression(lExpr);
BLangExceptionHelper.throwSemanticError(assignStmt,
SemanticErrors.CANNOT_ASSIGN_IN_MULTIPLE_ASSIGNMENT, returnType, varName, lExpr.getType());
}
}
}
private void visitLExprsOfAssignment(AssignStmt assignStmt, Expression[] lExprs) {
Set<String> varNameSet = new HashSet<>();
for (Expression lExpr : lExprs) {
String varName = getVarNameFromExpression(lExpr);
if (!varNameSet.add(varName)) {
BLangExceptionHelper.throwSemanticError(assignStmt,
SemanticErrors.VAR_IS_REPEATED_ON_LEFT_SIDE_ASSIGNMENT, varName);
}
if (lExpr instanceof ArrayMapAccessExpr) {
((ArrayMapAccessExpr) lExpr).setLHSExpr(true);
} else if (lExpr instanceof FieldAccessExpr) {
((FieldAccessExpr) lExpr).setLHSExpr(true);
}
lExpr.accept(this);
checkForConstAssignment(assignStmt, lExpr);
}
}
    /**
     * Resolves the function referenced by the given invocation expression and
     * links it to the expression. Resolution first tries an exact match on
     * name and argument types; failing that, it searches for a candidate
     * reachable via implicit casts. For native functions the return types are
     * resolved from the native unit's declared type names.
     *
     * @param funcIExpr function invocation expression to link
     */
    private void linkFunction(FunctionInvocationExpr funcIExpr) {
        String pkgPath = funcIExpr.getPackagePath();
        // Build the parameter-type signature from the (already analyzed) args.
        Expression[] exprs = funcIExpr.getArgExprs();
        BType[] paramTypes = new BType[exprs.length];
        for (int i = 0; i < exprs.length; i++) {
            paramTypes[i] = exprs[i].getType();
        }
        FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(funcIExpr.getName(),
                pkgPath, paramTypes);
        BLangSymbol functionSymbol = currentScope.resolve(symbolName);
        if (functionSymbol == null) {
            // No exact signature match; look for one reachable via implicit casts.
            functionSymbol = findBestMatchForFunctionSymbol(funcIExpr, symbolName);
        }
        if (functionSymbol == null) {
            String funcName = (funcIExpr.getPackageName() != null) ? funcIExpr.getPackageName() + ":" +
                    funcIExpr.getName() : funcIExpr.getName();
            BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.UNDEFINED_FUNCTION, funcName);
        }
        Function function;
        if (functionSymbol instanceof NativeUnitProxy) {
            // Native function: load it lazily and resolve its declared return
            // type names into concrete BTypes.
            NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load();
            SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
            BType[] returnTypes = new BType[returnParamTypeNames.length];
            for (int i = 0; i < returnParamTypeNames.length; i++) {
                SimpleTypeName typeName = returnParamTypeNames[i];
                BType bType = BTypes.resolveType(typeName, currentScope, funcIExpr.getNodeLocation());
                returnTypes[i] = bType;
            }
            if (!(nativeUnit instanceof Function)) {
                BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                        symbolName);
            }
            function = (Function) nativeUnit;
            function.setReturnParamTypes(returnTypes);
        } else {
            if (!(functionSymbol instanceof Function)) {
                BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                        symbolName);
            }
            function = (Function) functionSymbol;
        }
        funcIExpr.setCallableUnit(function);
    }
    /**
     * Helper method to find the best function match when there is no direct match.
     * Scans every function in the target package whose name and parameter count
     * match, accepting a candidate when every argument either matches the
     * parameter type exactly, the parameter is 'any', or the argument can be
     * implicitly widened. Two acceptable candidates is an ambiguity error.
     *
     * @param funcIExpr function invocation being resolved
     * @param symbolName signature-qualified name that failed exact resolution
     * @return the matching function symbol, or null when none matches
     */
    private BLangSymbol findBestMatchForFunctionSymbol(FunctionInvocationExpr funcIExpr,
                                                       FunctionSymbolName symbolName) {
        BLangSymbol functionSymbol = null;
        BLangSymbol pkgSymbol = null;
        if (symbolName.getPkgPath() == null) {
            // Unqualified call: search the current package's scope.
            pkgSymbol = (BLangPackage) getCurrentPackageScope(currentScope);
        } else {
            SymbolName pkgSymbolName = new SymbolName(symbolName.getPkgPath());
            pkgSymbol = currentScope.resolve(pkgSymbolName);
        }
        if (pkgSymbol == null) {
            return null;
        }
        Expression[] argExprs = funcIExpr.getArgExprs();
        Expression[] updatedArgExprs = new Expression[argExprs.length];
        for (Map.Entry entry : ((SymbolScope) pkgSymbol).getSymbolMap().entrySet()) {
            if (!(entry.getKey() instanceof FunctionSymbolName)) {
                continue;
            }
            FunctionSymbolName funcSymName = (FunctionSymbolName) entry.getKey();
            if (!funcSymName.isNameAndParamCountMatch(symbolName)) {
                continue;
            }
            boolean implicitCastPossible = true;
            for (int i = 0; i < argExprs.length; i++) {
                Expression argExpr = argExprs[i];
                updatedArgExprs[i] = argExpr;
                BType lhsType;
                if (entry.getValue() instanceof NativeUnitProxy) {
                    // Native candidate: parameter types are declared by name.
                    NativeUnit nativeUnit = ((NativeUnitProxy) entry.getValue()).load();
                    SimpleTypeName simpleTypeName = nativeUnit.getArgumentTypeNames()[i];
                    lhsType = BTypes.resolveType(simpleTypeName, currentScope, funcIExpr.getNodeLocation());
                } else {
                    if (!(entry.getValue() instanceof Function)) {
                        continue;
                    }
                    lhsType = ((Function) entry.getValue()).getParameterDefs()[i].getType();
                }
                BType rhsType = argExpr.getType();
                // null is acceptable for any reference-typed parameter.
                if (rhsType instanceof BNullType && !BTypes.isValueType(lhsType)) {
                    continue;
                }
                // Exact type match: nothing to do for this argument.
                if (rhsType != null && lhsType.equals(rhsType)) {
                    continue;
                }
                // 'any' parameter accepts every argument as-is.
                if (lhsType == BTypes.typeAny) {
                    continue;
                }
                TypeCastExpression newExpr = checkWideningPossible(lhsType, argExpr);
                if (newExpr != null) {
                    updatedArgExprs[i] = newExpr;
                } else {
                    implicitCastPossible = false;
                    break;
                }
            }
            if (implicitCastPossible) {
                if (functionSymbol == null) {
                    functionSymbol = (BLangSymbol) entry.getValue();
                } else {
                    /**
                     * This way second ambiguous function will cause this method to throw semantic error, so in a
                     * scenario where there are more than two ambiguous functions, then this will show only the
                     * first two.
                     */
                    String ambiguousFunc1 = generateErrorMessage(funcIExpr, functionSymbol, symbolName.getPkgPath());
                    String ambiguousFunc2 = generateErrorMessage(funcIExpr, (BLangSymbol) entry.getValue(),
                            symbolName.getPkgPath());
                    BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.AMBIGUOUS_FUNCTIONS,
                            funcSymName.getFuncName(), ambiguousFunc1, ambiguousFunc2);
                    break;
                }
            }
        }
        // NOTE(review): updatedArgExprs is written back even when no candidate
        // matched, so casts inserted for a partially matching candidate may
        // persist on the invocation — confirm this is intentional.
        for (int i = 0; i < updatedArgExprs.length; i++) {
            funcIExpr.getArgExprs()[i] = updatedArgExprs[i];
        }
        return functionSymbol;
    }
    /**
     * Helper method to generate the signature string ("pkg:name(type,...)")
     * used in ambiguous-function error messages.
     *
     * @param funcIExpr invocation, used only for error locations
     * @param functionSymbol candidate function symbol (native or Ballerina)
     * @param packagePath package path prefix, or null for the current package
     * @return human-readable signature of the candidate function
     */
    private static String generateErrorMessage(FunctionInvocationExpr funcIExpr, BLangSymbol functionSymbol,
                                               String packagePath) {
        Function function;
        if (functionSymbol instanceof NativeUnitProxy) {
            NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load();
            if (!(nativeUnit instanceof Function)) {
                BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                        functionSymbol.getName());
            }
            function = (Function) nativeUnit;
        } else {
            if (!(functionSymbol instanceof Function)) {
                BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                        functionSymbol.getName());
            }
            function = (Function) functionSymbol;
        }
        // Take the text before the first '.' — presumably the mangled symbol
        // name appends a signature portion after it; confirm against
        // LangModelUtils' name mangling.
        String funcName = function.getSymbolName().getName().split("\\.")[0];
        String firstPart = (packagePath != null) ? packagePath + ":" + funcName : funcName;
        StringBuilder sBuilder = new StringBuilder(firstPart + "(");
        String prefix = "";
        for (ParameterDef parameterDef : function.getParameterDefs()) {
            sBuilder.append(prefix);
            prefix = ",";
            // Qualify the parameter type with its package path when present.
            String pkgPath = parameterDef.getTypeName().getPackagePath();
            if (pkgPath != null) {
                sBuilder.append(pkgPath).append(":");
            }
            sBuilder.append(parameterDef.getTypeName().getName());
        }
        sBuilder.append(")");
        return sBuilder.toString();
    }
/**
* Get current package Scope.
*
* @param scope
* @return scope
*/
private SymbolScope getCurrentPackageScope(SymbolScope scope) {
if (scope instanceof BLangPackage) {
return scope;
} else {
return getCurrentPackageScope(scope.getEnclosingScope());
}
}
    /**
     * Resolves the action referenced by the given action invocation and links
     * it to the expression: first resolves the connector (native or
     * Ballerina-defined), then resolves the action within it by name and
     * argument types. For native actions the return types are resolved from
     * the native unit's declared type names.
     *
     * @param actionIExpr action invocation expression to link
     */
    private void linkAction(ActionInvocationExpr actionIExpr) {
        String pkgPath = actionIExpr.getPackagePath();
        String connectorName = actionIExpr.getConnectorName();
        SymbolName connectorSymbolName = new SymbolName(connectorName, pkgPath);
        BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName);
        if (connectorSymbol == null) {
            String connectorWithPkgName = (actionIExpr.getPackageName() != null) ? actionIExpr.getPackageName() +
                    ":" + actionIExpr.getConnectorName() : actionIExpr.getConnectorName();
            BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_CONNECTOR,
                    connectorWithPkgName);
        }
        // Build the action's parameter-type signature from the argument types.
        Expression[] exprs = actionIExpr.getArgExprs();
        BType[] paramTypes = new BType[exprs.length];
        for (int i = 0; i < exprs.length; i++) {
            paramTypes[i] = exprs[i].getType();
        }
        SymbolName symbolName = LangModelUtils.getActionSymName(actionIExpr.getName(), actionIExpr.getConnectorName(),
                null, paramTypes);
        BLangSymbol actionSymbol = null;
        if (connectorSymbol instanceof NativeUnitProxy) {
            // Native connector: actions live on the loaded connector instance.
            AbstractNativeConnector connector = (AbstractNativeConnector) ((NativeUnitProxy) connectorSymbol).load();
            actionSymbol = connector.resolveMembers(symbolName);
        } else if (connectorSymbol instanceof BallerinaConnectorDef) {
            actionSymbol = ((BallerinaConnectorDef) connectorSymbol).resolveMembers(symbolName);
        } else {
            BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_CONNECTOR_EXPECTED,
                    connectorSymbolName);
        }
        if (actionSymbol == null) {
            String actionWithConnector = actionIExpr.getConnectorName() + "." + actionIExpr.getName();
            String actionName = (actionIExpr.getPackageName() != null) ? actionIExpr.getPackageName() + ":" +
                    actionWithConnector : actionWithConnector;
            BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_ACTION, actionName);
        }
        Action action = null;
        if (actionSymbol instanceof NativeUnitProxy) {
            // Native action: load it and resolve its declared return types.
            NativeUnit nativeUnit = ((NativeUnitProxy) actionSymbol).load();
            SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
            BType[] returnTypes = new BType[returnParamTypeNames.length];
            for (int i = 0; i < returnParamTypeNames.length; i++) {
                SimpleTypeName typeName = returnParamTypeNames[i];
                BType bType = BTypes.resolveType(typeName, currentScope, actionIExpr.getNodeLocation());
                returnTypes[i] = bType;
            }
            if (!(nativeUnit instanceof Action)) {
                BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                        symbolName);
            }
            action = (Action) nativeUnit;
            action.setReturnParamTypes(returnTypes);
        } else if (actionSymbol instanceof Action) {
            action = (Action) actionSymbol;
        } else {
            BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                    symbolName);
        }
        actionIExpr.setCallableUnit(action);
    }
private void linkWorker(WorkerInvocationStmt workerInvocationStmt) {
String workerName = workerInvocationStmt.getCallableUnitName();
SymbolName workerSymbolName = new SymbolName(workerName);
Worker worker = (Worker) currentScope.resolve(workerSymbolName);
if (worker == null) {
throw new LinkerException(workerInvocationStmt.getNodeLocation().getFileName() + ":" +
workerInvocationStmt.getNodeLocation().getLineNumber() +
": undefined worker '" + workerInvocationStmt.getCallableUnitName() + "'");
}
workerInvocationStmt.setCallableUnit(worker);
}
private void throwInvalidBinaryOpError(BinaryExpression binaryExpr) {
BType lExprType = binaryExpr.getLExpr().getType();
BType rExprType = binaryExpr.getRExpr().getType();
if (lExprType == rExprType) {
BLangExceptionHelper.throwSemanticError(binaryExpr,
SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), lExprType);
} else {
BLangExceptionHelper.throwSemanticError(binaryExpr,
SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lExprType, rExprType);
}
}
private void throwInvalidUnaryOpError(UnaryExpression unaryExpr) {
BType rExprType = unaryExpr.getRExpr().getType();
BLangExceptionHelper.throwSemanticError(unaryExpr,
SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, unaryExpr.getOperator(), rExprType);
}
    /**
     * Semantically analyzes one link of a field-access chain against the given
     * scope: resolves the base variable, validates any array/map indexing on
     * it, then dispatches on the resolved type (struct/json/map/array) to
     * validate the remainder of the chain.
     *
     * @param fieldAccessExpr field access expression to analyze
     * @param enclosingScope scope used to resolve the base variable
     */
    private void visitField(FieldAccessExpr fieldAccessExpr, SymbolScope enclosingScope) {
        ReferenceExpr varRefExpr = (ReferenceExpr) fieldAccessExpr.getVarRef();
        SymbolName symbolName = varRefExpr.getSymbolName();
        BLangSymbol fieldSymbol = enclosingScope.resolve(symbolName);
        if (fieldSymbol == null) {
            // Distinguish an unknown struct field from a plain undefined symbol.
            if (enclosingScope instanceof StructDef) {
                BLangExceptionHelper.throwSemanticError(fieldAccessExpr, SemanticErrors.UNKNOWN_FIELD_IN_STRUCT,
                        symbolName.getName(), ((StructDef) enclosingScope).getName());
            } else {
                BLangExceptionHelper.throwSemanticError(fieldAccessExpr, SemanticErrors.UNDEFINED_SYMBOL,
                        symbolName.getName());
            }
        }
        if (!(fieldSymbol instanceof VariableDef)) {
            BLangExceptionHelper.throwSemanticError(varRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
                    symbolName);
        }
        VariableDef varDef = (VariableDef) fieldSymbol;
        BType exprType = varDef.getType();
        /* Get the actual var representation of this field, and semantically analyze. This will check for semantic
         * errors of arrays/map accesses, used in this field.
         * eg: in dpt.employee[2].name , below will check for semantics of 'employee[2]',
         * treating them as individual arrays/map variables.
         */
        if (varRefExpr instanceof ArrayMapAccessExpr) {
            Expression rExpr = ((ArrayMapAccessExpr) varRefExpr).getRExpr();
            if (rExpr instanceof VariableRefExpr) {
                ((VariableRefExpr) rExpr).setVariableDef(varDef);
            }
            if (exprType instanceof BArrayType) {
                // Indexing into an array yields its element type.
                exprType = ((BArrayType) varDef.getType()).getElementType();
            }
            handleArrayType((ArrayMapAccessExpr) varRefExpr);
        } else {
            ((VariableRefExpr) varRefExpr).setVariableDef(varDef);
        }
        FieldAccessExpr fieldExpr = (FieldAccessExpr) fieldAccessExpr.getFieldExpr();
        if (fieldExpr == null) {
            return;
        }
        // Dispatch on the base type to validate the rest of the access chain.
        if (exprType instanceof StructDef) {
            visitStructAccessExpr(fieldExpr, exprType);
        } else if (exprType instanceof BJSONType) {
            visitJSONAccessExpr(fieldAccessExpr, fieldExpr);
        } else if (exprType instanceof BMapType) {
            visitMapAccessExpr(fieldAccessExpr, varRefExpr, fieldExpr, enclosingScope);
        } else if (exprType instanceof BArrayType) {
            visitArrayAccessExpr(fieldAccessExpr, varRefExpr, fieldExpr, exprType, enclosingScope);
        } else {
            BLangExceptionHelper.throwSemanticError(fieldAccessExpr,
                    SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, exprType);
        }
    }
/**
* Visit a struct and its fields and semantically validate the field expression.
*
* @param fieldExpr field expression to validate
* @param exprType Struct definition
*/
private void visitStructAccessExpr(FieldAccessExpr fieldExpr, BType exprType) {
Expression fieldVar = fieldExpr.getVarRef();
if (fieldVar instanceof BasicLiteral) {
String varName = ((BasicLiteral) fieldVar).getBValue().stringValue();
VariableRefExpr varRef = new VariableRefExpr(fieldVar.getNodeLocation(), varName);
fieldExpr.setVarRef(varRef);
fieldExpr.setIsStaticField(true);
}
if (!fieldExpr.isStaticField()) {
BLangExceptionHelper.throwSemanticError(fieldVar, SemanticErrors.DYNAMIC_KEYS_NOT_SUPPORTED_FOR_STRUCT);
}
visitField(fieldExpr, ((StructDef) exprType));
}
/**
* Visits a JSON access expression. Rewrites the tree by replacing the {@link FieldAccessExpr}
* with a {@link JSONFieldAccessExpr}.
*
* @param parentExpr Current expression
* @param fieldExpr Field access expression of the current expression
*/
private void visitJSONAccessExpr(FieldAccessExpr parentExpr, FieldAccessExpr fieldExpr) {
if (fieldExpr == null) {
return;
}
FieldAccessExpr currentFieldExpr;
FieldAccessExpr nextFieldExpr = fieldExpr.getFieldExpr();
if (fieldExpr instanceof JSONFieldAccessExpr) {
currentFieldExpr = fieldExpr;
} else {
Expression varRefExpr = fieldExpr.getVarRef();
varRefExpr.accept(this);
currentFieldExpr = new JSONFieldAccessExpr(fieldExpr.getNodeLocation(), varRefExpr, nextFieldExpr);
}
parentExpr.setFieldExpr(currentFieldExpr);
visitJSONAccessExpr(currentFieldExpr, nextFieldExpr);
}
/**
* Visits a map access expression. Rewrites the tree by replacing the {@link FieldAccessExpr} with an
* {@link ArrayMapAccessExpr}. Then revisits the rewritten branch, and check for semantic.
*
* @param parentExpr Current expression
* @param varRefExpr VariableRefExpression of the current expression
* @param fieldExpr Field access expression of the current expression
* @param enclosingScope Enclosing scope
*/
private void visitMapAccessExpr(FieldAccessExpr parentExpr, ReferenceExpr varRefExpr, FieldAccessExpr fieldExpr,
SymbolScope enclosingScope) {
Expression fieldVar = fieldExpr.getVarRef();
if (fieldExpr.getFieldExpr() != null) {
BLangExceptionHelper.throwSemanticError(fieldExpr, SemanticErrors.INDEXING_NOT_SUPPORTED_FOR_MAP_ELEMENT,
BTypes.typeAny);
}
Expression indexExpr[] = new Expression[]{fieldVar};
ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder = new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
builder.setVarName(varRefExpr.getVarName());
builder.setPkgName(varRefExpr.getPkgName());
builder.setPkgPath(varRefExpr.getPkgPath());
builder.setIndexExprs(indexExpr);
builder.setArrayMapVarRefExpr(varRefExpr);
builder.setNodeLocation(fieldExpr.getNodeLocation());
ArrayMapAccessExpr accessExpr = builder.build();
parentExpr.setFieldExpr(fieldExpr.getFieldExpr());
parentExpr.setVarRef(accessExpr);
accessExpr.setLHSExpr(parentExpr.isLHSExpr());
visitField(parentExpr, enclosingScope);
}
/**
* Visits an array access expression. Rewrites the tree by replacing the {@link FieldAccessExpr} with an
* {@link ArrayMapAccessExpr}. Then revisits the rewritten branch, and check for semantic.
*
* @param parentExpr Current expression
* @param varRefExpr VariableRefExpression of the current expression
* @param fieldExpr Field access expression of the current expression
* @param exprType Type to which the expression evaluates
* @param enclosingScope Enclosing scope
*/
private void visitArrayAccessExpr(FieldAccessExpr parentExpr, ReferenceExpr varRefExpr, FieldAccessExpr fieldExpr,
BType exprType, SymbolScope enclosingScope) {
int dimensions = ((BArrayType) exprType).getDimensions();
List<Expression> indexExprs = new ArrayList<Expression>();
for (int i = 0; i < dimensions; i++) {
if (fieldExpr == null) {
break;
}
indexExprs.add(fieldExpr.getVarRef());
fieldExpr = (FieldAccessExpr) fieldExpr.getFieldExpr();
}
Collections.reverse(indexExprs);
ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder = new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
builder.setVarName(varRefExpr.getVarName());
builder.setPkgName(varRefExpr.getPkgName());
builder.setPkgPath(varRefExpr.getPkgPath());
builder.setIndexExprs(indexExprs.toArray(new Expression[0]));
builder.setArrayMapVarRefExpr(varRefExpr);
builder.setNodeLocation(parentExpr.getNodeLocation());
ArrayMapAccessExpr accessExpr = builder.build();
parentExpr.setFieldExpr(fieldExpr);
parentExpr.setVarRef(accessExpr);
accessExpr.setLHSExpr(parentExpr.isLHSExpr());
visitField(parentExpr, enclosingScope);
}
    /**
     * Resolves the type mapper used by an explicit cast from {@code sourceType}
     * to {@code targetType} and links it to the cast expression. Lookup order:
     * the current package's lattice, the global explicit-cast lattice scoped to
     * the current package, the global explicit-cast lattice unscoped, and
     * finally a direct symbol lookup by the mapper's mangled name.
     *
     * @param typeCastExpression cast expression to link
     * @param sourceType type being cast from
     * @param targetType type being cast to
     */
    private void linkTypeMapper(TypeCastExpression typeCastExpression, BType sourceType, BType targetType) {
        TypeEdge newEdge = null;
        TypeMapper typeMapper;
        // 1) Mapper defined in the current package's own lattice.
        newEdge = packageTypeLattice.getEdgeFromTypes(sourceType, targetType, currentPkg);
        if (newEdge != null) {
            typeMapper = newEdge.getTypeMapper();
            if (typeMapper != null) {
                typeCastExpression.setCallableUnit(typeMapper);
            }
        } else {
            // 2) Global explicit-cast lattice, scoped to the current package.
            newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, currentPkg);
            if (newEdge != null) {
                typeMapper = newEdge.getTypeMapper();
                if (typeMapper != null) {
                    typeCastExpression.setCallableUnit(typeMapper);
                }
            } else {
                // 3) Global explicit-cast lattice without a package scope.
                newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
                if (newEdge != null) {
                    typeMapper = newEdge.getTypeMapper();
                    if (typeMapper != null) {
                        typeCastExpression.setCallableUnit(typeMapper);
                    }
                } else {
                    // 4) Fall back to resolving the mapper symbol by name.
                    String pkgPath = typeCastExpression.getPackagePath();
                    Expression[] exprs = typeCastExpression.getArgExprs();
                    BType[] paramTypes = new BType[exprs.length];
                    for (int i = 0; i < exprs.length; i++) {
                        paramTypes[i] = exprs[i].getType();
                    }
                    SymbolName symbolName = LangModelUtils.getTypeMapperSymName(pkgPath,
                            sourceType, targetType);
                    BLangSymbol typeMapperSymbol = currentScope.resolve(symbolName);
                    if (typeMapperSymbol == null) {
                        BLangExceptionHelper.throwSemanticError(typeCastExpression,
                                SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType);
                    }
                    if (typeMapperSymbol instanceof NativeUnitProxy) {
                        // Native mapper: resolve its declared return type names.
                        NativeUnit nativeUnit = ((NativeUnitProxy) typeMapperSymbol).load();
                        SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
                        BType[] returnTypes = new BType[returnParamTypeNames.length];
                        for (int i = 0; i < returnParamTypeNames.length; i++) {
                            SimpleTypeName typeName = returnParamTypeNames[i];
                            BType bType = BTypes.resolveType(typeName, currentScope,
                                    typeCastExpression.getNodeLocation());
                            returnTypes[i] = bType;
                        }
                        if (!(nativeUnit instanceof TypeMapper)) {
                            BLangExceptionHelper.throwSemanticError(typeCastExpression,
                                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
                        }
                        typeMapper = (TypeMapper) nativeUnit;
                        typeMapper.setReturnParamTypes(returnTypes);
                    } else {
                        if (!(typeMapperSymbol instanceof TypeMapper)) {
                            BLangExceptionHelper.throwSemanticError(typeCastExpression,
                                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
                        }
                        typeMapper = (TypeMapper) typeMapperSymbol;
                    }
                    if (typeMapper != null) {
                        typeMapper.setParameterTypes(paramTypes);
                        typeCastExpression.setCallableUnit(typeMapper);
                    } else {
                        BLangExceptionHelper.throwSemanticError(typeCastExpression,
                                SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType);
                    }
                }
            }
        }
    }
private TypeCastExpression checkWideningPossible(BType lhsType, Expression rhsExpr) {
BType rhsType = rhsExpr.getType();
if (rhsType == null && rhsExpr instanceof TypeCastExpression) {
rhsType = BTypes.resolveType(((TypeCastExpression) rhsExpr).getTypeName(), currentScope, null);
}
TypeCastExpression newExpr = null;
TypeEdge newEdge;
newEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rhsType, lhsType, null);
if (newEdge != null) {
newExpr = new TypeCastExpression(rhsExpr.getNodeLocation(), rhsExpr, lhsType);
newExpr.setEvalFunc(newEdge.getTypeMapperFunction());
}
return newExpr;
}
private void setMemoryLocation(VariableDef variableDef) {
if (currentScope.getScopeName() == SymbolScope.ScopeName.LOCAL) {
if (currentScope.getEnclosingScope().getScopeName() == SymbolScope.ScopeName.WORKER) {
variableDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
} else {
variableDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
}
} else if (currentScope.getScopeName() == SymbolScope.ScopeName.SERVICE) {
variableDef.setMemoryLocation(new ServiceVarLocation(++staticMemAddrOffset));
} else if (currentScope.getScopeName() == SymbolScope.ScopeName.CONNECTOR) {
variableDef.setMemoryLocation(new ConnectorVarLocation(++connectorMemAddrOffset));
} else if (currentScope.getScopeName() == SymbolScope.ScopeName.STRUCT) {
variableDef.setMemoryLocation(new StructVarLocation(++structMemAddrOffset));
} else if (currentScope.getScopeName() == SymbolScope.ScopeName.PACKAGE) {
variableDef.setMemoryLocation(new GlobalVarLocation(++staticMemAddrOffset));
}
}
    /**
     * Defines the given functions in the current scope: resolves parameter and
     * return types, builds each function's signature-qualified symbol name,
     * and registers non-native functions. A native function must already be
     * registered; redefining an existing symbol is an error.
     *
     * @param functions functions declared in the current package
     */
    private void defineFunctions(Function[] functions) {
        for (Function function : functions) {
            // Resolve parameter types to build the signature-qualified name.
            ParameterDef[] paramDefArray = function.getParameterDefs();
            BType[] paramTypes = new BType[paramDefArray.length];
            for (int i = 0; i < paramDefArray.length; i++) {
                ParameterDef paramDef = paramDefArray[i];
                BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
                paramDef.setType(bType);
                paramTypes[i] = bType;
            }
            function.setParameterTypes(paramTypes);
            FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(function.getName(),
                    null, paramTypes);
            function.setSymbolName(symbolName);
            BLangSymbol functionSymbol = currentScope.resolve(symbolName);
            // A native function must already have a registered implementation.
            if (function.isNative() && functionSymbol == null) {
                BLangExceptionHelper.throwSemanticError(function,
                        SemanticErrors.UNDEFINED_FUNCTION, function.getName());
            }
            if (!function.isNative()) {
                if (functionSymbol != null) {
                    BLangExceptionHelper.throwSemanticError(function,
                            SemanticErrors.REDECLARED_SYMBOL, function.getName());
                }
                currentScope.define(symbolName, function);
            }
            // Resolve the return parameter types.
            ParameterDef[] returnParameters = function.getReturnParameters();
            BType[] returnTypes = new BType[returnParameters.length];
            for (int i = 0; i < returnParameters.length; i++) {
                ParameterDef paramDef = returnParameters[i];
                BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
                paramDef.setType(bType);
                returnTypes[i] = bType;
            }
            function.setReturnParamTypes(returnTypes);
        }
    }
    /**
     * Defines the given type mappers: resolves their source and target types,
     * registers non-native mappers in the current scope under a mangled name,
     * and records the conversion edge in the package's type lattice.
     *
     * @param typeMappers type mappers declared in the current package
     */
    private void defineTypeMappers(TypeMapper[] typeMappers) {
        for (TypeMapper typeMapper : typeMappers) {
            NodeLocation location = typeMapper.getNodeLocation();
            // A type mapper has exactly one parameter (source) and one return
            // value (target).
            SimpleTypeName sourceType = typeMapper.getParameterDefs()[0].getTypeName();
            BType sourceBType = BTypes.resolveType(sourceType, currentScope, location);
            typeMapper.setParameterTypes(new BType[] { sourceBType });
            SimpleTypeName targetType = typeMapper.getReturnParameters()[0].getTypeName();
            BType targetBType = BTypes.resolveType(targetType, currentScope, location);
            TypeVertex sourceV = new TypeVertex(sourceBType);
            TypeVertex targetV = new TypeVertex(targetBType);
            typeMapper.setReturnParamTypes(new BType[] { targetBType });
            SymbolName symbolName = LangModelUtils
                    .getTypeMapperSymName(typeMapper.getPackagePath(), sourceBType, targetBType);
            typeMapper.setSymbolName(symbolName);
            BLangSymbol typConvertorSymbol = currentScope.resolve(symbolName);
            // A native mapper must already have a registered implementation.
            if (typeMapper.isNative() && typConvertorSymbol == null) {
                BLangExceptionHelper
                        .throwSemanticError(typeMapper, SemanticErrors.UNDEFINED_TYPE_MAPPER, typeMapper.getName());
            }
            if (!typeMapper.isNative()) {
                if (typConvertorSymbol != null) {
                    BLangExceptionHelper
                            .throwSemanticError(typeMapper, SemanticErrors.REDECLARED_SYMBOL, typeMapper.getName());
                }
                currentScope.define(symbolName, typeMapper);
            }
            // Record the conversion edge in the package's type lattice.
            packageTypeLattice.addVertex(sourceV, true);
            packageTypeLattice.addVertex(targetV, true);
            packageTypeLattice.addEdge(sourceV, targetV, typeMapper,
                    typeMapper.getPackagePath() != null ? typeMapper.getPackagePath() : ".");
        }
    }
    /**
     * Defines the given connectors in the current scope. The first pass
     * registers each connector symbol and synthesizes its {@code <init>}
     * function from the connector's variable definitions; the second pass
     * defines each connector's actions inside the connector's own scope.
     *
     * @param connectorDefArray connectors declared in the current package
     */
    private void defineConnectors(BallerinaConnectorDef[] connectorDefArray) {
        for (BallerinaConnectorDef connectorDef : connectorDefArray) {
            String connectorName = connectorDef.getName();
            SymbolName connectorSymbolName = new SymbolName(connectorName);
            BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName);
            // A native connector must already have a registered implementation.
            if (connectorDef.isNative() && connectorSymbol == null) {
                BLangExceptionHelper.throwSemanticError(connectorDef,
                        SemanticErrors.UNDEFINED_CONNECTOR, connectorDef.getName());
            }
            if (!connectorDef.isNative()) {
                if (connectorSymbol != null) {
                    BLangExceptionHelper.throwSemanticError(connectorDef,
                            SemanticErrors.REDECLARED_SYMBOL, connectorName);
                }
                currentScope.define(connectorSymbolName, connectorDef);
            }
            // Synthesize the <init> function from the variable-def statements.
            BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
                    connectorDef.getNodeLocation(), connectorDef);
            for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) {
                blockStmtBuilder.addStmt(variableDefStmt);
            }
            BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
                    new BallerinaFunction.BallerinaFunctionBuilder(connectorDef);
            functionBuilder.setNodeLocation(connectorDef.getNodeLocation());
            functionBuilder.setName(connectorName + ".<init>");
            functionBuilder.setPkgPath(connectorDef.getPackagePath());
            functionBuilder.setBody(blockStmtBuilder.build());
            connectorDef.setInitFunction(functionBuilder.buildFunction());
        }
        // Second pass: actions are defined only after every connector symbol
        // exists — presumably so action definitions can see sibling
        // connectors; confirm against the caller's ordering requirements.
        for (BallerinaConnectorDef connectorDef : connectorDefArray) {
            openScope(connectorDef);
            for (BallerinaAction bAction : connectorDef.getActions()) {
                bAction.setConnectorDef(connectorDef);
                defineAction(bAction, connectorDef);
            }
            closeScope();
        }
    }
    /**
     * Defines an action of the given connector: resolves parameter and return
     * types and registers the action's signature-qualified symbol. Native
     * actions are resolved from the loaded native connector instead of being
     * defined here.
     *
     * @param action action to define
     * @param connectorDef connector that owns the action
     */
    private void defineAction(BallerinaAction action, BallerinaConnectorDef connectorDef) {
        // Resolve parameter types to build the signature-qualified name.
        ParameterDef[] paramDefArray = action.getParameterDefs();
        BType[] paramTypes = new BType[paramDefArray.length];
        for (int i = 0; i < paramDefArray.length; i++) {
            ParameterDef paramDef = paramDefArray[i];
            BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
            paramDef.setType(bType);
            paramTypes[i] = bType;
        }
        action.setParameterTypes(paramTypes);
        SymbolName symbolName = LangModelUtils.getActionSymName(action.getName(), connectorDef.getName(),
                action.getPackagePath(), paramTypes);
        action.setSymbolName(symbolName);
        BLangSymbol actionSymbol = currentScope.resolve(symbolName);
        if (action.isNative()) {
            // A native action must exist on the loaded native connector.
            AbstractNativeConnector connector = (AbstractNativeConnector) BTypes
                    .resolveType(new SimpleTypeName(connectorDef.getName()),
                            currentScope, connectorDef.getNodeLocation());
            actionSymbol = connector.resolve(symbolName);
            if (actionSymbol == null) {
                BLangExceptionHelper.throwSemanticError(connectorDef,
                        SemanticErrors.UNDEFINED_ACTION_IN_CONNECTOR, action.getName(), connectorDef.getName());
            }
        } else {
            if (actionSymbol != null) {
                BLangExceptionHelper.throwSemanticError(action, SemanticErrors.REDECLARED_SYMBOL, action.getName());
            }
            currentScope.define(symbolName, action);
        }
        // Resolve the return parameter types.
        ParameterDef[] returnParameters = action.getReturnParameters();
        BType[] returnTypes = new BType[returnParameters.length];
        for (int i = 0; i < returnParameters.length; i++) {
            ParameterDef paramDef = returnParameters[i];
            BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
            paramDef.setType(bType);
            returnTypes[i] = bType;
        }
        action.setReturnParamTypes(returnTypes);
    }
    /**
     * Defines the given services in the current scope, synthesizes each
     * service's {@code <init>} function from its variable definitions, and
     * defines the service's resources inside the service's own scope.
     *
     * @param services services declared in the current package
     */
    private void defineServices(Service[] services) {
        for (Service service : services) {
            if (currentScope.resolve(service.getSymbolName()) != null) {
                BLangExceptionHelper.throwSemanticError(service, SemanticErrors.REDECLARED_SYMBOL, service.getName());
            }
            currentScope.define(service.getSymbolName(), service);
            // Synthesize the <init> function from the variable-def statements.
            BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
                    service.getNodeLocation(), service);
            for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) {
                blockStmtBuilder.addStmt(variableDefStmt);
            }
            BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
                    new BallerinaFunction.BallerinaFunctionBuilder(service);
            functionBuilder.setNodeLocation(service.getNodeLocation());
            functionBuilder.setName(service.getName() + ".<init>");
            functionBuilder.setPkgPath(service.getPackagePath());
            functionBuilder.setBody(blockStmtBuilder.build());
            service.setInitFunction(functionBuilder.buildFunction());
            // Resources are defined inside the service's own scope.
            openScope(service);
            for (Resource resource : service.getResources()) {
                defineResource(resource, service);
            }
            closeScope();
        }
    }
private void defineResource(Resource resource, Service service) {
ParameterDef[] paramDefArray = resource.getParameterDefs();
BType[] paramTypes = new BType[paramDefArray.length];
for (int i = 0; i < paramDefArray.length; i++) {
ParameterDef paramDef = paramDefArray[i];
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
paramTypes[i] = bType;
}
resource.setParameterTypes(paramTypes);
SymbolName symbolName = LangModelUtils.getActionSymName(resource.getName(), service.getName(),
resource.getPackagePath(), paramTypes);
resource.setSymbolName(symbolName);
if (currentScope.resolve(symbolName) != null) {
BLangExceptionHelper.throwSemanticError(resource, SemanticErrors.REDECLARED_SYMBOL, resource.getName());
}
currentScope.define(symbolName, resource);
}
/**
 * Defines each struct of the package in the current scope, builds a
 * synthetic {@code <name>.<init>} function from its field definition
 * statements, computes field offsets / struct memory size, and finally
 * adds implicit cast edges between structs to the type lattice.
 *
 * @param structDefs structs declared in the package being analyzed
 */
private void defineStructs(StructDef[] structDefs) {
    // First pass: register every struct and build its initializer so that
    // later passes can resolve mutually referencing struct types.
    for (StructDef structDef : structDefs) {
        SymbolName symbolName = new SymbolName(structDef.getName());
        if (currentScope.resolve(symbolName) != null) {
            BLangExceptionHelper.throwSemanticError(structDef,
                    SemanticErrors.REDECLARED_SYMBOL, structDef.getName());
        }
        currentScope.define(symbolName, structDef);
        BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
                structDef.getNodeLocation(), structDef);
        for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) {
            blockStmtBuilder.addStmt(variableDefStmt);
        }
        BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
                new BallerinaFunction.BallerinaFunctionBuilder(structDef);
        functionBuilder.setNodeLocation(structDef.getNodeLocation());
        // Bug fix: use the struct's name, not StructDef.toString(), so the
        // generated function follows the same "<name>.<init>" convention as
        // service initializers (see defineServices).
        functionBuilder.setName(structDef.getName() + ".<init>");
        functionBuilder.setPkgPath(structDef.getPackagePath());
        functionBuilder.setBody(blockStmtBuilder.build());
        structDef.setInitFunction(functionBuilder.buildFunction());
    }
    // Second pass: analyze field definitions inside each struct's own scope
    // and derive the struct memory size from the last assigned field offset.
    for (StructDef structDef : structDefs) {
        SymbolScope tmpScope = currentScope;
        currentScope = structDef;
        for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) {
            fieldDefStmt.accept(this);
        }
        structDef.setStructMemorySize(structMemAddrOffset + 1);
        structMemAddrOffset = -1;
        currentScope = tmpScope;
    }
    // Third pass: add struct-to-struct cast edges once all structs are sized.
    for (StructDef structDef : structDefs) {
        TypeLattice.addStructEdges(structDef, currentScope);
    }
}
/**
 * Registers every annotation definition of the package in the current
 * scope, rejecting duplicate names.
 *
 * @param annotationDefs annotation definitions declared in the package
 */
private void defineAnnotations(AnnotationDef[] annotationDefs) {
    for (AnnotationDef annotationDef : annotationDefs) {
        SymbolName name = new SymbolName(annotationDef.getName());
        if (currentScope.resolve(name) != null) {
            BLangExceptionHelper.throwSemanticError(annotationDef,
                    SemanticErrors.REDECLARED_SYMBOL, annotationDef.getSymbolName().getName());
        }
        currentScope.define(name, annotationDef);
    }
}
/**
 * Resolves and sets the type of every field of every struct. Runs after
 * all structs have been defined so fields may reference other structs.
 *
 * @param structDefs structs whose field types must be resolved
 */
private void resolveStructFieldTypes(StructDef[] structDefs) {
    for (StructDef structDef : structDefs) {
        for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) {
            VariableDef field = fieldDefStmt.getVariableDef();
            field.setType(BTypes.resolveType(field.getTypeName(), currentScope,
                    field.getNodeLocation()));
        }
    }
}
/**
 * Reports an unreachable-statement error if any non-comment statement
 * exists at or after {@code stmtIndex}. Comment statements are skipped.
 *
 * @param stmts     statements of the enclosing block
 * @param stmtIndex index from which to check for unreachable statements
 */
private void checkUnreachableStmt(Statement[] stmts, int stmtIndex) {
    // Iterative form of the comment-skipping scan: the first statement
    // that is not a comment is unreachable.
    for (int idx = stmtIndex; idx < stmts.length; idx++) {
        if (!(stmts[idx] instanceof CommentStmt)) {
            BLangExceptionHelper.throwSemanticError(stmts[idx], SemanticErrors.UNREACHABLE_STATEMENT);
            return;
        }
    }
}
/**
 * Recursively visits a nested init expression. Reconstructs the generic
 * {@link RefTypeInitExpr} as the init-expression subtype matching the
 * expected field type (array/JSON-array/map/JSON/struct init), then tags
 * it with the inherited type.
 *
 * @param expr      generic init expression parsed for the field value
 * @param fieldType type of the current field
 * @return reconstructed nested init expression
 */
private RefTypeInitExpr getNestedInitExpr(Expression expr, BType fieldType) {
    RefTypeInitExpr refTypeInitExpr = (RefTypeInitExpr) expr;
    if (refTypeInitExpr instanceof ArrayInitExpr) {
        if (fieldType == BTypes.typeAny || fieldType == BTypes.typeMap) {
            // An array literal in an any/map context is treated as any[].
            fieldType = BTypes.resolveType(new SimpleTypeName(BTypes.typeAny.getName(), true, 1), currentScope,
                    expr.getNodeLocation());
        } else if (fieldType == BTypes.typeJSON) {
            refTypeInitExpr = new JSONArrayInitExpr(refTypeInitExpr.getNodeLocation(),
                    refTypeInitExpr.getArgExprs());
        }
    } else if (!(refTypeInitExpr instanceof BacktickExpr)) {
        // A brace-literal in an any context defaults to a map literal.
        if (fieldType == BTypes.typeAny) {
            fieldType = BTypes.typeMap;
        }
        if (fieldType == BTypes.typeMap) {
            refTypeInitExpr = new MapInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        } else if (fieldType == BTypes.typeJSON) {
            refTypeInitExpr = new JSONInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        } else if (fieldType instanceof StructDef) {
            refTypeInitExpr = new StructInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        }
    }
    refTypeInitExpr.setInheritedType(fieldType);
    return refTypeInitExpr;
}
/**
 * Visits and validates a map/json initializer expression: normalizes bare
 * identifier keys to string literals, recursively rewrites nested init
 * values, and for JSON targets inserts widening casts (or errors out)
 * for incompatible value types.
 *
 * @param initExpr Expression to visit.
 */
private void visitMapJsonInitExpr(RefTypeInitExpr initExpr) {
    BType inheritedType = initExpr.getInheritedType();
    initExpr.setType(inheritedType);
    Expression[] argExprs = initExpr.getArgExprs();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr;
        Expression keyExpr = keyValueExpr.getKeyExpr();
        // A bare identifier key (e.g. {name: ...}) is sugar for a string
        // literal key; replace it before analysis.
        if (keyExpr instanceof VariableRefExpr) {
            BString key = new BString(((VariableRefExpr) keyExpr).getVarName());
            keyExpr = new BasicLiteral(keyExpr.getNodeLocation(), new SimpleTypeName(TypeConstants.STRING_TNAME),
                    key);
            keyValueExpr.setKeyExpr(keyExpr);
        }
        visitSingleValueExpr(keyExpr);
        Expression valueExpr = keyValueExpr.getValueExpr();
        // Nested brace/bracket literals are rewritten to the concrete
        // init-expression subtype for the inherited type.
        if (valueExpr instanceof RefTypeInitExpr) {
            valueExpr = getNestedInitExpr(valueExpr, inheritedType);
            keyValueExpr.setValueExpr(valueExpr);
        }
        valueExpr.accept(this);
        // A map accepts any value type; no further checking needed.
        if (inheritedType == BTypes.typeMap) {
            continue;
        }
        // JSON target: value must be a value type or JSON-compatible.
        BType valueType = valueExpr.getType();
        if (BTypes.isValueType(valueType) || NativeCastMapper.isCompatible(BTypes.typeJSON, valueType)) {
            continue;
        }
        // Otherwise insert a widening cast to JSON, or fail.
        // (throwSemanticError does not return, so the assignment below only
        // runs when a cast was found.)
        TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr);
        if (typeCastExpr == null) {
            BLangExceptionHelper.throwSemanticError(initExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                    valueExpr.getType(), BTypes.typeJSON);
        }
        argExprs[i] = typeCastExpr;
    }
}
/**
 * Prepends an invocation of each dependent package's init function to the
 * package init function body being built.
 *
 * @param initFunctionList  init functions of already-analyzed dependencies
 * @param blockStmtBuilder  builder for the init function body
 * @param initFuncLocation  source location attributed to the generated calls
 */
private void addDependentPkgInitCalls(List<BallerinaFunction> initFunctionList,
                                      BlockStmt.BlockStmtBuilder blockStmtBuilder, NodeLocation initFuncLocation) {
    for (BallerinaFunction dependencyInit : initFunctionList) {
        FunctionInvocationExpr invocation = new FunctionInvocationExpr(initFuncLocation,
                dependencyInit.getName(),
                null,
                dependencyInit.getPackagePath(), new Expression[] {});
        // Link directly; dependency init functions need no symbol lookup.
        invocation.setCallableUnit(dependencyInit);
        blockStmtBuilder.addStmt(new FunctionInvocationStmt(initFuncLocation, invocation));
    }
}
}
|
class SemanticAnalyzer implements NodeVisitor {
// Next free slot index per memory area; -1 means "empty". Each offset is
// reset to -1 once the owning construct's size has been recorded.
private int stackFrameOffset = -1;
private int staticMemAddrOffset = -1;
private int connectorMemAddrOffset = -1;
private int structMemAddrOffset = -1;
private int workerMemAddrOffset = -1;
// Package path of the package currently being analyzed.
private String currentPkg;
// Implicit/explicit cast edges accumulated across analyzed packages.
private TypeLattice packageTypeLattice;
// Callable unit currently being analyzed; parentCallableUnit is saved
// while descending into a worker.
private CallableUnit currentCallableUnit = null;
private CallableUnit parentCallableUnit = null;
// Matches ${name}, ${name[0]} and ${name["key"]} placeholders
// (used for interpolated expressions).
private static final String patternString = "\\$\\{((\\w+)(\\[(\\d+|\\\"(\\w+)\\\")\\])?)\\}";
private static final Pattern compiledPattern = Pattern.compile(patternString);
// Depth of while-loop nesting; break is only legal when > 0.
private int whileStmtCount = 0;
// Scope stack is maintained by reassigning currentScope (openScope/closeScope).
private SymbolScope currentScope;
private SymbolScope nativeScope;
// Builder collecting statements for the package <init> function.
private BlockStmt.BlockStmtBuilder pkgInitFuncStmtBuilder;

/**
 * Creates an analyzer rooted at the given program scope.
 *
 * @param programScope top-level scope of the program being analyzed
 */
public SemanticAnalyzer(BLangProgram programScope) {
    currentScope = programScope;
    this.nativeScope = programScope.getNativeScope();
}
/**
 * Analyzes a whole program: visits the packages relevant to the program
 * category (main, service, or library), then records the total static
 * memory size accumulated across all of them.
 */
@Override
public void visit(BLangProgram bLangProgram) {
    BLangPackage mainPkg = bLangProgram.getMainPackage();
    if (bLangProgram.getProgramCategory() == BLangProgram.Category.MAIN_PROGRAM) {
        mainPkg.accept(this);
    } else if (bLangProgram.getProgramCategory() == BLangProgram.Category.SERVICE_PROGRAM) {
        BLangPackage[] servicePackages = bLangProgram.getServicePackages();
        for (BLangPackage servicePkg : servicePackages) {
            servicePkg.accept(this);
        }
    } else {
        BLangPackage[] libraryPackages = bLangProgram.getLibraryPackages();
        for (BLangPackage libraryPkg : libraryPackages) {
            libraryPkg.accept(this);
        }
    }
    // staticMemAddrOffset is the last allocated slot; size is offset + 1.
    int setSizeOfStaticMem = staticMemAddrOffset + 1;
    bLangProgram.setSizeOfStaticMem(setSizeOfStaticMem);
    staticMemAddrOffset = -1;
}
/**
 * Analyzes one package: first its dependencies (once each), then defines
 * all top-level symbols in dependency order, visits every compilation
 * unit, and finally assembles the synthetic package {@code <init>}
 * function. The define/resolve call order below is significant.
 */
@Override
public void visit(BLangPackage bLangPackage) {
    BLangPackage[] dependentPackages = bLangPackage.getDependentPackages();
    List<BallerinaFunction> initFunctionList = new ArrayList<>();
    for (int i = 0; i < dependentPackages.length; i++) {
        BLangPackage dependentPkg = dependentPackages[i];
        // Skip packages already analyzed via another dependency path.
        if (dependentPkg.isSymbolsDefined()) {
            continue;
        }
        dependentPkg.accept(this);
        initFunctionList.add(dependentPkg.getInitFunction());
    }
    currentScope = bLangPackage;
    currentPkg = bLangPackage.getPackagePath();
    // Merge cast edges from previously analyzed packages into this one.
    if (packageTypeLattice != null) {
        TypeLattice currentLattice = bLangPackage.getTypeLattice();
        currentLattice.merge(packageTypeLattice, currentPkg);
        packageTypeLattice = currentLattice;
    } else {
        packageTypeLattice = bLangPackage.getTypeLattice();
    }
    // Synthesize a location for generated code when the package has none.
    NodeLocation pkgLocation = bLangPackage.getNodeLocation();
    if (pkgLocation == null) {
        BallerinaFile[] ballerinaFiles = bLangPackage.getBallerinaFiles();
        String filename = ballerinaFiles.length == 0 ? "" :
                ballerinaFiles[0].getFileName();
        pkgLocation = new NodeLocation("", filename, 0);
    }
    BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
            new BallerinaFunction.BallerinaFunctionBuilder(bLangPackage);
    functionBuilder.setNodeLocation(pkgLocation);
    functionBuilder.setIdentifier(new Identifier(bLangPackage.getPackagePath() + ".<init>"));
    functionBuilder.setPkgPath(bLangPackage.getPackagePath());
    pkgInitFuncStmtBuilder = new BlockStmt.BlockStmtBuilder(bLangPackage.getNodeLocation(),
            bLangPackage);
    // Dependency init calls come first in the init function body.
    addDependentPkgInitCalls(initFunctionList, pkgInitFuncStmtBuilder, pkgLocation);
    // Symbol definition order matters: structs before connectors, field
    // types resolved before functions, etc.
    defineStructs(bLangPackage.getStructDefs());
    defineConnectors(bLangPackage.getConnectors());
    resolveStructFieldTypes(bLangPackage.getStructDefs());
    defineFunctions(bLangPackage.getFunctions());
    defineTypeMappers(bLangPackage.getTypeMappers());
    defineServices(bLangPackage.getServices());
    defineAnnotations(bLangPackage.getAnnotationDefs());
    for (CompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) {
        compilationUnit.accept(this);
    }
    // Terminate the init function body and finalize it.
    ReturnStmt returnStmt = new ReturnStmt(pkgLocation, new Expression[0]);
    pkgInitFuncStmtBuilder.addStmt(returnStmt);
    functionBuilder.setBody(pkgInitFuncStmtBuilder.build());
    BallerinaFunction initFunction = functionBuilder.buildFunction();
    initFunction.setReturnParamTypes(new BType[0]);
    bLangPackage.setInitFunction(initFunction);
    bLangPackage.setSymbolsDefined(true);
}
/** No file-level analysis; all work happens per top-level construct. */
@Override
public void visit(BallerinaFile bFile) {
}
/** Imports are handled during package dependency analysis; nothing to do here. */
@Override
public void visit(ImportPackage importPkg) {
}
/**
 * Analyzes a constant definition: the declared type must be a value type,
 * the name unique in the package, and the constant gets a static-memory
 * slot. Its initialization is deferred to the package init function.
 */
@Override
public void visit(ConstDef constDef) {
    SimpleTypeName typeName = constDef.getTypeName();
    BType bType = BTypes.resolveType(typeName, currentScope, constDef.getNodeLocation());
    constDef.setType(bType);
    // Constants are restricted to value types.
    if (!BTypes.isValueType(bType)) {
        BLangExceptionHelper.throwSemanticError(constDef, SemanticErrors.INVALID_TYPE, typeName);
    }
    SymbolName symbolName = new SymbolName(constDef.getName(), currentPkg);
    if (currentScope.resolve(symbolName) != null) {
        BLangExceptionHelper.throwSemanticError(constDef,
                SemanticErrors.REDECLARED_SYMBOL, constDef.getName());
    }
    currentScope.define(symbolName, constDef);
    for (AnnotationAttachment annotationAttachment : constDef.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.CONSTANT);
        annotationAttachment.accept(this);
    }
    // Allocate a static-memory slot for the constant value.
    ConstantLocation memLocation = new ConstantLocation(++staticMemAddrOffset);
    constDef.setMemoryLocation(memLocation);
    // Emit "const = rhs" as a variable-def statement into the package
    // init function so the value is assigned at startup.
    VariableRefExpr varRefExpr = new VariableRefExpr(constDef.getNodeLocation(), constDef.getName());
    varRefExpr.setVariableDef(constDef);
    VariableDefStmt varDefStmt = new VariableDefStmt(constDef.getNodeLocation(),
            constDef, varRefExpr, constDef.getRhsExpr());
    pkgInitFuncStmtBuilder.addStmt(varDefStmt);
}
/**
 * Analyzes a global variable definition and, when it has an initializer,
 * appends the corresponding assignment to the package init function.
 */
@Override
public void visit(GlobalVariableDef globalVarDef) {
    VariableDefStmt defStmt = globalVarDef.getVariableDefStmt();
    defStmt.accept(this);
    Expression initExpr = defStmt.getRExpr();
    if (initExpr == null) {
        return;
    }
    // Global initializers run inside the package <init> function.
    AssignStmt initAssignment = new AssignStmt(defStmt.getNodeLocation(),
            new Expression[]{defStmt.getLExpr()}, initExpr);
    pkgInitFuncStmtBuilder.addStmt(initAssignment);
}
/**
 * Analyzes a service inside its own scope: annotations, service-level
 * variables, then each resource.
 */
@Override
public void visit(Service service) {
    openScope(service);
    for (AnnotationAttachment attachment : service.getAnnotations()) {
        attachment.setAttachedPoint(AttachmentPoint.SERVICE);
        attachment.accept(this);
    }
    for (VariableDefStmt varDefStmt : service.getVariableDefStmts()) {
        varDefStmt.accept(this);
    }
    for (Resource serviceResource : service.getResources()) {
        serviceResource.accept(this);
    }
    closeScope();
}
/**
 * Analyzes a connector definition: annotations, parameters and variables
 * (allocated in connector memory), then each action. Records the total
 * connector memory size at the end.
 */
@Override
public void visit(BallerinaConnectorDef connector) {
    openScope(connector);
    for (AnnotationAttachment annotationAttachment : connector.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.CONNECTOR);
        annotationAttachment.accept(this);
    }
    // Connector parameters and variables live in connector memory slots.
    for (ParameterDef parameterDef : connector.getParameterDefs()) {
        parameterDef.setMemoryLocation(new ConnectorVarLocation(++connectorMemAddrOffset));
        parameterDef.accept(this);
    }
    for (VariableDefStmt variableDefStmt : connector.getVariableDefStmts()) {
        variableDefStmt.accept(this);
    }
    for (BallerinaAction action : connector.getActions()) {
        action.accept(this);
    }
    // connectorMemAddrOffset is the last allocated slot; size is offset + 1.
    int sizeOfConnectorMem = connectorMemAddrOffset + 1;
    connector.setSizeOfConnectorMem(sizeOfConnectorMem);
    connectorMemAddrOffset = -1;
    closeScope();
}
/**
 * Analyzes a resource: annotations, parameters (allocated on the stack
 * frame), workers, then the body. Records the resource's stack frame size.
 */
@Override
public void visit(Resource resource) {
    openScope(resource);
    currentCallableUnit = resource;
    for (AnnotationAttachment annotationAttachment : resource.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.RESOURCE);
        annotationAttachment.accept(this);
    }
    for (ParameterDef parameterDef : resource.getParameterDefs()) {
        parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        parameterDef.accept(this);
    }
    // Workers are analyzed and then registered as symbols of this scope.
    for (Worker worker : resource.getWorkers()) {
        visit(worker);
        addWorkerSymbol(worker);
    }
    BlockStmt blockStmt = resource.getResourceBody();
    blockStmt.accept(this);
    // stackFrameOffset is the last allocated slot; size is offset + 1.
    int sizeOfStackFrame = stackFrameOffset + 1;
    resource.setStackFrameSize(sizeOfStackFrame);
    stackFrameOffset = -1;
    currentCallableUnit = null;
    closeScope();
}
/**
 * Analyzes a function: annotations, parameters and named return values
 * (stack-allocated), workers and the body for non-native functions, and
 * verifies that a function with return values always returns. Records the
 * function's stack frame size.
 */
@Override
public void visit(BallerinaFunction function) {
    openScope(function);
    currentCallableUnit = function;
    for (AnnotationAttachment annotationAttachment : function.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.FUNCTION);
        annotationAttachment.accept(this);
    }
    for (ParameterDef parameterDef : function.getParameterDefs()) {
        parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        parameterDef.accept(this);
    }
    for (ParameterDef parameterDef : function.getReturnParameters()) {
        // Only named return parameters need a stack slot.
        if (parameterDef.getName() != null) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        }
        parameterDef.accept(this);
    }
    // Native functions have no Ballerina body or workers to analyze.
    if (!function.isNative()) {
        for (Worker worker : function.getWorkers()) {
            worker.accept(this);
            addWorkerSymbol(worker);
        }
        BlockStmt blockStmt = function.getCallableUnitBody();
        blockStmt.accept(this);
        if (function.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) {
            BLangExceptionHelper.throwSemanticError(function, SemanticErrors.MISSING_RETURN_STATEMENT);
        }
    }
    int sizeOfStackFrame = stackFrameOffset + 1;
    function.setStackFrameSize(sizeOfStackFrame);
    stackFrameOffset = -1;
    currentCallableUnit = null;
    closeScope();
}
/**
 * Analyzes a type mapper: annotations, parameters and named return values
 * (stack-allocated), and the body for non-native mappers. Records the
 * mapper's stack frame size.
 */
@Override
public void visit(BTypeMapper typeMapper) {
    openScope(typeMapper);
    currentCallableUnit = typeMapper;
    for (AnnotationAttachment annotationAttachment : typeMapper.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.TYPEMAPPER);
        annotationAttachment.accept(this);
    }
    for (ParameterDef parameterDef : typeMapper.getParameterDefs()) {
        parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        parameterDef.accept(this);
    }
    for (ParameterDef parameterDef : typeMapper.getReturnParameters()) {
        // Only named return parameters need a stack slot.
        if (parameterDef.getName() != null) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        }
        parameterDef.accept(this);
    }
    if (!typeMapper.isNative()) {
        // NOTE(review): unlike functions, the body is visited by swapping
        // currentScope directly rather than via openScope/closeScope.
        BlockStmt blockStmt = typeMapper.getCallableUnitBody();
        currentScope = blockStmt;
        blockStmt.accept(this);
        currentScope = blockStmt.getEnclosingScope();
    }
    int sizeOfStackFrame = stackFrameOffset + 1;
    typeMapper.setStackFrameSize(sizeOfStackFrame);
    stackFrameOffset = -1;
    currentCallableUnit = null;
    closeScope();
}
/**
 * Analyzes a connector action: annotations, parameters (the first must be
 * the owning connector), named return values, workers and body for
 * non-native actions, and the always-returns check. Records the action's
 * stack frame size.
 */
@Override
public void visit(BallerinaAction action) {
    openScope(action);
    currentCallableUnit = action;
    for (AnnotationAttachment annotationAttachment : action.getAnnotations()) {
        annotationAttachment.setAttachedPoint(AttachmentPoint.ACTION);
        annotationAttachment.accept(this);
    }
    for (ParameterDef parameterDef : action.getParameterDefs()) {
        parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        parameterDef.accept(this);
    }
    // First parameter must be the connector the action belongs to.
    // NOTE(review): unchecked [0] access — presumably the grammar guarantees
    // at least one parameter here; confirm before relying on it.
    ParameterDef firstParamDef = action.getParameterDefs()[0];
    if (firstParamDef.getType() != action.getConnectorDef()) {
        BLangExceptionHelper.throwSemanticError(action, SemanticErrors.INCOMPATIBLE_TYPES,
                action.getConnectorDef(), firstParamDef.getType());
    }
    for (ParameterDef parameterDef : action.getReturnParameters()) {
        // Only named return parameters need a stack slot.
        if (parameterDef.getName() != null) {
            parameterDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
        }
        parameterDef.accept(this);
    }
    if (!action.isNative()) {
        for (Worker worker : action.getWorkers()) {
            worker.accept(this);
            addWorkerSymbol(worker);
        }
        BlockStmt blockStmt = action.getCallableUnitBody();
        blockStmt.accept(this);
        if (action.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) {
            BLangExceptionHelper.throwSemanticError(action, SemanticErrors.MISSING_RETURN_STATEMENT);
        }
    }
    int sizeOfStackFrame = stackFrameOffset + 1;
    action.setStackFrameSize(sizeOfStackFrame);
    stackFrameOffset = -1;
    currentCallableUnit = null;
    closeScope();
}
/**
 * Analyzes a worker: parameters and named return values are allocated in
 * worker memory, then the worker body is analyzed in the worker's scope.
 * Saves and restores the enclosing scope and callable unit.
 */
@Override
public void visit(Worker worker) {
    SymbolScope parentScope = currentScope;
    currentScope = worker;
    parentCallableUnit = currentCallableUnit;
    currentCallableUnit = worker;
    for (ParameterDef parameterDef : worker.getParameterDefs()) {
        parameterDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
        parameterDef.accept(this);
    }
    for (ParameterDef parameterDef : worker.getReturnParameters()) {
        // Only named return parameters need a worker-memory slot.
        if (parameterDef.getName() != null) {
            parameterDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
        }
        parameterDef.accept(this);
    }
    BlockStmt blockStmt = worker.getCallableUnitBody();
    blockStmt.accept(this);
    // workerMemAddrOffset is the last allocated slot; size is offset + 1.
    int sizeOfStackFrame = workerMemAddrOffset + 1;
    worker.setStackFrameSize(sizeOfStackFrame);
    workerMemAddrOffset = -1;
    currentCallableUnit = parentCallableUnit;
    currentScope = parentScope;
}
/**
 * Registers the worker in the current scope, rejecting duplicate names.
 */
private void addWorkerSymbol(Worker worker) {
    SymbolName name = worker.getSymbolName();
    if (currentScope.resolve(name) != null) {
        BLangExceptionHelper.throwSemanticError(worker,
                SemanticErrors.REDECLARED_SYMBOL, worker.getName());
    }
    currentScope.define(name, worker);
}
/**
 * Analyzes a struct definition's annotations; fields were handled during
 * symbol definition.
 */
@Override
public void visit(StructDef structDef) {
    for (AnnotationAttachment attachment : structDef.getAnnotations()) {
        attachment.setAttachedPoint(AttachmentPoint.STRUCT);
        attachment.accept(this);
    }
}
/**
 * Analyzes an annotation attachment: the annotation must be defined, must
 * allow the construct it is attached to, and its attributes must be valid;
 * missing attributes are filled with their declared defaults.
 */
@Override
public void visit(AnnotationAttachment annotation) {
    AttachmentPoint attachedPoint = annotation.getAttachedPoint();
    SymbolName annotationSymName = new SymbolName(annotation.getName(), annotation.getPkgPath());
    BLangSymbol annotationSymbol = currentScope.resolve(annotationSymName);
    // Covers both "not found" (null) and "found but not an annotation".
    if (!(annotationSymbol instanceof AnnotationDef)) {
        BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.UNDEFINED_ANNOTATION,
                annotationSymName);
    }
    AnnotationDef annotationDef = (AnnotationDef) annotationSymbol;
    // An empty attachment-point list means the annotation may attach anywhere.
    if (annotationDef.getAttachmentPoints() != null && annotationDef.getAttachmentPoints().length > 0) {
        Optional<String> matchingAttachmentPoint = Arrays.stream(annotationDef.getAttachmentPoints())
                .filter(attachmentPoint -> attachmentPoint.equals(attachedPoint.getValue()))
                .findAny();
        if (!matchingAttachmentPoint.isPresent()) {
            BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.ANNOTATION_NOT_ALLOWED,
                    annotationSymName, attachedPoint);
        }
    }
    validateAttributes(annotation, annotationDef);
    populateDefaultValues(annotation, annotationDef);
}
/**
 * Visit and validate attributes of an annotation attachment: each supplied
 * attribute must exist on the annotation definition and its value type
 * (element-wise for arrays) must match the declared attribute type.
 * Nested annotation values are validated recursively.
 *
 * @param annotation Annotation attachment to validate attributes
 * @param annotationDef Definition of the annotation
 */
private void validateAttributes(AnnotationAttachment annotation, AnnotationDef annotationDef) {
    annotation.getAttributeNameValuePairs().forEach((attributeName, attributeValue) -> {
        BLangSymbol attributeSymbol = annotationDef.resolveMembers(new SymbolName(attributeName));
        // instanceof already rejects null; the explicit null check is redundant
        // but harmless.
        if (attributeSymbol == null || !(attributeSymbol instanceof AnnotationAttributeDef)) {
            BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.NO_SUCH_ATTRIBUTE,
                    attributeName, annotation.getName());
        }
        AnnotationAttributeDef attributeDef = ((AnnotationAttributeDef) attributeSymbol);
        SimpleTypeName attributeType = attributeDef.getTypeName();
        SimpleTypeName valueType = attributeValue.getType();
        // NOTE(review): resolve() may return null for unknown types; the
        // getSymbolName() calls below would then NPE — confirm resolution is
        // guaranteed at this point.
        BLangSymbol valueTypeSymbol = currentScope.resolve(valueType.getSymbolName());
        BLangSymbol attributeTypeSymbol = annotationDef.resolve(new SymbolName(attributeType.getName(),
                attributeType.getPackagePath()));
        if (attributeType.isArrayType()) {
            // Declared as an array: the supplied value must also be an array,
            // and every element must match the declared element type.
            if (!valueType.isArrayType()) {
                BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                        attributeTypeSymbol.getSymbolName() + TypeConstants.ARRAY_TNAME,
                        valueTypeSymbol.getSymbolName());
            }
            AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray();
            for (AnnotationAttributeValue value : valuesArray) {
                valueTypeSymbol = currentScope.resolve(value.getType().getSymbolName());
                if (attributeTypeSymbol != valueTypeSymbol) {
                    BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                            attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName());
                }
                // Recurse into nested annotation values.
                AnnotationAttachment childAnnotation = value.getAnnotationValue();
                if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) {
                    validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol);
                }
            }
        } else {
            // Declared as a scalar: an array value is rejected outright.
            if (valueType.isArrayType()) {
                BLangExceptionHelper.throwSemanticError(attributeValue,
                        SemanticErrors.INCOMPATIBLE_TYPES_ARRAY_FOUND, attributeTypeSymbol.getName());
            }
            if (attributeTypeSymbol != valueTypeSymbol) {
                BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES,
                        attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName());
            }
            // Recurse into a nested annotation value.
            AnnotationAttachment childAnnotation = attributeValue.getAnnotationValue();
            if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) {
                validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol);
            }
        }
    });
}
/**
 * Populate default values to the annotation attributes: any attribute not
 * supplied in the attachment receives its declared default (when one
 * exists); supplied nested annotation values are populated recursively.
 *
 * @param annotation Annotation attachment to populate default values
 * @param annotationDef Definition of the annotation corresponds to the provided annotation attachment
 */
private void populateDefaultValues(AnnotationAttachment annotation, AnnotationDef annotationDef) {
    Map<String, AnnotationAttributeValue> attributeValPairs = annotation.getAttributeNameValuePairs();
    for (AnnotationAttributeDef attributeDef : annotationDef.getAttributeDefs()) {
        String attributeName = attributeDef.getName();
        // Attribute not supplied: fall back to the declared default value.
        if (!attributeValPairs.containsKey(attributeName)) {
            BasicLiteral defaultValue = attributeDef.getAttributeValue();
            if (defaultValue != null) {
                annotation.addAttributeNameValuePair(attributeName,
                        new AnnotationAttributeValue(defaultValue.getBValue(), defaultValue.getTypeName(), null));
            }
            continue;
        }
        // Attribute supplied: recurse into nested annotation values so their
        // own defaults are populated too.
        AnnotationAttributeValue attributeValue = attributeValPairs.get(attributeName);
        SimpleTypeName valueType = attributeValue.getType();
        if (valueType.isArrayType()) {
            AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray();
            for (AnnotationAttributeValue value : valuesArray) {
                AnnotationAttachment annotationTypeVal = value.getAnnotationValue();
                if (annotationTypeVal == null) {
                    continue;
                }
                SimpleTypeName attributeType = attributeDef.getTypeName();
                BLangSymbol attributeTypeSymbol = annotationDef.resolve(
                        new SymbolName(attributeType.getName(), attributeType.getPackagePath()));
                if (attributeTypeSymbol instanceof AnnotationDef) {
                    populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol);
                }
            }
        } else {
            AnnotationAttachment annotationTypeVal = attributeValue.getAnnotationValue();
            if (annotationTypeVal == null) {
                continue;
            }
            BLangSymbol attributeTypeSymbol = annotationDef.resolve(attributeDef.getTypeName().getSymbolName());
            if (attributeTypeSymbol instanceof AnnotationDef) {
                populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol);
            }
        }
    }
}
/**
 * Analyzes an annotation attribute definition. With a default value the
 * attribute type must be a built-in value type matching the value; without
 * one the type must be a value type or another annotation definition.
 */
@Override
public void visit(AnnotationAttributeDef annotationAttributeDef) {
    SimpleTypeName fieldType = annotationAttributeDef.getTypeName();
    BasicLiteral fieldVal = annotationAttributeDef.getAttributeValue();
    if (fieldVal != null) {
        fieldVal.accept(this);
        BType valueType = fieldVal.getType();
        // Defaults are only allowed for built-in value types.
        if (!BTypes.isBuiltInTypeName(fieldType.getName())) {
            BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE);
        }
        BLangSymbol typeSymbol = currentScope.resolve(fieldType.getSymbolName());
        BType fieldBType = (BType) typeSymbol;
        if (!BTypes.isValueType(fieldBType)) {
            BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE);
        }
        // The declared type and the default value's type must agree.
        if (fieldBType != valueType) {
            BLangExceptionHelper.throwSemanticError(annotationAttributeDef,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, fieldType, fieldVal.getTypeName());
        }
    } else {
        BLangSymbol typeSymbol;
        if (fieldType.isArrayType()) {
            typeSymbol = currentScope.resolve(new SymbolName(fieldType.getName(), fieldType.getPackagePath()));
        } else {
            typeSymbol = currentScope.resolve(fieldType.getSymbolName());
        }
        // Valid attribute types: built-in value types or annotation definitions.
        if (((typeSymbol instanceof BType) && !BTypes.isValueType((BType) typeSymbol)) ||
                (!(typeSymbol instanceof BType) && !(typeSymbol instanceof AnnotationDef))) {
            BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_ATTRIBUTE_TYPE,
                    fieldType);
        }
        // Annotation-typed attributes carry the defining package's path.
        if (!(typeSymbol instanceof BType)) {
            fieldType.setPkgPath(annotationAttributeDef.getPackagePath());
        }
    }
}
/**
 * Analyzes an annotation definition: its attribute definitions first,
 * then any annotations attached to the definition itself.
 */
@Override
public void visit(AnnotationDef annotationDef) {
    for (AnnotationAttributeDef attributeDef : annotationDef.getAttributeDefs()) {
        attributeDef.accept(this);
    }
    for (AnnotationAttachment attachment : annotationDef.getAnnotations()) {
        attachment.setAttachedPoint(AttachmentPoint.ANNOTATION);
        attachment.accept(this);
    }
}
/**
 * Analyzes a parameter definition: resolves its declared type and visits
 * any attached annotations.
 */
@Override
public void visit(ParameterDef paramDef) {
    paramDef.setType(BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()));
    AnnotationAttachment[] annotations = paramDef.getAnnotations();
    if (annotations == null) {
        return;
    }
    for (AnnotationAttachment attachment : annotations) {
        attachment.setAttachedPoint(AttachmentPoint.PARAMETER);
        attachment.accept(this);
    }
}
/** Variable definitions are analyzed via their VariableDefStmt; nothing to do here. */
@Override
public void visit(VariableDef varDef) {
}
/**
 * Analyzes a variable definition statement: resolves the declared type,
 * defines the symbol (rejecting same-scope redeclarations), allocates a
 * memory slot, then type-checks the initializer — handling null literals,
 * nested init expressions, multi-return invocations, and widening casts.
 */
@Override
public void visit(VariableDefStmt varDefStmt) {
    VariableDef varDef = varDefStmt.getVariableDef();
    BType varBType = BTypes.resolveType(varDef.getTypeName(), currentScope, varDef.getNodeLocation());
    varDef.setType(varBType);
    SymbolName symbolName = new SymbolName(varDef.getName(), currentPkg);
    BLangSymbol varSymbol = currentScope.resolve(symbolName);
    // Redeclaration is only an error within the same scope; shadowing an
    // outer-scope symbol is allowed.
    if (varSymbol != null && varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) {
        BLangExceptionHelper.throwSemanticError(varDef, SemanticErrors.REDECLARED_SYMBOL, varDef.getName());
    }
    currentScope.define(symbolName, varDef);
    setMemoryLocation(varDef);
    Expression rExpr = varDefStmt.getRExpr();
    // Declaration without initializer: nothing more to check.
    if (rExpr == null) {
        return;
    }
    // null may only initialize reference types.
    if (rExpr instanceof NullLiteral) {
        if (BTypes.isValueType(varBType)) {
            BLangExceptionHelper.throwSemanticError(rExpr, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rExpr.getType(),
                    varBType);
        }
        rExpr.setType(varBType);
        return;
    }
    // Brace/bracket literals are rewritten to the concrete init expression
    // for the declared type before analysis.
    if (rExpr instanceof RefTypeInitExpr) {
        RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, varBType);
        varDefStmt.setRExpr(refTypeInitExpr);
        refTypeInitExpr.accept(this);
        return;
    }
    // Function/action invocations may return multiple values; exactly one
    // is required to initialize a single variable.
    if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) {
        rExpr.accept(this);
        CallableUnitInvocationExpr invocationExpr = (CallableUnitInvocationExpr) rExpr;
        BType[] returnTypes = invocationExpr.getTypes();
        if (returnTypes.length != 1) {
            BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, "1",
                    returnTypes.length);
        } else if (varBType == BTypes.typeAny) {
            // any accepts every type without conversion.
            return;
        } else if ((varBType != BTypes.typeMap) && (returnTypes[0] != BTypes.typeMap) &&
                (!varBType.equals(returnTypes[0]))) {
            // Mismatched types: try an implicit widening cast.
            TypeCastExpression newExpr = checkWideningPossible(varBType, rExpr);
            if (newExpr != null) {
                newExpr.accept(this);
                varDefStmt.setRExpr(newExpr);
            } else {
                BLangExceptionHelper.throwSemanticError(rExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                        returnTypes[0], varBType);
            }
        }
        return;
    }
    // Plain single-value expression.
    visitSingleValueExpr(rExpr);
    if (varBType == BTypes.typeAny) {
        return;
    }
    BType rType = rExpr.getType();
    // Explicit casts may not have a resolved type yet; resolve from the name.
    if (rExpr instanceof TypeCastExpression && rType == null) {
        rType = BTypes.resolveType(((TypeCastExpression) rExpr).getTypeName(), currentScope, null);
    }
    if (!varBType.equals(rType)) {
        // Mismatched types: try an implicit widening cast.
        TypeCastExpression newExpr = checkWideningPossible(varBType, rExpr);
        if (newExpr != null) {
            newExpr.accept(this);
            varDefStmt.setRExpr(newExpr);
        } else {
            BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                    rExpr.getType().getSymbolName(), varBType.getSymbolName());
        }
    }
}
/**
 * Analyzes an assignment statement: validates the left-hand expressions,
 * then type-checks the right-hand side — handling multi-return
 * invocations, null literals, nested init expressions, and widening casts.
 */
@Override
public void visit(AssignStmt assignStmt) {
    Expression[] lExprs = assignStmt.getLExprs();
    visitLExprsOfAssignment(assignStmt, lExprs);
    Expression rExpr = assignStmt.getRExpr();
    // Invocations may return multiple values; match them against the LHS list.
    if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) {
        rExpr.accept(this);
        checkForMultiAssignmentErrors(assignStmt, lExprs, (CallableUnitInvocationExpr) rExpr);
        return;
    }
    // From here on only a single LHS expression is relevant.
    Expression lExpr = assignStmt.getLExprs()[0];
    BType lExprType = lExpr.getType();
    // null may only be assigned to reference types.
    if (rExpr instanceof NullLiteral) {
        if (BTypes.isValueType(lExprType)) {
            BLangExceptionHelper.throwSemanticError(lExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    rExpr.getType(), lExpr.getType());
        }
        rExpr.setType(lExprType);
        return;
    }
    // Brace/bracket literals are rewritten to the concrete init expression
    // for the LHS type before analysis.
    if (rExpr instanceof RefTypeInitExpr) {
        RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lExprType);
        assignStmt.setRExpr(refTypeInitExpr);
        refTypeInitExpr.accept(this);
        return;
    }
    visitSingleValueExpr(rExpr);
    // any accepts every type without conversion.
    if (lExprType == BTypes.typeAny) {
        return;
    }
    BType rType = rExpr.getType();
    // Explicit casts may not have a resolved type yet; resolve from the name.
    if (rExpr instanceof TypeCastExpression && rType == null) {
        rType = BTypes.resolveType(((TypeCastExpression) rExpr).getTypeName(), currentScope, null);
    }
    if (!lExprType.equals(rType)) {
        // Mismatched types: try an implicit widening cast.
        TypeCastExpression newExpr = checkWideningPossible(lExpr.getType(), rExpr);
        if (newExpr != null) {
            newExpr.accept(this);
            assignStmt.setRhsExpr(newExpr);
        } else {
            BLangExceptionHelper.throwSemanticError(lExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                    rExpr.getType(), lExpr.getType());
        }
    }
}
/**
 * Analyzes a block: validates break placement, flags statements that
 * follow break/reply or an always-returning statement as unreachable, and
 * propagates the always-returns property to the block.
 */
@Override
public void visit(BlockStmt blockStmt) {
    openScope(blockStmt);
    for (int stmtIndex = 0; stmtIndex < blockStmt.getStatements().length; stmtIndex++) {
        Statement stmt = blockStmt.getStatements()[stmtIndex];
        // break is only legal inside a while loop.
        if (stmt instanceof BreakStmt && whileStmtCount < 1) {
            BLangExceptionHelper.throwSemanticError(stmt,
                    SemanticErrors.BREAK_STMT_NOT_ALLOWED_HERE);
        }
        // Nothing (except comments) may follow a break or reply.
        if (stmt instanceof BreakStmt || stmt instanceof ReplyStmt) {
            checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
        }
        stmt.accept(this);
        // A statement that always returns makes the rest of the block
        // unreachable and the block itself always-returning.
        if (stmt.isAlwaysReturns()) {
            checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
            blockStmt.setAlwaysReturns(true);
        }
    }
    closeScope();
}
/** Comments carry no semantics; nothing to analyze. */
@Override
public void visit(CommentStmt commentStmt) {
}
/**
 * Analyzes a while statement: the condition must be a boolean expression
 * and the loop body must contain at least one statement. Loop nesting
 * depth is tracked so break statements can be validated.
 */
// Bug fix: the duplicated @Override annotation has been removed —
// @Override is not a repeatable annotation, so repeating it is a
// compile-time error (JLS §9.7.4 / §9.6.3).
@Override
public void visit(WhileStmt whileStmt) {
    whileStmtCount++;
    Expression expr = whileStmt.getCondition();
    visitSingleValueExpr(expr);
    if (expr.getType() != BTypes.typeBoolean) {
        BLangExceptionHelper
                .throwSemanticError(whileStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType());
    }
    BlockStmt blockStmt = whileStmt.getBody();
    // An empty while body is rejected.
    if (blockStmt.getStatements().length == 0) {
        BLangExceptionHelper.throwSemanticError(blockStmt, SemanticErrors.NO_STATEMENTS_WHILE_LOOP);
    }
    blockStmt.accept(this);
    whileStmtCount--;
}
/** break placement is validated in visit(BlockStmt); nothing to do here. */
@Override
public void visit(BreakStmt breakStmt) {
}
@Override
public void visit(TryCatchStmt tryCatchStmt) {
// Analyzes the try block, then allocates a stack slot for the catch
// parameter (stackFrameOffset tracks the current frame's slots — defined
// elsewhere in this class) before analyzing the catch block.
tryCatchStmt.getTryBlock().accept(this);
tryCatchStmt.getCatchBlock().getParameterDef().setMemoryLocation(new StackVarLocation(++stackFrameOffset));
tryCatchStmt.getCatchBlock().getParameterDef().accept(this);
tryCatchStmt.getCatchBlock().getCatchBlockStmt().accept(this);
}
/**
 * Analyzes a throw statement. The thrown expression must produce a value of
 * type 'exception': either a variable reference whose type is an exception,
 * or a single-return function invocation whose first return type is an
 * exception. Any other expression yields a SemanticException.
 *
 * Fix: the original unconditionally cast every non-variable expression to
 * {@link FunctionInvocationExpr}, so other expression kinds crashed with a
 * ClassCastException instead of reaching the intended semantic error. The
 * cast is now guarded by an instanceof check; invalid kinds fall through to
 * the SemanticException below.
 */
@Override
public void visit(ThrowStmt throwStmt) {
    throwStmt.getExpr().accept(this);
    if (throwStmt.getExpr() instanceof VariableRefExpr) {
        if (throwStmt.getExpr().getType() instanceof BExceptionType) {
            // A throw terminates the current path.
            throwStmt.setAlwaysReturns(true);
            return;
        }
    } else if (throwStmt.getExpr() instanceof FunctionInvocationExpr) {
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) throwStmt.getExpr();
        if (!funcIExpr.isMultiReturnExpr() && funcIExpr.getTypes().length > 0
                && funcIExpr.getTypes()[0] instanceof BExceptionType) {
            throwStmt.setAlwaysReturns(true);
            return;
        }
    }
    throw new SemanticException(throwStmt.getNodeLocation().getFileName() + ":" +
            throwStmt.getNodeLocation().getLineNumber() +
            ": only a variable reference of type 'exception' is allowed in throw statement");
}
/**
 * Analyzes a standalone function-call statement by delegating all checks to
 * its underlying invocation expression.
 */
@Override
public void visit(FunctionInvocationStmt functionInvocationStmt) {
    FunctionInvocationExpr invocationExpr = functionInvocationStmt.getFunctionInvocationExpr();
    invocationExpr.accept(this);
}
/**
 * Analyzes a standalone action-call statement by delegating all checks to
 * its underlying invocation expression.
 */
@Override
public void visit(ActionInvocationStmt actionInvocationStmt) {
    ActionInvocationExpr invocationExpr = actionInvocationStmt.getActionInvocationExpr();
    invocationExpr.accept(this);
}
@Override
public void visit(WorkerInvocationStmt workerInvocationStmt) {
// Analyzes the message expression sent to the worker, resolves the target
// worker (linkWorker), and records the worker's return types on the statement.
VariableRefExpr variableRefExpr = workerInvocationStmt.getInMsg();
variableRefExpr.accept(this);
linkWorker(workerInvocationStmt);
ParameterDef[] returnParams = workerInvocationStmt.getCallableUnit().getReturnParameters();
BType[] returnTypes = new BType[returnParams.length];
for (int i = 0; i < returnParams.length; i++) {
returnTypes[i] = returnParams[i].getType();
}
workerInvocationStmt.setTypes(returnTypes);
}
@Override
public void visit(WorkerReplyStmt workerReplyStmt) {
// Analyzes the receiving variable and resolves the named worker this
// statement replies from; a non-worker symbol is a semantic error.
String workerName = workerReplyStmt.getWorkerName();
SymbolName workerSymbol = new SymbolName(workerName);
VariableRefExpr variableRefExpr = workerReplyStmt.getReceiveExpr();
variableRefExpr.accept(this);
BLangSymbol worker = currentScope.resolve(workerSymbol);
if (!(worker instanceof Worker)) {
BLangExceptionHelper.throwSemanticError(variableRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
workerSymbol);
}
workerReplyStmt.setWorker((Worker) worker);
}
@Override
public void visit(ForkJoinStmt forkJoinStmt) {
// Analyzes a fork-join: the forked message, each worker, and the join and
// timeout clauses, each in its own nested scope. The statement "always
// returns" only when BOTH the join body and the timeout body always return.
boolean stmtReturns = true;
openScope(forkJoinStmt);
// The forked expression must be of type message.
VariableRefExpr messageReference = forkJoinStmt.getMessageReference();
messageReference.accept(this);
if (!messageReference.getType().equals(BTypes.typeMessage)) {
throw new SemanticException("Incompatible types: expected a message in " +
messageReference.getNodeLocation().getFileName() + ":" +
messageReference.getNodeLocation().getLineNumber());
}
for (Worker worker: forkJoinStmt.getWorkers()) {
worker.accept(this);
}
ForkJoinStmt.Join join = forkJoinStmt.getJoin();
openScope(join);
// The join result parameter gets a fresh stack slot and must be message[].
ParameterDef parameter = join.getJoinResult();
parameter.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
parameter.accept(this);
join.define(parameter.getSymbolName(), parameter);
if (!(parameter.getType() instanceof BArrayType &&
(((BArrayType) parameter.getType()).getElementType() == BTypes.typeMessage))) {
throw new SemanticException("Incompatible types: expected a message[] in " +
parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation().getLineNumber());
}
Statement joinBody = join.getJoinBlock();
joinBody.accept(this);
stmtReturns &= joinBody.isAlwaysReturns();
closeScope();
ForkJoinStmt.Timeout timeout = forkJoinStmt.getTimeout();
openScope(timeout);
Expression timeoutExpr = timeout.getTimeoutExpression();
timeoutExpr.accept(this);
// The timeout result parameter mirrors the join result: stack slot + message[].
ParameterDef timeoutParam = timeout.getTimeoutResult();
timeoutParam.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
timeoutParam.accept(this);
timeout.define(timeoutParam.getSymbolName(), timeoutParam);
if (!(timeoutParam.getType() instanceof BArrayType &&
(((BArrayType) timeoutParam.getType()).getElementType() == BTypes.typeMessage))) {
throw new SemanticException("Incompatible types: expected a message[] in " +
timeoutParam.getNodeLocation().getFileName() + ":" +
timeoutParam.getNodeLocation().getLineNumber());
}
Statement timeoutBody = timeout.getTimeoutBlock();
timeoutBody.accept(this);
stmtReturns &= timeoutBody.isAlwaysReturns();
closeScope();
forkJoinStmt.setAlwaysReturns(stmtReturns);
closeScope();
}
@Override
public void visit(ReplyStmt replyStmt) {
// A reply is only valid inside a resource: functions and actions are
// rejected, as are action invocations used as the reply expression.
// The reply expression itself must be of type message.
if (currentCallableUnit instanceof Function) {
BLangExceptionHelper.throwSemanticError(currentCallableUnit,
SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_FUNCTION);
} else if (currentCallableUnit instanceof Action) {
BLangExceptionHelper.throwSemanticError(currentCallableUnit,
SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_ACTION);
}
if (replyStmt.getReplyExpr() instanceof ActionInvocationExpr) {
BLangExceptionHelper.throwSemanticError(currentCallableUnit,
SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_REPLY);
}
Expression replyExpr = replyStmt.getReplyExpr();
visitSingleValueExpr(replyExpr);
if (replyExpr.getType() != BTypes.typeMessage) {
BLangExceptionHelper.throwSemanticError(replyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
BTypes.typeMessage, replyExpr.getType());
}
}
@Override
public void visit(ReturnStmt returnStmt) {
// Validates a return statement against the enclosing callable unit's
// declared return parameters: argument count, per-position type
// compatibility (with implicit widening where the type lattice allows it),
// and special handling for a single multi-value function call.
if (currentCallableUnit instanceof Resource) {
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.RETURN_CANNOT_USED_IN_RESOURCE);
}
Expression[] returnArgExprs = returnStmt.getExprs();
ParameterDef[] returnParamsOfCU = currentCallableUnit.getReturnParameters();
// Bare 'return' with no declared return parameters: nothing to check.
if (returnArgExprs.length == 0 && returnParamsOfCU.length == 0) {
return;
}
// Bare 'return' with NAMED return parameters: synthesize references to the
// named parameters so the bare return yields their current values.
if (returnArgExprs.length == 0 && returnParamsOfCU[0].getName() != null) {
Expression[] returnExprs = new Expression[returnParamsOfCU.length];
for (int i = 0; i < returnParamsOfCU.length; i++) {
VariableRefExpr variableRefExpr = new VariableRefExpr(returnStmt.getNodeLocation(),
returnParamsOfCU[i].getSymbolName());
visit(variableRefExpr);
returnExprs[i] = variableRefExpr;
}
returnStmt.setExprs(returnExprs);
return;
} else if (returnArgExprs.length == 0) {
// Unnamed return parameters require explicit return values.
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
}
BType[] typesOfReturnExprs = new BType[returnArgExprs.length];
for (int i = 0; i < returnArgExprs.length; i++) {
Expression returnArgExpr = returnArgExprs[i];
returnArgExpr.accept(this);
typesOfReturnExprs[i] = returnArgExpr.getType();
}
// Special case: a single function call may supply all return values at once.
if (returnArgExprs.length == 1 && returnArgExprs[0] instanceof FunctionInvocationExpr) {
FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) returnArgExprs[0];
BType[] funcIExprReturnTypes = funcIExpr.getTypes();
if (funcIExprReturnTypes.length > returnParamsOfCU.length) {
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
} else if (funcIExprReturnTypes.length < returnParamsOfCU.length) {
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
}
for (int i = 0; i < returnParamsOfCU.length; i++) {
// 'any' accepts every type; otherwise types must match exactly here.
if (returnParamsOfCU[i].getType() != BTypes.typeAny &&
!funcIExprReturnTypes[i].equals(returnParamsOfCU[i].getType())) {
BLangExceptionHelper.throwSemanticError(returnStmt,
SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, returnParamsOfCU[i].getType(),
funcIExprReturnTypes[i]);
}
}
return;
}
if (typesOfReturnExprs.length > returnParamsOfCU.length) {
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
} else if (typesOfReturnExprs.length < returnParamsOfCU.length) {
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
} else {
for (int i = 0; i < returnParamsOfCU.length; i++) {
if (returnArgExprs[i] instanceof ActionInvocationExpr) {
BLangExceptionHelper.throwSemanticError(returnStmt,
SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_RETURN);
}
if (returnArgExprs[i] instanceof FunctionInvocationExpr) {
FunctionInvocationExpr funcIExpr = ((FunctionInvocationExpr) returnArgExprs[i]);
// A multi-value call cannot occupy a single return slot.
if (funcIExpr.getTypes().length > 1) {
BLangExceptionHelper.throwSemanticError(returnStmt,
SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT,
funcIExpr.getCallableUnit().getName());
}
}
BType targetType = returnParamsOfCU[i].getType();
if (NativeCastMapper.isCompatible(returnParamsOfCU[i].getType(), typesOfReturnExprs[i])) {
continue;
}
// Try an implicit widening cast before giving up.
TypeCastExpression newExpr = checkWideningPossible(targetType, returnArgExprs[i]);
if (newExpr != null) {
newExpr.accept(this);
returnArgExprs[i] = newExpr;
continue;
}
BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT,
returnParamsOfCU[i].getType().getSymbolName(), typesOfReturnExprs[i].getSymbolName());
}
}
}
@Override
public void visit(InstanceCreationExpr instanceCreationExpr) {
// 'create' is only meaningful for reference types; value types are rejected.
visitSingleValueExpr(instanceCreationExpr);
if (BTypes.isValueType(instanceCreationExpr.getType())) {
BLangExceptionHelper.throwSemanticError(instanceCreationExpr,
SemanticErrors.CANNOT_USE_CREATE_FOR_VALUE_TYPES, instanceCreationExpr.getType());
}
}
@Override
public void visit(FunctionInvocationExpr funcIExpr) {
// Analyzes each argument, resolves the target function (linkFunction),
// and copies the function's return types onto the expression.
Expression[] exprs = funcIExpr.getArgExprs();
for (Expression expr : exprs) {
visitSingleValueExpr(expr);
}
linkFunction(funcIExpr);
BType[] returnParamTypes = funcIExpr.getCallableUnit().getReturnParamTypes();
funcIExpr.setTypes(returnParamTypes);
}
@Override
public void visit(ActionInvocationExpr actionIExpr) {
// Analyzes each argument, resolves the target action (linkAction),
// and copies the action's return types onto the expression.
Expression[] exprs = actionIExpr.getArgExprs();
for (Expression expr : exprs) {
visitSingleValueExpr(expr);
}
linkAction(actionIExpr);
BType[] returnParamTypes = actionIExpr.getCallableUnit().getReturnParamTypes();
actionIExpr.setTypes(returnParamTypes);
}
@Override
public void visit(BasicLiteral basicLiteral) {
// Resolves the literal's declared type name in the current scope and
// stamps the resolved type on the literal node.
BType bType = BTypes.resolveType(basicLiteral.getTypeName(), currentScope, basicLiteral.getNodeLocation());
basicLiteral.setType(bType);
}
/**
 * Analyzes a division expression. Division is defined for int and float
 * operands only; any other operand type is an invalid binary operation.
 */
@Override
public void visit(DivideExpr divideExpr) {
    BType operandType = verifyBinaryArithmeticExprType(divideExpr);
    if (operandType == BTypes.typeInt) {
        divideExpr.setEvalFunc(DivideExpr.DIV_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        divideExpr.setEvalFunc(DivideExpr.DIV_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(divideExpr);
}
/**
 * Analyzes a modulo expression. Modulo is defined for int and float
 * operands only; any other operand type is an invalid binary operation.
 */
@Override
public void visit(ModExpression modExpression) {
    BType operandType = verifyBinaryArithmeticExprType(modExpression);
    if (operandType == BTypes.typeInt) {
        modExpression.setEvalFunc(ModExpression.MOD_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        modExpression.setEvalFunc(ModExpression.MOD_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(modExpression);
}
/**
 * Analyzes a unary expression. The operand's type becomes the expression's
 * type. '-' and '+' are defined for int and float; '!' for boolean. Any
 * other operator/type combination raises a semantic error.
 */
@Override
public void visit(UnaryExpression unaryExpr) {
    visitSingleValueExpr(unaryExpr.getRExpr());
    unaryExpr.setType(unaryExpr.getRExpr().getType());
    BType operandType = unaryExpr.getType();
    if (Operator.SUB.equals(unaryExpr.getOperator())) {
        if (operandType == BTypes.typeInt) {
            unaryExpr.setEvalFunc(UnaryExpression.NEGATIVE_INT_FUNC);
        } else if (operandType == BTypes.typeFloat) {
            unaryExpr.setEvalFunc(UnaryExpression.NEGATIVE_FLOAT_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
        return;
    }
    if (Operator.ADD.equals(unaryExpr.getOperator())) {
        if (operandType == BTypes.typeInt) {
            unaryExpr.setEvalFunc(UnaryExpression.POSITIVE_INT_FUNC);
        } else if (operandType == BTypes.typeFloat) {
            unaryExpr.setEvalFunc(UnaryExpression.POSITIVE_FLOAT_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
        return;
    }
    if (Operator.NOT.equals(unaryExpr.getOperator())) {
        if (operandType == BTypes.typeBoolean) {
            unaryExpr.setEvalFunc(UnaryExpression.NOT_BOOLEAN_FUNC);
        } else {
            throwInvalidUnaryOpError(unaryExpr);
        }
        return;
    }
    // Operator is not one of the recognized unary operators.
    BLangExceptionHelper.throwSemanticError(unaryExpr, SemanticErrors.UNKNOWN_OPERATOR_IN_UNARY,
            unaryExpr.getOperator());
}
/**
 * Analyzes an addition expression. '+' is defined for int and float
 * arithmetic and for string concatenation; other types are invalid.
 */
@Override
public void visit(AddExpression addExpr) {
    BType operandType = verifyBinaryArithmeticExprType(addExpr);
    if (operandType == BTypes.typeInt) {
        addExpr.setEvalFunc(AddExpression.ADD_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        addExpr.setEvalFunc(AddExpression.ADD_FLOAT_FUNC);
        return;
    }
    if (operandType == BTypes.typeString) {
        addExpr.setEvalFunc(AddExpression.ADD_STRING_FUNC);
        return;
    }
    throwInvalidBinaryOpError(addExpr);
}
/**
 * Analyzes a multiplication expression. '*' is defined for int and float
 * operands only; any other operand type is an invalid binary operation.
 */
@Override
public void visit(MultExpression multExpr) {
    BType operandType = verifyBinaryArithmeticExprType(multExpr);
    if (operandType == BTypes.typeInt) {
        multExpr.setEvalFunc(MultExpression.MULT_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        multExpr.setEvalFunc(MultExpression.MULT_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(multExpr);
}
/**
 * Analyzes a subtraction expression. '-' is defined for int and float
 * operands only; any other operand type is an invalid binary operation.
 */
@Override
public void visit(SubtractExpression subtractExpr) {
    BType operandType = verifyBinaryArithmeticExprType(subtractExpr);
    if (operandType == BTypes.typeInt) {
        subtractExpr.setEvalFunc(SubtractExpression.SUB_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        subtractExpr.setEvalFunc(SubtractExpression.SUB_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(subtractExpr);
}
@Override
public void visit(AndExpression andExpr) {
// Both operands must be boolean (checked by visitBinaryLogicalExpr).
visitBinaryLogicalExpr(andExpr);
andExpr.setEvalFunc(AndExpression.AND_FUNC);
}
@Override
public void visit(OrExpression orExpr) {
// Both operands must be boolean (checked by visitBinaryLogicalExpr).
visitBinaryLogicalExpr(orExpr);
orExpr.setEvalFunc(OrExpression.OR_FUNC);
}
/**
 * Analyzes an equality expression. '==' is defined for int, float, boolean,
 * string, and null comparisons; other operand types are invalid. Null
 * comparisons install a reference-type evaluation function instead.
 */
@Override
public void visit(EqualExpression equalExpr) {
    BType operandType = verifyBinaryEqualityExprType(equalExpr);
    if (operandType == BTypes.typeInt) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_FLOAT_FUNC);
        return;
    }
    if (operandType == BTypes.typeBoolean) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_BOOLEAN_FUNC);
        return;
    }
    if (operandType == BTypes.typeString) {
        equalExpr.setEvalFunc(EqualExpression.EQUAL_STRING_FUNC);
        return;
    }
    if (operandType == BTypes.typeNull) {
        equalExpr.setRefTypeEvalFunc(EqualExpression.EQUAL_NULL_FUNC);
        return;
    }
    throwInvalidBinaryOpError(equalExpr);
}
/**
 * Analyzes an inequality expression. '!=' is defined for int, float,
 * boolean, string, and null comparisons; other operand types are invalid.
 * Null comparisons install a reference-type evaluation function instead.
 */
@Override
public void visit(NotEqualExpression notEqualExpr) {
    BType operandType = verifyBinaryEqualityExprType(notEqualExpr);
    if (operandType == BTypes.typeInt) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_FLOAT_FUNC);
        return;
    }
    if (operandType == BTypes.typeBoolean) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_BOOLEAN_FUNC);
        return;
    }
    if (operandType == BTypes.typeString) {
        notEqualExpr.setEvalFunc(NotEqualExpression.NOT_EQUAL_STRING_FUNC);
        return;
    }
    if (operandType == BTypes.typeNull) {
        notEqualExpr.setRefTypeEvalFunc(NotEqualExpression.NOT_EQUAL_NULL_FUNC);
        return;
    }
    throwInvalidBinaryOpError(notEqualExpr);
}
/**
 * Analyzes a '&gt;=' comparison. Defined for int and float operands only.
 */
@Override
public void visit(GreaterEqualExpression greaterEqualExpr) {
    BType operandType = verifyBinaryCompareExprType(greaterEqualExpr);
    if (operandType == BTypes.typeInt) {
        greaterEqualExpr.setEvalFunc(GreaterEqualExpression.GREATER_EQUAL_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        greaterEqualExpr.setEvalFunc(GreaterEqualExpression.GREATER_EQUAL_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(greaterEqualExpr);
}
/**
 * Analyzes a '&gt;' comparison. Defined for int and float operands only.
 */
@Override
public void visit(GreaterThanExpression greaterThanExpr) {
    BType operandType = verifyBinaryCompareExprType(greaterThanExpr);
    if (operandType == BTypes.typeInt) {
        greaterThanExpr.setEvalFunc(GreaterThanExpression.GREATER_THAN_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        greaterThanExpr.setEvalFunc(GreaterThanExpression.GREATER_THAN_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(greaterThanExpr);
}
/**
 * Analyzes a '&lt;=' comparison. Defined for int and float operands only.
 */
@Override
public void visit(LessEqualExpression lessEqualExpr) {
    BType operandType = verifyBinaryCompareExprType(lessEqualExpr);
    if (operandType == BTypes.typeInt) {
        lessEqualExpr.setEvalFunc(LessEqualExpression.LESS_EQUAL_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        lessEqualExpr.setEvalFunc(LessEqualExpression.LESS_EQUAL_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(lessEqualExpr);
}
/**
 * Analyzes a '&lt;' comparison. Defined for int and float operands only.
 */
@Override
public void visit(LessThanExpression lessThanExpr) {
    BType operandType = verifyBinaryCompareExprType(lessThanExpr);
    if (operandType == BTypes.typeInt) {
        lessThanExpr.setEvalFunc(LessThanExpression.LESS_THAN_INT_FUNC);
        return;
    }
    if (operandType == BTypes.typeFloat) {
        lessThanExpr.setEvalFunc(LessThanExpression.LESS_THAN_FLOAT_FUNC);
        return;
    }
    throwInvalidBinaryOpError(lessThanExpr);
}
@Override
public void visit(ArrayMapAccessExpr arrayMapAccessExpr) {
// Analyzes the base variable, then delegates index/type validation to
// handleArrayType (arrays need int indices, maps need string keys).
VariableRefExpr arrayMapVarRefExpr = (VariableRefExpr) arrayMapAccessExpr.getRExpr();
arrayMapVarRefExpr.accept(this);
handleArrayType(arrayMapAccessExpr);
}
@Override
public void visit(FieldAccessExpr fieldAccessExpr) {
// Field chains are resolved recursively starting from the current scope.
visitField(fieldAccessExpr, currentScope);
}
@Override
public void visit(JSONFieldAccessExpr jsonFieldExpr) {
// JSON field access is dynamically typed; no static checks are performed here.
}
@Override
public void visit(RefTypeInitExpr refTypeInitExpr) {
// Generic reference-type initializers share the map/JSON analysis path.
visitMapJsonInitExpr(refTypeInitExpr);
}
@Override
public void visit(MapInitExpr mapInitExpr) {
// Map literals share the map/JSON analysis path.
visitMapJsonInitExpr(mapInitExpr);
}
@Override
public void visit(JSONInitExpr jsonInitExpr) {
// JSON object literals share the map/JSON analysis path.
visitMapJsonInitExpr(jsonInitExpr);
}
@Override
public void visit(JSONArrayInitExpr jsonArrayInitExpr) {
// Analyzes a JSON array literal: every element must be a value type,
// JSON-compatible, or widenable to JSON via an implicit cast.
BType inheritedType = jsonArrayInitExpr.getInheritedType();
jsonArrayInitExpr.setType(inheritedType);
Expression[] argExprs = jsonArrayInitExpr.getArgExprs();
for (int i = 0; i < argExprs.length; i++) {
Expression argExpr = argExprs[i];
// Nested literals inherit the JSON context before analysis.
if (argExpr instanceof RefTypeInitExpr) {
argExpr = getNestedInitExpr(argExpr, inheritedType);
argExprs[i] = argExpr;
}
visitSingleValueExpr(argExpr);
BType argType = argExpr.getType();
if (BTypes.isValueType(argType) || NativeCastMapper.isCompatible(BTypes.typeJSON, argType)) {
continue;
}
// Fall back to an implicit widening cast to JSON; failure is a semantic error.
TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, argExpr);
if (typeCastExpr == null) {
BLangExceptionHelper.throwSemanticError(jsonArrayInitExpr,
SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExpr.getType(), BTypes.typeJSON);
}
argExprs[i] = typeCastExpr;
}
}
@Override
public void visit(ConnectorInitExpr connectorInitExpr) {
// Analyzes a connector instantiation: the inherited type must be a
// Ballerina connector definition and every argument must exactly match the
// corresponding declared parameter type.
BType inheritedType = connectorInitExpr.getInheritedType();
if (!(inheritedType instanceof BallerinaConnectorDef)) {
BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.CONNECTOR_INIT_NOT_ALLOWED);
}
connectorInitExpr.setType(inheritedType);
for (Expression argExpr : connectorInitExpr.getArgExprs()) {
visitSingleValueExpr(argExpr);
}
Expression[] argExprs = connectorInitExpr.getArgExprs();
ParameterDef[] parameterDefs = ((BallerinaConnectorDef) inheritedType).getParameterDefs();
// NOTE(review): no check that argExprs.length matches parameterDefs.length;
// a surplus argument would raise ArrayIndexOutOfBoundsException here rather
// than a semantic error — presumably the parser guarantees the counts match;
// TODO confirm.
for (int i = 0; i < argExprs.length; i++) {
SimpleTypeName simpleTypeName = parameterDefs[i].getTypeName();
BType paramType = BTypes.resolveType(simpleTypeName, currentScope, connectorInitExpr.getNodeLocation());
parameterDefs[i].setType(paramType);
Expression argExpr = argExprs[i];
if (parameterDefs[i].getType() != argExpr.getType()) {
BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.INCOMPATIBLE_TYPES,
parameterDefs[i].getType(), argExpr.getType());
}
}
}
@Override
public void visit(ArrayInitExpr arrayInitExpr) {
// An array literal is only legal where an array type is expected.
if (!(arrayInitExpr.getInheritedType() instanceof BArrayType)) {
BLangExceptionHelper.throwSemanticError(arrayInitExpr, SemanticErrors.ARRAY_INIT_NOT_ALLOWED_HERE);
}
visitArrayInitExpr(arrayInitExpr);
}
// Analyzes the elements of an array literal against the expected element
// type of the inherited array type. Elements that are not directly
// compatible are implicitly widened where the cast lattice allows it;
// otherwise a semantic error is raised.
private void visitArrayInitExpr(ArrayInitExpr arrayInitExpr) {
BType inheritedType = arrayInitExpr.getInheritedType();
arrayInitExpr.setType(inheritedType);
Expression[] argExprs = arrayInitExpr.getArgExprs();
if (argExprs.length == 0) {
return;
}
BType expectedElementType = ((BArrayType) inheritedType).getElementType();
for (int i = 0; i < argExprs.length; i++) {
Expression argExpr = argExprs[i];
// Nested literals inherit the element type before being rewritten/analyzed.
if (argExpr instanceof RefTypeInitExpr) {
((RefTypeInitExpr) argExpr).setInheritedType(expectedElementType);
argExpr = getNestedInitExpr(argExpr, expectedElementType);
argExprs[i] = argExpr;
}
visitSingleValueExpr(argExpr);
if (NativeCastMapper.isCompatible(expectedElementType, argExpr.getType())) {
continue;
}
TypeCastExpression typeCastExpr = checkWideningPossible(expectedElementType, argExpr);
if (typeCastExpr == null) {
BLangExceptionHelper.throwSemanticError(arrayInitExpr,
SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExpr.getType(), expectedElementType);
}
argExprs[i] = typeCastExpr;
}
}
/**
 * Visit and analyze a Ballerina struct initializing expression.
 * Each key must be a plain field name of the struct; each value must be
 * compatible with the declared field type (nested literals inherit the
 * field type before analysis).
 */
@Override
public void visit(StructInitExpr structInitExpr) {
BType inheritedType = structInitExpr.getInheritedType();
structInitExpr.setType(inheritedType);
Expression[] argExprs = structInitExpr.getArgExprs();
if (argExprs.length == 0) {
return;
}
StructDef structDef = (StructDef) inheritedType;
for (Expression argExpr : argExprs) {
KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr;
Expression keyExpr = keyValueExpr.getKeyExpr();
// Keys must be simple field names, not arbitrary expressions.
if (!(keyExpr instanceof VariableRefExpr)) {
BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INVALID_FIELD_NAME_STRUCT_INIT);
}
VariableRefExpr varRefExpr = (VariableRefExpr) keyExpr;
// Resolve the field against the struct's own member scope.
BLangSymbol varDefSymbol = structDef.resolveMembers(new SymbolName(varRefExpr.getSymbolName().getName(),
structDef.getPackagePath()));
if (varDefSymbol == null) {
BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.UNKNOWN_FIELD_IN_STRUCT,
varRefExpr.getVarName(), structDef.getName());
}
if (!(varDefSymbol instanceof VariableDef)) {
BLangExceptionHelper.throwSemanticError(varRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
varDefSymbol.getSymbolName());
}
VariableDef varDef = (VariableDef) varDefSymbol;
varRefExpr.setVariableDef(varDef);
Expression valueExpr = keyValueExpr.getValueExpr();
BType structFieldType = varDef.getType();
// Nested literals inherit the field's declared type.
if (valueExpr instanceof RefTypeInitExpr) {
valueExpr = getNestedInitExpr(valueExpr, structFieldType);
keyValueExpr.setValueExpr(valueExpr);
}
valueExpr.accept(this);
if (!NativeCastMapper.isCompatible(structFieldType, valueExpr.getType())) {
BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
varDef.getType(), valueExpr.getType());
}
}
}
@Override
public void visit(BacktickExpr backtickExpr) {
// Analyzes a backtick (XML template) expression by splitting the template
// string into literal segments and embedded variable/array/map references,
// building an argument list that alternates literals and resolved
// reference expressions. Only XML is accepted as the inherited type.
BType inheritedType = backtickExpr.getInheritedType();
if (inheritedType != BTypes.typeXML) {
BLangExceptionHelper.throwSemanticError(backtickExpr, SemanticErrors.INCOMPATIBLE_TYPES_EXPECTED_XML);
}
backtickExpr.setType(inheritedType);
// patternString / compiledPattern (defined elsewhere in this class) split
// the template around embedded ${...}-style references.
String[] literals = backtickExpr.getTemplateStr().split(patternString);
List<Expression> argExprList = new ArrayList<>();
int i = 0;
// Leading literal segment, if any.
if (literals.length > i) {
BasicLiteral basicLiteral = new BasicLiteral(backtickExpr.getNodeLocation(),
new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(literals[i]));
visit(basicLiteral);
argExprList.add(basicLiteral);
i++;
}
Matcher m = compiledPattern.matcher(backtickExpr.getTemplateStr());
while (m.find()) {
// Group 3 present => indexed access (array or map); otherwise a plain
// variable reference. Group semantics depend on compiledPattern —
// presumably group 2 = container name, 4 = numeric index, 5 = string key.
if (m.group(3) != null) {
BasicLiteral indexExpr;
if (m.group(5) != null) {
// String key => map access.
indexExpr = new BasicLiteral(backtickExpr.getNodeLocation(),
new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(m.group(5)));
indexExpr.setType(BTypes.typeString);
} else {
// Numeric index => array access.
indexExpr = new BasicLiteral(backtickExpr.getNodeLocation(),
new SimpleTypeName(TypeConstants.INT_TNAME), new BInteger(Integer.parseInt(m.group(4))));
indexExpr.setType(BTypes.typeInt);
}
SymbolName mapOrArrName = new SymbolName(m.group(2), currentPkg);
ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder =
new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
VariableRefExpr arrayMapVarRefExpr = new VariableRefExpr(backtickExpr.getNodeLocation(), mapOrArrName);
visit(arrayMapVarRefExpr);
builder.setArrayMapVarRefExpr(arrayMapVarRefExpr);
builder.setSymbolName(mapOrArrName);
Expression[] exprs = {indexExpr};
builder.setIndexExprs(exprs);
ArrayMapAccessExpr arrayMapAccessExpr = builder.buildWithSymbol();
visit(arrayMapAccessExpr);
argExprList.add(arrayMapAccessExpr);
} else {
VariableRefExpr variableRefExpr = new VariableRefExpr(backtickExpr.getNodeLocation(),
new SymbolName(m.group(1), currentPkg));
visit(variableRefExpr);
argExprList.add(variableRefExpr);
}
// Literal segment following this reference, if any.
if (literals.length > i) {
BasicLiteral basicLiteral = new BasicLiteral(backtickExpr.getNodeLocation(),
new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(literals[i]));
visit(basicLiteral);
argExprList.add(basicLiteral);
i++;
}
}
backtickExpr.setArgsExprs(argExprList.toArray(new Expression[argExprList.size()]));
}
@Override
public void visit(KeyValueExpr keyValueExpr) {
// Key/value pairs are analyzed by their enclosing literal visitors.
}
@Override
public void visit(VariableRefExpr variableRefExpr) {
// Resolves the referenced symbol in the current scope; it must exist and
// must be a variable definition.
SymbolName symbolName = variableRefExpr.getSymbolName();
BLangSymbol varDefSymbol = currentScope.resolve(symbolName);
if (varDefSymbol == null) {
BLangExceptionHelper.throwSemanticError(variableRefExpr, SemanticErrors.UNDEFINED_SYMBOL,
symbolName);
}
if (!(varDefSymbol instanceof VariableDef)) {
BLangExceptionHelper.throwSemanticError(variableRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
symbolName);
}
variableRefExpr.setVariableDef((VariableDef) varDefSymbol);
}
@Override
public void visit(TypeCastExpression typeCastExpression) {
// Analyzes an explicit cast: resolves the target type if needed, rejects
// casting null, and installs either a lattice-provided conversion function
// or a user-defined type mapper.
Expression rExpr = typeCastExpression.getRExpr();
visitSingleValueExpr(rExpr);
BType sourceType = rExpr.getType();
BType targetType = typeCastExpression.getTargetType();
if (targetType == null) {
targetType = BTypes.resolveType(typeCastExpression.getTypeName(), currentScope, null);
typeCastExpression.setTargetType(targetType);
}
// null cannot be cast to anything.
if (rExpr instanceof NullLiteral) {
BLangExceptionHelper.throwSemanticError(typeCastExpression, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST,
sourceType, targetType);
}
// Prefer a built-in conversion from the explicit cast lattice; otherwise
// try to link a user-defined type mapper.
TypeEdge newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
if (newEdge != null) {
typeCastExpression.setEvalFunc(newEdge.getTypeMapperFunction());
} else {
linkTypeMapper(typeCastExpression, sourceType, targetType);
}
}
@Override
public void visit(NullLiteral nullLiteral) {
// null has its own sentinel type.
nullLiteral.setType(BTypes.typeNull);
}
// The memory-location and invoker visitors below are intentionally empty:
// locations are assigned during analysis but need no analysis themselves.
@Override
public void visit(StackVarLocation stackVarLocation) {
}
@Override
public void visit(ServiceVarLocation serviceVarLocation) {
}
@Override
public void visit(GlobalVarLocation globalVarLocation) {
}
@Override
public void visit(ConnectorVarLocation connectorVarLocation) {
}
@Override
public void visit(ConstantLocation constantLocation) {
}
@Override
public void visit(StructVarLocation structVarLocation) {
}
@Override
public void visit(WorkerVarLocation workerVarLocation) {
}
// Entry-point invokers carry no semantics to analyze here.
public void visit(ResourceInvocationExpr resourceIExpr) {
}
public void visit(MainInvoker mainInvoker) {
}
// Makes the given scope the current symbol-resolution scope.
private void openScope(SymbolScope symbolScope) {
currentScope = symbolScope;
}
// Restores the enclosing scope as the current symbol-resolution scope.
private void closeScope() {
currentScope = currentScope.getEnclosingScope();
}
// Type-checks an indexed access. Arrays require int indices (one per
// dimension, the result type peeling one array level per index); maps
// require a single string key. Anything else does not support indexing.
private void handleArrayType(ArrayMapAccessExpr arrayMapAccessExpr) {
ReferenceExpr arrayMapVarRefExpr = (ReferenceExpr) arrayMapAccessExpr.getRExpr();
if (arrayMapVarRefExpr.getType() instanceof BArrayType) {
for (Expression indexExpr : arrayMapAccessExpr.getIndexExprs()) {
visitSingleValueExpr(indexExpr);
if (indexExpr.getType() != BTypes.typeInt) {
BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr, SemanticErrors.NON_INTEGER_ARRAY_INDEX,
indexExpr.getType());
}
}
// Strip one array dimension per index expression to get the element type.
BType expectedType = arrayMapVarRefExpr.getType();
for (int i = 0; i < arrayMapAccessExpr.getIndexExprs().length; i++) {
expectedType = ((BArrayType) expectedType).getElementType();
}
arrayMapAccessExpr.setType(expectedType);
} else if (arrayMapVarRefExpr.getType() instanceof BMapType) {
Expression indexExpr = arrayMapAccessExpr.getIndexExprs()[0];
visitSingleValueExpr(indexExpr);
if (indexExpr.getType() != BTypes.typeString) {
BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr, SemanticErrors.NON_STRING_MAP_INDEX,
indexExpr.getType());
}
BMapType typeOfMap = (BMapType) arrayMapVarRefExpr.getType();
arrayMapAccessExpr.setType(typeOfMap.getElementType());
} else {
BLangExceptionHelper.throwSemanticError(arrayMapAccessExpr,
SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, arrayMapVarRefExpr.getType());
}
}
// Analyzes both operands of a binary expression as single-valued expressions.
private void visitBinaryExpr(BinaryExpression expr) {
visitSingleValueExpr(expr.getLExpr());
visitSingleValueExpr(expr.getRExpr());
}
// Analyzes an expression and rejects it if it produces multiple values
// (only a multi-return function call can do so in this context).
private void visitSingleValueExpr(Expression expr) {
expr.accept(this);
if (expr.isMultiReturnExpr()) {
FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) expr;
String nameWithPkgName = (funcIExpr.getPackageName() != null) ? funcIExpr.getPackageName()
+ ":" + funcIExpr.getName() : funcIExpr.getName();
BLangExceptionHelper.throwSemanticError(expr, SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT,
nameWithPkgName);
}
}
// Analyzes both operands, unifies their type (with implicit widening where
// possible), stamps the result type on the expression, and returns it.
private BType verifyBinaryArithmeticExprType(BinaryArithmeticExpression binaryArithmeticExpr) {
visitBinaryExpr(binaryArithmeticExpr);
BType type = verifyBinaryExprType(binaryArithmeticExpr);
binaryArithmeticExpr.setType(type);
return type;
}
// Analyzes and unifies the operand types of a comparison. The expression's
// own type is always boolean; the unified OPERAND type is returned so the
// caller can pick the right evaluation function.
private BType verifyBinaryCompareExprType(BinaryExpression binaryExpression) {
visitBinaryExpr(binaryExpression);
BType type = verifyBinaryExprType(binaryExpression);
binaryExpression.setType(BTypes.typeBoolean);
return type;
}
// Analyzes the operands of ==/!=. Comparisons against null are allowed for
// reference types only (a value type compared with null is a semantic
// error). The expression's own type is always boolean; the unified operand
// type (typeNull for null comparisons) is returned.
private BType verifyBinaryEqualityExprType(BinaryExpression binaryExpression) {
visitBinaryExpr(binaryExpression);
BType rType = binaryExpression.getRExpr().getType();
BType lType = binaryExpression.getLExpr().getType();
BType type;
if (rType == BTypes.typeNull) {
if (BTypes.isValueType(lType)) {
BLangExceptionHelper.throwSemanticError(binaryExpression,
SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
}
type = rType;
} else if (lType == BTypes.typeNull) {
if (BTypes.isValueType(rType)) {
BLangExceptionHelper.throwSemanticError(binaryExpression,
SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
}
type = lType;
} else {
type = verifyBinaryExprType(binaryExpression);
}
binaryExpression.setType(BTypes.typeBoolean);
return type;
}
// Unifies the operand types of a binary expression. If they differ, an
// implicit widening cast from the implicit-cast lattice is inserted on
// whichever side can be widened (right side tried first); string operands
// are only widened for '+' (concatenation). If no widening applies, the
// operation is reported as invalid. Returns the unified type.
private BType verifyBinaryExprType(BinaryExpression binaryExpr) {
Expression rExpr = binaryExpr.getRExpr();
Expression lExpr = binaryExpr.getLExpr();
BType rType = rExpr.getType();
BType lType = lExpr.getType();
if (!(rType.equals(lType))) {
TypeCastExpression newExpr;
TypeEdge newEdge;
// Widening is attempted unless exactly one operand is a string and the
// operator is not '+'.
if (((rType.equals(BTypes.typeString) || lType.equals(BTypes.typeString))
&& binaryExpr.getOperator().equals(Operator.ADD)) || (!(rType.equals(BTypes.typeString)) &&
!(lType.equals(BTypes.typeString)))) {
newEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rType, lType, null);
if (newEdge != null) {
// Widen the right operand to the left operand's type.
newExpr = new TypeCastExpression(rExpr.getNodeLocation(), rExpr, lType);
newExpr.setEvalFunc(newEdge.getTypeMapperFunction());
newExpr.accept(this);
binaryExpr.setRExpr(newExpr);
return lType;
} else {
newEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(lType, rType, null);
if (newEdge != null) {
// Widen the left operand to the right operand's type.
newExpr = new TypeCastExpression(lExpr.getNodeLocation(), lExpr, rType);
newExpr.setEvalFunc(newEdge.getTypeMapperFunction());
newExpr.accept(this);
binaryExpr.setLExpr(newExpr);
return rType;
}
}
}
throwInvalidBinaryOpError(binaryExpr);
}
return rType;
}
// Analyzes a logical (&&/||) expression: both operands must be boolean,
// and the result type is boolean.
private void visitBinaryLogicalExpr(BinaryLogicalExpression expr) {
visitBinaryExpr(expr);
Expression rExpr = expr.getRExpr();
Expression lExpr = expr.getLExpr();
if (lExpr.getType() == BTypes.typeBoolean && rExpr.getType() == BTypes.typeBoolean) {
expr.setType(BTypes.typeBoolean);
} else {
throwInvalidBinaryOpError(expr);
}
}
/**
 * Returns the name of the base variable behind an l-value expression:
 * the container name for an indexed access, the owning variable (found
 * recursively) for a field access, or the variable's own name otherwise.
 */
private String getVarNameFromExpression(Expression expr) {
    if (expr instanceof ArrayMapAccessExpr) {
        return ((ArrayMapAccessExpr) expr).getSymbolName().getName();
    }
    if (expr instanceof FieldAccessExpr) {
        // Walk down to the variable the field chain is rooted at.
        return getVarNameFromExpression(((FieldAccessExpr) expr).getVarRef());
    }
    return ((VariableRefExpr) expr).getSymbolName().getName();
}
// Rejects assignments whose target variable is backed by a constant location.
private void checkForConstAssignment(AssignStmt assignStmt, Expression lExpr) {
if (lExpr instanceof VariableRefExpr &&
((VariableRefExpr) lExpr).getMemoryLocation() instanceof ConstantLocation) {
BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.CANNOT_ASSIGN_VALUE_CONSTANT,
((VariableRefExpr) lExpr).getSymbolName());
}
}
/**
 * Validates a multi-value assignment (a, b = call()) against the return
 * signature of the invoked callable unit: the arity must match and every
 * target must either be of type any or exactly match the corresponding
 * return type.
 *
 * @param assignStmt assignment statement being analyzed (for error location)
 * @param lExprs left-hand-side target expressions
 * @param rExpr invocation expression supplying the values
 */
private void checkForMultiAssignmentErrors(AssignStmt assignStmt, Expression[] lExprs,
                                           CallableUnitInvocationExpr rExpr) {
    BType[] returnTypes = rExpr.getTypes();
    if (returnTypes.length != lExprs.length) {
        BLangExceptionHelper.throwSemanticError(assignStmt,
                SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length);
    }
    for (int i = 0; i < lExprs.length; i++) {
        Expression target = lExprs[i];
        BType expected = returnTypes[i];
        // A target of type 'any' accepts every return type; otherwise require an exact match.
        boolean assignable = target.getType() == BTypes.typeAny || target.getType().equals(expected);
        if (!assignable) {
            BLangExceptionHelper.throwSemanticError(assignStmt,
                    SemanticErrors.CANNOT_ASSIGN_IN_MULTIPLE_ASSIGNMENT, expected,
                    getVarNameFromExpression(target), target.getType());
        }
    }
}
/**
 * Analyzes the left-hand-side expressions of an assignment: rejects repeated
 * variables, marks index/field accesses as assignment targets, visits each
 * target, and rejects assignment to constants.
 *
 * @param assignStmt assignment statement being analyzed (for error location)
 * @param lExprs left-hand-side target expressions
 */
private void visitLExprsOfAssignment(AssignStmt assignStmt, Expression[] lExprs) {
    Set<String> seenNames = new HashSet<>();
    for (Expression target : lExprs) {
        String varName = getVarNameFromExpression(target);
        // Set.add returns false on duplicates — each variable may appear only once.
        if (!seenNames.add(varName)) {
            BLangExceptionHelper.throwSemanticError(assignStmt,
                    SemanticErrors.VAR_IS_REPEATED_ON_LEFT_SIDE_ASSIGNMENT, varName);
        }
        // Flag index/field accesses as LHS before analysis so they are treated as stores.
        if (target instanceof ArrayMapAccessExpr) {
            ((ArrayMapAccessExpr) target).setLHSExpr(true);
        } else if (target instanceof FieldAccessExpr) {
            ((FieldAccessExpr) target).setLHSExpr(true);
        }
        target.accept(this);
        checkForConstAssignment(assignStmt, target);
    }
}
/**
 * Resolves the function referred to by an invocation expression and attaches it
 * via {@code setCallableUnit}. Resolution order: (1) exact match on name plus
 * parameter types in the current scope, (2) best-effort match allowing implicit
 * widening casts, (3) semantic error. For native functions the real
 * {@link NativeUnit} is loaded and its return types are resolved here.
 *
 * @param funcIExpr function invocation expression to link
 */
private void linkFunction(FunctionInvocationExpr funcIExpr) {
String pkgPath = funcIExpr.getPackagePath();
// Build the parameter type list from the (already analyzed) argument expressions.
Expression[] exprs = funcIExpr.getArgExprs();
BType[] paramTypes = new BType[exprs.length];
for (int i = 0; i < exprs.length; i++) {
paramTypes[i] = exprs[i].getType();
}
FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(funcIExpr.getName(),
pkgPath, paramTypes);
BLangSymbol functionSymbol = currentScope.resolve(symbolName);
// No exact signature match: try a match that only requires implicit widening casts.
if (functionSymbol == null) {
functionSymbol = findBestMatchForFunctionSymbol(funcIExpr, symbolName);
}
if (functionSymbol == null) {
String funcName = (funcIExpr.getPackageName() != null) ? funcIExpr.getPackageName() + ":" +
funcIExpr.getName() : funcIExpr.getName();
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.UNDEFINED_FUNCTION, funcName);
return;
}
Function function;
if (functionSymbol.isNative()) {
// Native functions sit behind a proxy: load the real unit and resolve its declared
// return parameter type names into BTypes before use.
functionSymbol = ((BallerinaFunction) functionSymbol).getNativeFunction();
NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load();
SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
BType[] returnTypes = new BType[returnParamTypeNames.length];
for (int i = 0; i < returnParamTypeNames.length; i++) {
SimpleTypeName typeName = returnParamTypeNames[i];
BType bType = BTypes.resolveType(typeName, currentScope, funcIExpr.getNodeLocation());
returnTypes[i] = bType;
}
if (!(nativeUnit instanceof Function)) {
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
symbolName);
}
function = (Function) nativeUnit;
function.setReturnParamTypes(returnTypes);
} else {
// Non-native: the resolved symbol itself must be a Function.
if (!(functionSymbol instanceof Function)) {
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
symbolName);
return;
}
function = (Function) functionSymbol;
}
funcIExpr.setCallableUnit(function);
}
/**
 * Helper method to find the best function match when there is no direct
 * (exact-signature) match. Scans the candidate package's symbol map for
 * functions with the same name and parameter count whose parameters can all
 * accept the argument types via implicit widening casts; argument expressions
 * are wrapped in {@link TypeCastExpression}s where such a cast is needed.
 * Exactly one candidate must qualify — two qualifying candidates raise an
 * ambiguity error.
 *
 * @param funcIExpr function invocation expression being linked
 * @param symbolName exact symbol name that failed direct resolution
 * @return bLangSymbol the single matching function symbol, or null if none
 */
private BLangSymbol findBestMatchForFunctionSymbol(FunctionInvocationExpr funcIExpr,
FunctionSymbolName symbolName) {
BLangSymbol functionSymbol = null;
BLangSymbol pkgSymbol = null;
// Determine which package's symbols to scan: the current package when no
// package path is given, otherwise the referenced package.
if (symbolName.getPkgPath() == null) {
pkgSymbol = (BLangPackage) getCurrentPackageScope(currentScope);
} else {
SymbolName pkgSymbolName = new SymbolName(symbolName.getPkgPath());
pkgSymbol = currentScope.resolve(pkgSymbolName);
}
if (pkgSymbol == null) {
return null;
}
Expression[] argExprs = funcIExpr.getArgExprs();
// Arguments, possibly rewrapped in implicit casts for the chosen candidate.
Expression[] updatedArgExprs = new Expression[argExprs.length];
for (Map.Entry entry : ((SymbolScope) pkgSymbol).getSymbolMap().entrySet()) {
if (!(entry.getKey() instanceof FunctionSymbolName)) {
continue;
}
FunctionSymbolName funcSymName = (FunctionSymbolName) entry.getKey();
// Candidate must share the function name and parameter count.
if (!funcSymName.isNameAndParamCountMatch(symbolName)) {
continue;
}
boolean implicitCastPossible = true;
for (int i = 0; i < argExprs.length; i++) {
Expression argExpr = argExprs[i];
updatedArgExprs[i] = argExpr;
BType lhsType;
if (entry.getValue() instanceof NativeUnitProxy) {
// Native candidate: resolve the declared parameter type name lazily.
NativeUnit nativeUnit = ((NativeUnitProxy) entry.getValue()).load();
SimpleTypeName simpleTypeName = nativeUnit.getArgumentTypeNames()[i];
lhsType = BTypes.resolveType(simpleTypeName, currentScope, funcIExpr.getNodeLocation());
} else {
if (!(entry.getValue() instanceof Function)) {
continue;
}
lhsType = ((Function) entry.getValue()).getParameterDefs()[i].getType();
}
BType rhsType = argExpr.getType();
// null literals are acceptable for any non-value (reference) parameter type.
if (rhsType instanceof BNullType && !BTypes.isValueType(lhsType)) {
continue;
}
// Exact type match: no cast needed for this argument.
if (rhsType != null && lhsType.equals(rhsType)) {
continue;
}
// An 'any' parameter accepts every argument type.
if (lhsType == BTypes.typeAny) {
continue;
}
// Otherwise an implicit widening cast must exist, or the candidate is rejected.
TypeCastExpression newExpr = checkWideningPossible(lhsType, argExpr);
if (newExpr != null) {
updatedArgExprs[i] = newExpr;
} else {
implicitCastPossible = false;
break;
}
}
if (implicitCastPossible) {
if (functionSymbol == null) {
functionSymbol = (BLangSymbol) entry.getValue();
} else {
/**
 * This way second ambiguous function will cause this method to throw semantic error, so in a
 * scenario where there are more than two ambiguous functions, then this will show only the
 * first two.
 */
String ambiguousFunc1 = generateErrorMessage(funcIExpr, functionSymbol, symbolName.getPkgPath());
String ambiguousFunc2 = generateErrorMessage(funcIExpr, (BLangSymbol) entry.getValue(),
symbolName.getPkgPath());
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.AMBIGUOUS_FUNCTIONS,
funcSymName.getFuncName(), ambiguousFunc1, ambiguousFunc2);
break;
}
}
}
// Install the (possibly cast-wrapped) argument expressions back into the invocation.
for (int i = 0; i < updatedArgExprs.length; i++) {
funcIExpr.getArgExprs()[i] = updatedArgExprs[i];
}
return functionSymbol;
}
/**
 * Helper method to generate error message for each ambiguous function:
 * renders the candidate's signature as {@code [pkg:]name(paramType, ...)}.
 * Native candidates are loaded from their {@link NativeUnitProxy} first.
 *
 * @param funcIExpr invocation expression (used for error locations)
 * @param functionSymbol ambiguous candidate symbol to render
 * @param packagePath package path of the invocation
 *        NOTE(review): currently unused in this method — confirm whether it
 *        should qualify {@code funcName} instead of the function's own path.
 * @return errorMsg rendered signature string
 */
private static String generateErrorMessage(FunctionInvocationExpr funcIExpr, BLangSymbol functionSymbol,
String packagePath) {
Function function;
if (functionSymbol instanceof NativeUnitProxy) {
NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load();
if (!(nativeUnit instanceof Function)) {
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
functionSymbol.getName());
}
function = (Function) nativeUnit;
} else {
if (!(functionSymbol instanceof Function)) {
BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
functionSymbol.getName());
}
function = (Function) functionSymbol;
}
// Omit the package qualifier for the default package (null or ".").
String funcName = (function.getPackagePath() == null || function.getPackagePath().equals(".")) ?
function.getName() : function.getPackagePath() + ":" + function.getName();
StringBuilder sBuilder = new StringBuilder(funcName + "(");
String prefix = "";
for (ParameterDef parameterDef : function.getParameterDefs()) {
sBuilder.append(prefix);
prefix = ",";
// Qualify each parameter type with its package path when present.
String pkgPath = parameterDef.getTypeName().getPackagePath();
if (pkgPath != null) {
sBuilder.append(pkgPath).append(":");
}
sBuilder.append(parameterDef.getTypeName().getName());
}
sBuilder.append(")");
return sBuilder.toString();
}
/**
 * Walks up the enclosing-scope chain from the given scope until the
 * surrounding {@link BLangPackage} scope is reached.
 *
 * @param scope scope to start from
 * @return scope the enclosing package scope
 */
private SymbolScope getCurrentPackageScope(SymbolScope scope) {
    // Iterative form of the scope-chain walk.
    SymbolScope current = scope;
    while (!(current instanceof BLangPackage)) {
        current = current.getEnclosingScope();
    }
    return current;
}
/**
 * Resolves the connector action referred to by an invocation expression and
 * attaches it via {@code setCallableUnit}. First resolves the connector
 * symbol, then the action within that connector by name and parameter types;
 * native actions are loaded from their proxy and have return types resolved.
 *
 * @param actionIExpr action invocation expression to link
 */
private void linkAction(ActionInvocationExpr actionIExpr) {
String pkgPath = actionIExpr.getPackagePath();
String connectorName = actionIExpr.getConnectorName();
// First, resolve the connector this action belongs to.
SymbolName connectorSymbolName = new SymbolName(connectorName, pkgPath);
BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName);
if (connectorSymbol == null) {
String connectorWithPkgName = (actionIExpr.getPackageName() != null) ? actionIExpr.getPackageName() +
":" + actionIExpr.getConnectorName() : actionIExpr.getConnectorName();
BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_CONNECTOR,
connectorWithPkgName);
return;
}
// Build the action signature from the (already analyzed) argument expressions.
Expression[] exprs = actionIExpr.getArgExprs();
BType[] paramTypes = new BType[exprs.length];
for (int i = 0; i < exprs.length; i++) {
paramTypes[i] = exprs[i].getType();
}
SymbolName actionSymbolName = LangModelUtils.getActionSymName(actionIExpr.getName(),
actionIExpr.getPackagePath(), actionIExpr.getConnectorName(), paramTypes);
BLangSymbol actionSymbol = null;
// Actions are members of the connector definition, not the enclosing scope.
if (connectorSymbol instanceof BallerinaConnectorDef) {
actionSymbol = ((BallerinaConnectorDef) connectorSymbol).resolveMembers(actionSymbolName);
} else {
BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_CONNECTOR_EXPECTED,
connectorSymbolName);
}
// Unwrap native actions to their NativeUnitProxy.
if ((actionSymbol instanceof BallerinaAction) && (actionSymbol.isNative())) {
actionSymbol = ((BallerinaAction) actionSymbol).getNativeAction();
}
if (actionSymbol == null) {
BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_ACTION,
actionIExpr.getName(), connectorSymbol.getSymbolName());
}
Action action = null;
if (actionSymbol instanceof NativeUnitProxy) {
// Native action: load the real unit and resolve its return types here.
NativeUnit nativeUnit = ((NativeUnitProxy) actionSymbol).load();
SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
BType[] returnTypes = new BType[returnParamTypeNames.length];
for (int i = 0; i < returnParamTypeNames.length; i++) {
SimpleTypeName typeName = returnParamTypeNames[i];
BType bType = BTypes.resolveType(typeName, currentScope, actionIExpr.getNodeLocation());
returnTypes[i] = bType;
}
if (!(nativeUnit instanceof Action)) {
BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
actionSymbolName);
}
action = (Action) nativeUnit;
action.setReturnParamTypes(returnTypes);
} else if (actionSymbol instanceof Action) {
action = (Action) actionSymbol;
} else {
BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
actionSymbolName);
}
actionIExpr.setCallableUnit(action);
}
/**
 * Resolves the worker targeted by a worker invocation statement and attaches
 * it via {@code setCallableUnit}.
 *
 * @param workerInvocationStmt worker invocation statement to link
 * @throws LinkerException if no worker with that name is in scope
 */
private void linkWorker(WorkerInvocationStmt workerInvocationStmt) {
    String workerName = workerInvocationStmt.getCallableUnitName();
    Worker worker = (Worker) currentScope.resolve(new SymbolName(workerName));
    if (worker == null) {
        throw new LinkerException(workerInvocationStmt.getNodeLocation().getFileName() + ":" +
                workerInvocationStmt.getNodeLocation().getLineNumber() +
                ": undefined worker '" + workerInvocationStmt.getCallableUnitName() + "'");
    }
    workerInvocationStmt.setCallableUnit(worker);
}
/**
 * Raises the appropriate semantic error for an unsupported binary operation:
 * "operator not defined" when both operand types agree, otherwise
 * "incompatible types".
 *
 * @param binaryExpr offending binary expression
 */
private void throwInvalidBinaryOpError(BinaryExpression binaryExpr) {
    BType leftType = binaryExpr.getLExpr().getType();
    BType rightType = binaryExpr.getRExpr().getType();
    if (leftType != rightType) {
        BLangExceptionHelper.throwSemanticError(binaryExpr,
                SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, leftType, rightType);
    } else {
        BLangExceptionHelper.throwSemanticError(binaryExpr,
                SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), leftType);
    }
}
/**
 * Raises a semantic error for a unary operator that is not defined for the
 * operand's type.
 *
 * @param unaryExpr offending unary expression
 */
private void throwInvalidUnaryOpError(UnaryExpression unaryExpr) {
    BType operandType = unaryExpr.getRExpr().getType();
    BLangExceptionHelper.throwSemanticError(unaryExpr,
            SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, unaryExpr.getOperator(), operandType);
}
/**
 * Semantically analyzes one link of a field-access chain (e.g. one step of
 * {@code a.b.c}): resolves the variable in the enclosing scope, validates any
 * array/map index access on it, then dispatches the remaining chain based on
 * the resolved type (struct, JSON, map, or array).
 *
 * @param fieldAccessExpr field access expression link to analyze
 * @param enclosingScope scope in which the link's variable is resolved
 */
private void visitField(FieldAccessExpr fieldAccessExpr, SymbolScope enclosingScope) {
ReferenceExpr varRefExpr = (ReferenceExpr) fieldAccessExpr.getVarRef();
SymbolName symbolName = varRefExpr.getSymbolName();
BLangSymbol fieldSymbol;
// Struct scopes resolve members with the struct's package path; other scopes
// resolve through the normal scope chain.
if (enclosingScope instanceof StructDef) {
fieldSymbol = ((StructDef) enclosingScope).resolveMembers(new SymbolName(symbolName.getName(),
((StructDef) enclosingScope).getPackagePath()));
} else {
fieldSymbol = enclosingScope.resolve(symbolName);
}
if (fieldSymbol == null) {
if (enclosingScope instanceof StructDef) {
BLangExceptionHelper.throwSemanticError(fieldAccessExpr, SemanticErrors.UNKNOWN_FIELD_IN_STRUCT,
symbolName.getName(), ((StructDef) enclosingScope).getName());
} else {
BLangExceptionHelper.throwSemanticError(fieldAccessExpr, SemanticErrors.UNDEFINED_SYMBOL,
symbolName.getName());
}
}
if (!(fieldSymbol instanceof VariableDef)) {
BLangExceptionHelper.throwSemanticError(varRefExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND,
symbolName);
}
VariableDef varDef = (VariableDef) fieldSymbol;
BType exprType = varDef.getType();
/* Get the actual var representation of this field, and semantically analyze. This will check for semantic
 * errors of arrays/map accesses, used in this field.
 * eg: in dpt.employee[2].name , below will check for semantics of 'employee[2]',
 * treating them as individual arrays/map variables.
 */
if (varRefExpr instanceof ArrayMapAccessExpr) {
Expression rExpr = ((ArrayMapAccessExpr) varRefExpr).getRExpr();
if (rExpr instanceof VariableRefExpr) {
((VariableRefExpr) rExpr).setVariableDef(varDef);
}
// Indexing into an array yields the element type for the rest of the chain.
if (exprType instanceof BArrayType) {
exprType = ((BArrayType) varDef.getType()).getElementType();
}
handleArrayType((ArrayMapAccessExpr) varRefExpr);
} else {
((VariableRefExpr) varRefExpr).setVariableDef(varDef);
}
FieldAccessExpr fieldExpr = (FieldAccessExpr) fieldAccessExpr.getFieldExpr();
// End of the chain: nothing further to analyze.
if (fieldExpr == null) {
return;
}
// Dispatch the remainder of the chain according to the current link's type.
if (exprType instanceof StructDef) {
visitStructAccessExpr(fieldExpr, exprType);
} else if (exprType instanceof BJSONType) {
visitJSONAccessExpr(fieldAccessExpr, fieldExpr);
} else if (exprType instanceof BMapType) {
visitMapAccessExpr(fieldAccessExpr, varRefExpr, fieldExpr, enclosingScope);
} else if (exprType instanceof BArrayType) {
visitArrayAccessExpr(fieldAccessExpr, varRefExpr, fieldExpr, exprType, enclosingScope);
} else {
BLangExceptionHelper.throwSemanticError(fieldAccessExpr,
SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, exprType);
}
}
/**
 * Visit a struct and its fields and semantically validate the field
 * expression. Literal string keys are rewritten into static variable
 * references; dynamic (non-literal) keys are rejected for structs.
 *
 * @param fieldExpr field expression to validate
 * @param exprType Struct definition
 */
private void visitStructAccessExpr(FieldAccessExpr fieldExpr, BType exprType) {
    Expression keyExpr = fieldExpr.getVarRef();
    if (keyExpr instanceof BasicLiteral) {
        // Rewrite a literal key (e.g. s["name"]) into a static field reference.
        String fieldName = ((BasicLiteral) keyExpr).getBValue().stringValue();
        fieldExpr.setVarRef(new VariableRefExpr(keyExpr.getNodeLocation(), fieldName));
        fieldExpr.setIsStaticField(true);
    }
    if (!fieldExpr.isStaticField()) {
        BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.DYNAMIC_KEYS_NOT_SUPPORTED_FOR_STRUCT);
    }
    visitField(fieldExpr, (StructDef) exprType);
}
/**
 * Visits a JSON access expression. Rewrites the tree by replacing the {@link FieldAccessExpr}
 * with a {@link JSONFieldAccessExpr}. Recurses down the whole remaining field
 * chain so every link below a JSON-typed value becomes a JSON field access.
 *
 * @param parentExpr Current expression
 * @param fieldExpr Field access expression of the current expression
 */
private void visitJSONAccessExpr(FieldAccessExpr parentExpr, FieldAccessExpr fieldExpr) {
// Base case: reached the end of the access chain.
if (fieldExpr == null) {
return;
}
FieldAccessExpr currentFieldExpr;
FieldAccessExpr nextFieldExpr = fieldExpr.getFieldExpr();
if (fieldExpr instanceof JSONFieldAccessExpr) {
// Already rewritten (e.g. on a revisit) — keep it as-is.
currentFieldExpr = fieldExpr;
} else {
// Analyze the key expression, then wrap this link as a JSON field access.
Expression varRefExpr = fieldExpr.getVarRef();
varRefExpr.accept(this);
currentFieldExpr = new JSONFieldAccessExpr(fieldExpr.getNodeLocation(), varRefExpr, nextFieldExpr);
}
parentExpr.setFieldExpr(currentFieldExpr);
// Continue rewriting the rest of the chain.
visitJSONAccessExpr(currentFieldExpr, nextFieldExpr);
}
/**
 * Visits a map access expression. Rewrites the tree by replacing the {@link FieldAccessExpr} with an
 * {@link ArrayMapAccessExpr}. Then revisits the rewritten branch, and check for semantic.
 *
 * @param parentExpr Current expression
 * @param varRefExpr VariableRefExpression of the current expression
 * @param fieldExpr Field access expression of the current expression
 * @param enclosingScope Enclosing scope
 */
private void visitMapAccessExpr(FieldAccessExpr parentExpr, ReferenceExpr varRefExpr, FieldAccessExpr fieldExpr,
SymbolScope enclosingScope) {
Expression fieldVar = fieldExpr.getVarRef();
// Map values are of type 'any', so chaining further field accesses off a map
// element is not allowed.
if (fieldExpr.getFieldExpr() != null) {
BLangExceptionHelper.throwSemanticError(fieldExpr, SemanticErrors.INDEXING_NOT_SUPPORTED_FOR_MAP_ELEMENT,
BTypes.typeAny);
}
// Build an ArrayMapAccessExpr equivalent to this map access, keyed by the field var.
Expression indexExpr[] = new Expression[]{fieldVar};
ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder = new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
builder.setVarName(varRefExpr.getVarName());
builder.setPkgName(varRefExpr.getPkgName());
builder.setPkgPath(varRefExpr.getPkgPath());
builder.setIndexExprs(indexExpr);
builder.setArrayMapVarRefExpr(varRefExpr);
builder.setNodeLocation(fieldExpr.getNodeLocation());
ArrayMapAccessExpr accessExpr = builder.build();
// Splice the rewritten access into the parent and re-analyze the branch.
parentExpr.setFieldExpr(fieldExpr.getFieldExpr());
parentExpr.setVarRef(accessExpr);
accessExpr.setLHSExpr(parentExpr.isLHSExpr());
visitField(parentExpr, enclosingScope);
}
/**
 * Visits an array access expression. Rewrites the tree by replacing the {@link FieldAccessExpr} with an
 * {@link ArrayMapAccessExpr}. Then revisits the rewritten branch, and check for semantic.
 *
 * @param parentExpr Current expression
 * @param varRefExpr VariableRefExpression of the current expression
 * @param fieldExpr Field access expression of the current expression
 * @param exprType Type to which the expression evaluates
 * @param enclosingScope Enclosing scope
 */
private void visitArrayAccessExpr(FieldAccessExpr parentExpr, ReferenceExpr varRefExpr, FieldAccessExpr fieldExpr,
BType exprType, SymbolScope enclosingScope) {
// Consume up to 'dimensions' links of the chain as index expressions
// (e.g. a[1][2] for a 2-dimensional array).
int dimensions = ((BArrayType) exprType).getDimensions();
List<Expression> indexExprs = new ArrayList<Expression>();
for (int i = 0; i < dimensions; i++) {
if (fieldExpr == null) {
break;
}
indexExprs.add(fieldExpr.getVarRef());
fieldExpr = (FieldAccessExpr) fieldExpr.getFieldExpr();
}
// Indexes were collected innermost-first; the builder expects source order.
Collections.reverse(indexExprs);
ArrayMapAccessExpr.ArrayMapAccessExprBuilder builder = new ArrayMapAccessExpr.ArrayMapAccessExprBuilder();
builder.setVarName(varRefExpr.getVarName());
builder.setPkgName(varRefExpr.getPkgName());
builder.setPkgPath(varRefExpr.getPkgPath());
builder.setIndexExprs(indexExprs.toArray(new Expression[0]));
builder.setArrayMapVarRefExpr(varRefExpr);
builder.setNodeLocation(parentExpr.getNodeLocation());
ArrayMapAccessExpr accessExpr = builder.build();
// Splice the rewritten access into the parent and re-analyze the branch.
parentExpr.setFieldExpr(fieldExpr);
parentExpr.setVarRef(accessExpr);
accessExpr.setLHSExpr(parentExpr.isLHSExpr());
visitField(parentExpr, enclosingScope);
}
/**
 * Resolves the type mapper used by an explicit cast and attaches it to the
 * cast expression. Lookup order: (1) the current package's type lattice,
 * (2) the global explicit-cast lattice scoped to the current package,
 * (3) the global explicit-cast lattice without a package, and finally
 * (4) a native type mapper resolved from the native scope. If none is found,
 * an "incompatible types, cannot cast" error is raised.
 *
 * @param typeCastExpression cast expression being linked
 * @param sourceType type being cast from
 * @param targetType type being cast to
 */
private void linkTypeMapper(TypeCastExpression typeCastExpression, BType sourceType, BType targetType) {
TypeEdge newEdge = null;
TypeMapper typeMapper;
// (1) Type mapper defined in the current package.
newEdge = packageTypeLattice.getEdgeFromTypes(sourceType, targetType, currentPkg);
if (newEdge != null) {
typeMapper = newEdge.getTypeMapper();
if (typeMapper != null) {
typeCastExpression.setCallableUnit(typeMapper);
}
} else {
// (2) Explicit-cast lattice scoped to the current package.
newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, currentPkg);
if (newEdge != null) {
typeMapper = newEdge.getTypeMapper();
if (typeMapper != null) {
typeCastExpression.setCallableUnit(typeMapper);
}
} else {
// (3) Explicit-cast lattice without a package qualifier (built-in casts).
newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
if (newEdge != null) {
typeMapper = newEdge.getTypeMapper();
if (typeMapper != null) {
typeCastExpression.setCallableUnit(typeMapper);
}
} else {
// (4) Fall back to a native type mapper from the native scope.
String pkgPath = typeCastExpression.getPackagePath();
Expression[] exprs = typeCastExpression.getArgExprs();
BType[] paramTypes = new BType[exprs.length];
for (int i = 0; i < exprs.length; i++) {
paramTypes[i] = exprs[i].getType();
}
SymbolName symbolName = LangModelUtils.getTypeMapperSymName(pkgPath,
sourceType, targetType);
BLangSymbol typeMapperSymbol = nativeScope.resolve(symbolName);
if (typeMapperSymbol == null) {
BLangExceptionHelper.throwSemanticError(typeCastExpression,
SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType);
}
if (typeMapperSymbol instanceof NativeUnitProxy) {
// Native mapper: load the real unit and resolve its declared return types.
NativeUnit nativeUnit = ((NativeUnitProxy) typeMapperSymbol).load();
SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
BType[] returnTypes = new BType[returnParamTypeNames.length];
for (int i = 0; i < returnParamTypeNames.length; i++) {
SimpleTypeName typeName = returnParamTypeNames[i];
BType bType = BTypes.resolveType(typeName, currentScope,
typeCastExpression.getNodeLocation());
returnTypes[i] = bType;
}
if (!(nativeUnit instanceof TypeMapper)) {
BLangExceptionHelper.throwSemanticError(typeCastExpression,
SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
}
typeMapper = (TypeMapper) nativeUnit;
typeMapper.setReturnParamTypes(returnTypes);
} else {
if (!(typeMapperSymbol instanceof TypeMapper)) {
BLangExceptionHelper.throwSemanticError(typeCastExpression,
SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
}
typeMapper = (TypeMapper) typeMapperSymbol;
}
if (typeMapper != null) {
typeMapper.setParameterTypes(paramTypes);
typeCastExpression.setCallableUnit(typeMapper);
} else {
BLangExceptionHelper.throwSemanticError(typeCastExpression,
SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType);
}
}
}
}
}
/**
 * Checks whether the given expression can be implicitly widened to
 * {@code lhsType}. If an implicit cast edge exists, returns a new
 * {@link TypeCastExpression} wrapping the expression; otherwise returns null.
 *
 * @param lhsType type to widen to
 * @param rhsExpr expression to widen
 * @return a wrapping cast expression, or null when no implicit cast exists
 */
private TypeCastExpression checkWideningPossible(BType lhsType, Expression rhsExpr) {
    BType rhsType = rhsExpr.getType();
    // A cast expression may not have its type computed yet; resolve it from its type name.
    if (rhsType == null && rhsExpr instanceof TypeCastExpression) {
        rhsType = BTypes.resolveType(((TypeCastExpression) rhsExpr).getTypeName(), currentScope, null);
    }
    TypeEdge edge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rhsType, lhsType, null);
    if (edge == null) {
        return null;
    }
    TypeCastExpression castExpr = new TypeCastExpression(rhsExpr.getNodeLocation(), rhsExpr, lhsType);
    castExpr.setEvalFunc(edge.getTypeMapperFunction());
    return castExpr;
}
/**
 * Assigns a memory location to a variable definition based on the kind of
 * scope it is declared in, bumping the corresponding offset counter: worker
 * or stack slot for locals, service/global static memory, connector memory,
 * or struct field memory.
 *
 * @param variableDef variable definition to place
 */
private void setMemoryLocation(VariableDef variableDef) {
    switch (currentScope.getScopeName()) {
        case LOCAL:
            // Locals declared directly inside a worker get worker memory; all
            // other locals live on the stack frame.
            if (currentScope.getEnclosingScope().getScopeName() == SymbolScope.ScopeName.WORKER) {
                variableDef.setMemoryLocation(new WorkerVarLocation(++workerMemAddrOffset));
            } else {
                variableDef.setMemoryLocation(new StackVarLocation(++stackFrameOffset));
            }
            break;
        case SERVICE:
            variableDef.setMemoryLocation(new ServiceVarLocation(++staticMemAddrOffset));
            break;
        case CONNECTOR:
            variableDef.setMemoryLocation(new ConnectorVarLocation(++connectorMemAddrOffset));
            break;
        case STRUCT:
            variableDef.setMemoryLocation(new StructVarLocation(++structMemAddrOffset));
            break;
        case PACKAGE:
            variableDef.setMemoryLocation(new GlobalVarLocation(++staticMemAddrOffset));
            break;
        default:
            // Other scope kinds receive no memory location, as before.
            break;
    }
}
/**
 * Defines each function's symbol in the current scope: resolves parameter and
 * return types, builds the signature-qualified symbol name, checks for
 * redeclaration, and binds native functions to their {@link NativeUnitProxy}.
 *
 * @param functions functions declared in the package being analyzed
 */
private void defineFunctions(Function[] functions) {
for (Function function : functions) {
// Resolve the declared parameter type names into BTypes.
ParameterDef[] paramDefArray = function.getParameterDefs();
BType[] paramTypes = new BType[paramDefArray.length];
for (int i = 0; i < paramDefArray.length; i++) {
ParameterDef paramDef = paramDefArray[i];
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
paramTypes[i] = bType;
}
function.setParameterTypes(paramTypes);
// The symbol name includes parameter types, allowing overload-style lookup.
FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(function.getName(),
function.getPackagePath(), paramTypes);
function.setSymbolName(symbolName);
BLangSymbol functionSymbol = currentScope.resolve(symbolName);
// Non-native functions must not clash with an existing symbol.
if (!function.isNative() && functionSymbol != null) {
BLangExceptionHelper.throwSemanticError(function,
SemanticErrors.REDECLARED_SYMBOL, function.getName());
}
if (function.isNative() && functionSymbol == null) {
// Native functions must have a matching implementation in the native scope.
functionSymbol = nativeScope.resolve(symbolName);
if (functionSymbol == null) {
BLangExceptionHelper.throwSemanticError(function,
SemanticErrors.UNDEFINED_FUNCTION, function.getName());
}
if (function instanceof BallerinaFunction) {
((BallerinaFunction) function).setNativeFunction((NativeUnitProxy) functionSymbol);
}
}
currentScope.define(symbolName, function);
// Resolve the declared return parameter type names into BTypes.
ParameterDef[] returnParameters = function.getReturnParameters();
BType[] returnTypes = new BType[returnParameters.length];
for (int i = 0; i < returnParameters.length; i++) {
ParameterDef paramDef = returnParameters[i];
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
returnTypes[i] = bType;
}
function.setReturnParamTypes(returnTypes);
}
}
/**
 * Defines each type mapper's symbol: resolves its single source and target
 * types, registers the symbol (for non-native mappers), and adds a
 * source-to-target edge in the package type lattice so casts can find it.
 *
 * @param typeMappers type mappers declared in the package being analyzed
 */
private void defineTypeMappers(TypeMapper[] typeMappers) {
for (TypeMapper typeMapper : typeMappers) {
NodeLocation location = typeMapper.getNodeLocation();
// A type mapper has exactly one input parameter and one return parameter.
SimpleTypeName sourceType = typeMapper.getParameterDefs()[0].getTypeName();
BType sourceBType = BTypes.resolveType(sourceType, currentScope, location);
typeMapper.setParameterTypes(new BType[] { sourceBType });
SimpleTypeName targetType = typeMapper.getReturnParameters()[0].getTypeName();
BType targetBType = BTypes.resolveType(targetType, currentScope, location);
TypeVertex sourceV = new TypeVertex(sourceBType);
TypeVertex targetV = new TypeVertex(targetBType);
typeMapper.setReturnParamTypes(new BType[] { targetBType });
SymbolName symbolName = LangModelUtils
.getTypeMapperSymName(typeMapper.getPackagePath(), sourceBType, targetBType);
typeMapper.setSymbolName(symbolName);
BLangSymbol typConvertorSymbol = currentScope.resolve(symbolName);
// Native mappers must already exist in scope; non-native mappers must not.
if (typeMapper.isNative() && typConvertorSymbol == null) {
BLangExceptionHelper
.throwSemanticError(typeMapper, SemanticErrors.UNDEFINED_TYPE_MAPPER, typeMapper.getName());
}
if (!typeMapper.isNative()) {
if (typConvertorSymbol != null) {
BLangExceptionHelper
.throwSemanticError(typeMapper, SemanticErrors.REDECLARED_SYMBOL, typeMapper.getName());
}
currentScope.define(symbolName, typeMapper);
}
// Register the conversion edge in the package lattice; "." is the default package.
packageTypeLattice.addVertex(sourceV, true);
packageTypeLattice.addVertex(targetV, true);
packageTypeLattice.addEdge(sourceV, targetV, typeMapper,
typeMapper.getPackagePath() != null ? typeMapper.getPackagePath() : ".");
}
}
/**
 * Defines connector symbols in the current scope. First pass: register each
 * connector, synthesize its {@code <init>} function from its variable
 * definition statements, and attach a native init action when one exists.
 * Second pass: define each connector's actions inside the connector's scope.
 *
 * @param connectorDefArray connectors declared in the package being analyzed
 */
private void defineConnectors(BallerinaConnectorDef[] connectorDefArray) {
for (BallerinaConnectorDef connectorDef : connectorDefArray) {
String connectorName = connectorDef.getName();
// Connector names must be unique within the package.
SymbolName connectorSymbolName = new SymbolName(connectorName, connectorDef.getPackagePath());
BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName);
if (connectorSymbol != null) {
BLangExceptionHelper.throwSemanticError(connectorDef,
SemanticErrors.REDECLARED_SYMBOL, connectorName);
}
currentScope.define(connectorSymbolName, connectorDef);
// Synthesize "<ConnectorName>.<init>" from the connector's variable-def statements.
BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
connectorDef.getNodeLocation(), connectorDef);
for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) {
blockStmtBuilder.addStmt(variableDefStmt);
}
BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
new BallerinaFunction.BallerinaFunctionBuilder(connectorDef);
functionBuilder.setNodeLocation(connectorDef.getNodeLocation());
functionBuilder.setIdentifier(new Identifier(connectorName + ".<init>"));
functionBuilder.setPkgPath(connectorDef.getPackagePath());
functionBuilder.setBody(blockStmtBuilder.build());
connectorDef.setInitFunction(functionBuilder.buildFunction());
// Attach a native init action ("NativeAction.<name>.<init>") when one is registered.
BLangSymbol actionSymbol = null;
SymbolName name = new SymbolName("NativeAction." + connectorName
+ ".<init>", connectorDef.getPackagePath());
actionSymbol = nativeScope.resolve(name);
if (actionSymbol != null) {
if (actionSymbol instanceof NativeUnitProxy) {
NativeUnit nativeUnit = ((NativeUnitProxy) actionSymbol).load();
Action action = (Action) nativeUnit;
connectorDef.setInitAction(action);
}
}
}
// Second pass: actions are defined inside each connector's own scope.
for (BallerinaConnectorDef connectorDef : connectorDefArray) {
openScope(connectorDef);
for (BallerinaAction bAction : connectorDef.getActions()) {
bAction.setConnectorDef(connectorDef);
defineAction(bAction, connectorDef);
}
closeScope();
}
}
/**
 * Defines an action symbol inside its connector's scope: resolves parameter
 * and return types, builds the signature-qualified symbol name, checks for
 * redeclaration, and binds native actions to their {@link NativeUnitProxy}.
 *
 * @param action action to define
 * @param connectorDef connector the action belongs to
 */
private void defineAction(BallerinaAction action, BallerinaConnectorDef connectorDef) {
// Resolve the declared parameter type names into BTypes.
ParameterDef[] paramDefArray = action.getParameterDefs();
BType[] paramTypes = new BType[paramDefArray.length];
for (int i = 0; i < paramDefArray.length; i++) {
ParameterDef paramDef = paramDefArray[i];
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
paramTypes[i] = bType;
}
action.setParameterTypes(paramTypes);
// The symbol name is qualified by the connector name and parameter types.
SymbolName symbolName = LangModelUtils.getActionSymName(action.getName(), action.getPackagePath(),
connectorDef.getName(), paramTypes);
action.setSymbolName(symbolName);
BLangSymbol actionSymbol = currentScope.resolve(symbolName);
if (actionSymbol != null) {
BLangExceptionHelper.throwSemanticError(action, SemanticErrors.REDECLARED_SYMBOL, action.getName());
}
currentScope.define(symbolName, action);
if (action.isNative()) {
// Native actions must have a matching implementation in the native scope.
SymbolName nativeActionSymName = LangModelUtils.getNativeActionSymName(action.getName(),
connectorDef.getName(), action.getPackagePath(), paramTypes);
BLangSymbol nativeAction = nativeScope.resolve(nativeActionSymName);
if (nativeAction == null || !(nativeAction instanceof NativeUnitProxy)) {
BLangExceptionHelper.throwSemanticError(connectorDef,
SemanticErrors.UNDEFINED_NATIVE_ACTION, action.getName(), connectorDef.getName());
return;
}
action.setNativeAction((NativeUnitProxy) nativeAction);
}
// Resolve the declared return parameter type names into BTypes.
ParameterDef[] returnParameters = action.getReturnParameters();
BType[] returnTypes = new BType[returnParameters.length];
for (int i = 0; i < returnParameters.length; i++) {
ParameterDef paramDef = returnParameters[i];
BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
paramDef.setType(bType);
returnTypes[i] = bType;
}
action.setReturnParamTypes(returnTypes);
}
/**
 * Defines service symbols in the current scope: checks for redeclaration,
 * synthesizes each service's {@code <init>} function from its variable
 * definition statements, and defines the service's resources inside the
 * service's own scope.
 *
 * @param services services declared in the package being analyzed
 */
private void defineServices(Service[] services) {
for (Service service : services) {
// Service names must be unique within the package.
if (currentScope.resolve(service.getSymbolName()) != null) {
BLangExceptionHelper.throwSemanticError(service, SemanticErrors.REDECLARED_SYMBOL, service.getName());
}
currentScope.define(service.getSymbolName(), service);
// Synthesize "<ServiceName>.<init>" from the service's variable-def statements.
BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
service.getNodeLocation(), service);
for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) {
blockStmtBuilder.addStmt(variableDefStmt);
}
BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
new BallerinaFunction.BallerinaFunctionBuilder(service);
functionBuilder.setNodeLocation(service.getNodeLocation());
functionBuilder.setIdentifier(new Identifier(service.getName() + ".<init>"));
functionBuilder.setPkgPath(service.getPackagePath());
functionBuilder.setBody(blockStmtBuilder.build());
service.setInitFunction(functionBuilder.buildFunction());
// Resources are defined inside the service's own scope.
openScope(service);
for (Resource resource : service.getResources()) {
defineResource(resource, service);
}
closeScope();
}
}
/**
 * Defines a resource symbol inside its service's scope: resolves parameter
 * types, builds the signature-qualified symbol name, and rejects
 * redeclarations.
 *
 * @param resource resource to define
 * @param service service the resource belongs to
 */
private void defineResource(Resource resource, Service service) {
    // Resolve each declared parameter type name into a BType.
    ParameterDef[] paramDefs = resource.getParameterDefs();
    BType[] paramTypes = new BType[paramDefs.length];
    for (int i = 0; i < paramDefs.length; i++) {
        BType resolvedType = BTypes.resolveType(paramDefs[i].getTypeName(), currentScope,
                paramDefs[i].getNodeLocation());
        paramDefs[i].setType(resolvedType);
        paramTypes[i] = resolvedType;
    }
    resource.setParameterTypes(paramTypes);
    // The symbol name is qualified by the service name and parameter types.
    SymbolName symbolName = LangModelUtils.getActionSymName(resource.getName(),
            resource.getPackagePath(), service.getName(), paramTypes);
    resource.setSymbolName(symbolName);
    if (currentScope.resolve(symbolName) != null) {
        BLangExceptionHelper.throwSemanticError(resource, SemanticErrors.REDECLARED_SYMBOL, resource.getName());
    }
    currentScope.define(symbolName, resource);
}
/**
 * Defines struct symbols in three passes: (1) register each struct and
 * synthesize its {@code <name>.<init>} function from the field definition
 * statements, (2) resolve field types and compute each struct's memory size,
 * (3) add implicit cast/conversion edges between structs to the type lattice.
 * The passes are separate so structs may reference each other.
 */
private void defineStructs(StructDef[] structDefs) {
    // Pass 1: register symbols and build init functions.
    for (StructDef structDef : structDefs) {
        SymbolName symbolName = new SymbolName(structDef.getName(), structDef.getPackagePath());
        if (currentScope.resolve(symbolName) != null) {
            BLangExceptionHelper.throwSemanticError(structDef,
                    SemanticErrors.REDECLARED_SYMBOL, structDef.getName());
        }
        currentScope.define(symbolName, structDef);
        BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(
                structDef.getNodeLocation(), structDef);
        for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) {
            blockStmtBuilder.addStmt(variableDefStmt);
        }
        BallerinaFunction.BallerinaFunctionBuilder functionBuilder =
                new BallerinaFunction.BallerinaFunctionBuilder(structDef);
        functionBuilder.setNodeLocation(structDef.getNodeLocation());
        // FIX: use the struct's name rather than StructDef.toString(), so the
        // identifier follows the same "<name>.<init>" convention as services
        // (see defineServices) instead of whatever toString() happens to emit.
        functionBuilder.setIdentifier(new Identifier(structDef.getName() + ".<init>"));
        functionBuilder.setPkgPath(structDef.getPackagePath());
        functionBuilder.setBody(blockStmtBuilder.build());
        structDef.setInitFunction(functionBuilder.buildFunction());
    }
    // Pass 2: resolve field types and compute memory layout. Field offsets are
    // accumulated in structMemAddrOffset while the struct acts as the scope.
    for (StructDef structDef : structDefs) {
        SymbolScope tmpScope = currentScope;
        currentScope = structDef;
        for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) {
            fieldDefStmt.accept(this);
        }
        structDef.setStructMemorySize(structMemAddrOffset + 1);
        structMemAddrOffset = -1;
        currentScope = tmpScope;
    }
    // Pass 3: wire struct conversion edges into the type lattice.
    for (StructDef structDef : structDefs) {
        TypeLattice.addStructEdges(structDef, currentScope);
    }
}
/**
 * Registers the given annotation definitions in the current scope.
 * An annotation name must be unique within the current package.
 *
 * @param annotationDefs annotation definitions to register
 */
private void defineAnnotations(AnnotationDef[] annotationDefs) {
    for (AnnotationDef annotation : annotationDefs) {
        SymbolName annotationSymbol = new SymbolName(annotation.getName(), currentPkg);
        if (currentScope.resolve(annotationSymbol) != null) {
            BLangExceptionHelper.throwSemanticError(annotation,
                    SemanticErrors.REDECLARED_SYMBOL, annotation.getSymbolName().getName());
        }
        currentScope.define(annotationSymbol, annotation);
    }
}
/**
 * Resolves and assigns the declared type of every field of every struct.
 * Runs after all struct symbols exist so fields may reference other structs.
 */
private void resolveStructFieldTypes(StructDef[] structDefs) {
    for (StructDef struct : structDefs) {
        for (VariableDefStmt fieldStmt : struct.getFieldDefStmts()) {
            VariableDef field = fieldStmt.getVariableDef();
            field.setType(BTypes.resolveType(field.getTypeName(), currentScope,
                    field.getNodeLocation()));
        }
    }
}
/**
 * Reports an error if any non-comment statement exists at or after
 * {@code stmtIndex}; comment statements are skipped. Iterative form of the
 * original tail recursion — identical behavior.
 */
private void checkUnreachableStmt(Statement[] stmts, int stmtIndex) {
    int index = stmtIndex;
    while (index < stmts.length) {
        if (stmts[index] instanceof CommentStmt) {
            index++;
            continue;
        }
        // throwSemanticError is expected to raise; return mirrors the original
        // recursion, which stopped after the first non-comment statement.
        BLangExceptionHelper.throwSemanticError(stmts[index], SemanticErrors.UNREACHABLE_STATEMENT);
        return;
    }
}
/**
 * Recursively visits a nested init expression. Reconstructs the init expression
 * with the specific init expression type (JSON array/object, map, or struct
 * init), replacing the generic {@link RefTypeInitExpr}.
 *
 * @param expr the generic init expression to specialize; must be a {@link RefTypeInitExpr}
 * @param fieldType Type of the current field
 * @return reconstructed nested init expression, with its inherited type set
 */
private RefTypeInitExpr getNestedInitExpr(Expression expr, BType fieldType) {
    RefTypeInitExpr refTypeInitExpr = (RefTypeInitExpr) expr;
    if (refTypeInitExpr instanceof ArrayInitExpr) {
        if (fieldType == BTypes.typeAny || fieldType == BTypes.typeMap) {
            // An array literal assigned to any/map defaults to any[].
            fieldType = BTypes.resolveType(new SimpleTypeName(BTypes.typeAny.getName(),
                    true, 1), currentScope, expr.getNodeLocation());
        } else if (fieldType == BTypes.typeJSON) {
            refTypeInitExpr = new JSONArrayInitExpr(refTypeInitExpr.getNodeLocation(),
                    refTypeInitExpr.getArgExprs());
        }
    } else if (!(refTypeInitExpr instanceof BacktickExpr)) {
        // A brace-style literal assigned to 'any' is treated as a map literal.
        if (fieldType == BTypes.typeAny) {
            fieldType = BTypes.typeMap;
        }
        if (fieldType == BTypes.typeMap) {
            refTypeInitExpr = new MapInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        } else if (fieldType == BTypes.typeJSON) {
            refTypeInitExpr = new JSONInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        } else if (fieldType instanceof StructDef) {
            refTypeInitExpr = new StructInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getArgExprs());
        }
    }
    refTypeInitExpr.setInheritedType(fieldType);
    return refTypeInitExpr;
}
/**
 * Visit and validate a map/json initialize expression. Normalizes bare
 * identifier keys to string literals, specializes nested init values, and for
 * JSON targets verifies (or inserts an implicit cast so) that each value is
 * JSON-compatible.
 *
 * @param initExpr Expression to visit; its inherited type must already be set
 *                 to either map or json.
 */
private void visitMapJsonInitExpr(RefTypeInitExpr initExpr) {
    BType inheritedType = initExpr.getInheritedType();
    initExpr.setType(inheritedType);
    Expression[] argExprs = initExpr.getArgExprs();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr;
        Expression keyExpr = keyValueExpr.getKeyExpr();
        // {name: v} — a bare identifier key is sugar for the string "name".
        if (keyExpr instanceof VariableRefExpr) {
            BString key = new BString(((VariableRefExpr) keyExpr).getVarName());
            keyExpr = new BasicLiteral(keyExpr.getNodeLocation(), new SimpleTypeName(TypeConstants.STRING_TNAME),
                    key);
            keyValueExpr.setKeyExpr(keyExpr);
        }
        visitSingleValueExpr(keyExpr);
        Expression valueExpr = keyValueExpr.getValueExpr();
        // Nested literals inherit the container's type (map value / json value).
        if (valueExpr instanceof RefTypeInitExpr) {
            valueExpr = getNestedInitExpr(valueExpr, inheritedType);
            keyValueExpr.setValueExpr(valueExpr);
        }
        valueExpr.accept(this);
        // A map accepts any value type; nothing more to validate.
        if (inheritedType == BTypes.typeMap) {
            continue;
        }
        BType valueType = valueExpr.getType();
        if (BTypes.isValueType(valueType) || NativeCastMapper.isCompatible(BTypes.typeJSON, valueType)) {
            continue;
        }
        // Try an implicit widening cast to json; otherwise report an error.
        TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr);
        if (typeCastExpr == null) {
            // NOTE(review): throwSemanticError is expected to throw here; if it
            // ever returned normally, a null cast would be stored below — confirm.
            BLangExceptionHelper.throwSemanticError(initExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                    valueExpr.getType(), BTypes.typeJSON);
        }
        argExprs[i] = typeCastExpr;
    }
}
/**
 * Appends, to the given block builder, a no-arg invocation statement for each
 * dependent package's init function, in list order.
 */
private void addDependentPkgInitCalls(List<BallerinaFunction> initFunctionList,
        BlockStmt.BlockStmtBuilder blockStmtBuilder, NodeLocation initFuncLocation) {
    for (BallerinaFunction initFunction : initFunctionList) {
        FunctionInvocationExpr invocationExpr = new FunctionInvocationExpr(initFuncLocation,
                initFunction.getName(), null,
                initFunction.getPackagePath(), new Expression[] {});
        invocationExpr.setCallableUnit(initFunction);
        blockStmtBuilder.addStmt(new FunctionInvocationStmt(initFuncLocation, invocationExpr));
    }
}
}
|
Support for periodic tasks will be added to the task framework in a later change.
|
/** Returns a snapshot list of all manually created tasks. */
public List<Task> showTask() {
    return Lists.newArrayList(manualTaskMap.values());
}
|
taskList.addAll(manualTaskMap.values());
|
/** Returns a snapshot list of all manually created tasks. */
public List<Task> showTask() {
    return Lists.newArrayList(manualTaskMap.values());
}
|
/**
 * Manages manually created tasks and their runs. Registration and the periodic
 * dispatch loop are serialized through a fair {@link QueryableReentrantLock};
 * a tick or caller that cannot take the lock within one second gives up
 * instead of blocking.
 */
class TaskManager {
    private static final Logger LOG = LogManager.getLogger(TaskManager.class);

    // Negative sentinel return codes for createTask().
    public static final long TASK_EXISTS = -1L;
    public static final long DUPLICATE_CREATE_TASK = -2L;
    public static final long TASK_CREATE_TIMEOUT = -3L;

    // task id -> task, for manually created tasks.
    private final Map<Long, Task> manualTaskMap;
    // unique task name -> task.
    private final Map<String, Task> nameToTaskMap;
    private final TaskRunManager taskRunManager;
    private final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1);
    // Fair lock guarding task registration and dispatch.
    private final QueryableReentrantLock lock;

    public TaskManager() {
        manualTaskMap = Maps.newConcurrentMap();
        nameToTaskMap = Maps.newConcurrentMap();
        taskRunManager = new TaskRunManager();
        lock = new QueryableReentrantLock(true);
        // Dispatch task runs once per second; skip the tick if the lock cannot
        // be acquired quickly rather than stalling the scheduler thread.
        dispatchScheduler.scheduleAtFixedRate(() -> {
            if (!tryLock()) {
                return;
            }
            try {
                taskRunManager.checkRunningTaskRun();
                taskRunManager.scheduledPendingTaskRun();
            } catch (Exception ex) {
                LOG.warn("failed to dispatch job.", ex);
            } finally {
                unlock();
            }
        }, 0, 1, TimeUnit.SECONDS);
    }

    /**
     * Registers a task. Returns the task id on success, or a negative sentinel:
     * TASK_EXISTS, DUPLICATE_CREATE_TASK or TASK_CREATE_TIMEOUT.
     */
    public long createTask(Task task) {
        if (!tryLock()) {
            return TASK_CREATE_TIMEOUT;
        }
        try {
            if (nameToTaskMap.containsKey(task.getName())) {
                return TASK_EXISTS;
            }
            if (manualTaskMap.containsKey(task.getId())) {
                return DUPLICATE_CREATE_TASK;
            }
            // FIX: mutate only after both uniqueness checks pass. The previous
            // code inserted into nameToTaskMap before checking the id, leaving
            // a dangling name mapping behind on DUPLICATE_CREATE_TASK.
            nameToTaskMap.put(task.getName(), task);
            manualTaskMap.put(task.getId(), task);
            return task.getId();
        } finally {
            unlock();
        }
    }

    /**
     * Submits a run of the named task; returns the query id of the new task
     * run, or null if no task with that name exists.
     */
    public String executeTask(String taskName) {
        Task task = nameToTaskMap.get(taskName);
        if (task == null) {
            return null;
        }
        return taskRunManager.addTaskRun(TaskRunBuilder.newBuilder(task).build());
    }

    /** Removes the named task from both indexes; no-op if it does not exist. */
    public void dropTask(String taskName) {
        Task task = nameToTaskMap.get(taskName);
        if (task == null) {
            return;
        }
        nameToTaskMap.remove(taskName);
        manualTaskMap.remove(task.getId());
    }

    /**
     * Tries to take the task lock for up to one second, logging the current
     * owner's stack on failure. Returns whether the lock is now held by this
     * thread.
     */
    private boolean tryLock() {
        try {
            if (!lock.tryLock(1, TimeUnit.SECONDS)) {
                Thread owner = lock.getOwner();
                // FIX: the messages said "materialized view lock" — copied from
                // another class; this lock guards the task manager.
                if (owner != null) {
                    LOG.warn("task lock is held by: {}", Util.dumpThread(owner, 50));
                } else {
                    LOG.warn("task lock owner is null");
                }
                return false;
            }
            return true;
        } catch (InterruptedException e) {
            LOG.warn("got exception while getting task lock", e);
        }
        return lock.isHeldByCurrentThread();
    }

    private void unlock() {
        this.lock.unlock();
    }

    /** Replays a create-task edit log entry. */
    public void replayCreateTask(Task task) {
        createTask(task);
    }

    /** Replays a drop-task edit log entry. */
    public void replayDropTask(String taskName) {
        dropTask(taskName);
    }

    public TaskRunManager getTaskRunManager() {
        return taskRunManager;
    }

    /**
     * Creates the task described by the SUBMIT TASK statement, kicks off its
     * first run, and returns a one-row result set of (TaskName, Status).
     *
     * @throws DdlException if the task already exists or creation fails
     */
    public ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException {
        Task task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get());
        // Primitive long: avoids pointless boxing of the sentinel codes.
        long createResult = this.createTask(task);
        String taskName = task.getName();
        if (createResult < 0) {
            if (createResult == TASK_EXISTS) {
                throw new DdlException("Task " + taskName + " already exist.");
            }
            throw new DdlException("Failed to create Task: " + taskName + ", ErrorCode: " + createResult);
        }
        // Kick off the run; the returned query id was previously stored in an
        // unused local and is intentionally not surfaced here.
        this.executeTask(taskName);
        ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();
        builder.addColumn(new Column("TaskName", ScalarType.createVarchar(40)));
        builder.addColumn(new Column("Status", ScalarType.createVarchar(10)));
        List<String> item = ImmutableList.of(taskName, "Submitted");
        List<List<String>> result = ImmutableList.of(item);
        return new ShowResultSet(builder.build(), result);
    }
}
|
/**
 * Manages manually created tasks and their runs. Registration and the periodic
 * dispatch loop are serialized through a fair {@link QueryableReentrantLock};
 * a tick or caller that cannot take the lock within one second gives up
 * instead of blocking.
 */
class TaskManager {
    private static final Logger LOG = LogManager.getLogger(TaskManager.class);

    // Negative sentinel return codes for createTask().
    public static final long TASK_EXISTS = -1L;
    public static final long DUPLICATE_CREATE_TASK = -2L;
    public static final long GET_TASK_LOCK_FAILED = -3L;

    // task id -> task, for manually created tasks.
    private final Map<Long, Task> manualTaskMap;
    // unique task name -> task.
    private final Map<String, Task> nameToTaskMap;
    private final TaskRunManager taskRunManager;
    private final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1);
    // Fair lock guarding task registration and dispatch.
    private final QueryableReentrantLock lock;

    public TaskManager() {
        manualTaskMap = Maps.newConcurrentMap();
        nameToTaskMap = Maps.newConcurrentMap();
        taskRunManager = new TaskRunManager();
        lock = new QueryableReentrantLock(true);
        // Dispatch task runs once per second; skip the tick if the lock cannot
        // be acquired quickly rather than stalling the scheduler thread.
        dispatchScheduler.scheduleAtFixedRate(() -> {
            if (!tryLock()) {
                return;
            }
            try {
                taskRunManager.checkRunningTaskRun();
                taskRunManager.scheduledPendingTaskRun();
            } catch (Exception ex) {
                LOG.warn("failed to dispatch job.", ex);
            } finally {
                unlock();
            }
        }, 0, 1, TimeUnit.SECONDS);
    }

    /**
     * Registers a task. Returns the task id on success, or a negative sentinel:
     * TASK_EXISTS, DUPLICATE_CREATE_TASK or GET_TASK_LOCK_FAILED.
     */
    public long createTask(Task task) {
        if (!tryLock()) {
            return GET_TASK_LOCK_FAILED;
        }
        try {
            if (nameToTaskMap.containsKey(task.getName())) {
                return TASK_EXISTS;
            }
            if (manualTaskMap.containsKey(task.getId())) {
                return DUPLICATE_CREATE_TASK;
            }
            // FIX: mutate only after both uniqueness checks pass. The previous
            // code inserted into nameToTaskMap before checking the id, leaving
            // a dangling name mapping behind on DUPLICATE_CREATE_TASK.
            nameToTaskMap.put(task.getName(), task);
            manualTaskMap.put(task.getId(), task);
            return task.getId();
        } finally {
            unlock();
        }
    }

    /**
     * Submits a run of the named task; returns the query id of the new task
     * run, or null if no task with that name exists.
     */
    public String executeTask(String taskName) {
        Task task = nameToTaskMap.get(taskName);
        if (task == null) {
            return null;
        }
        return taskRunManager.addTaskRun(TaskRunBuilder.newBuilder(task).build());
    }

    /** Removes the named task from both indexes; no-op if it does not exist. */
    public void dropTask(String taskName) {
        Task task = nameToTaskMap.get(taskName);
        if (task == null) {
            return;
        }
        nameToTaskMap.remove(taskName);
        manualTaskMap.remove(task.getId());
    }

    /**
     * Tries to take the task lock for up to one second, logging the current
     * owner's stack on failure. Returns whether the lock is now held by this
     * thread.
     */
    private boolean tryLock() {
        try {
            if (!lock.tryLock(1, TimeUnit.SECONDS)) {
                Thread owner = lock.getOwner();
                if (owner != null) {
                    LOG.warn("task lock is held by: {}", Util.dumpThread(owner, 50));
                } else {
                    LOG.warn("task lock owner is null");
                }
                return false;
            }
            return true;
        } catch (InterruptedException e) {
            LOG.warn("got exception while getting task lock", e);
        }
        return lock.isHeldByCurrentThread();
    }

    private void unlock() {
        this.lock.unlock();
    }

    /** Replays a create-task edit log entry. */
    public void replayCreateTask(Task task) {
        createTask(task);
    }

    /** Replays a drop-task edit log entry. */
    public void replayDropTask(String taskName) {
        dropTask(taskName);
    }

    public TaskRunManager getTaskRunManager() {
        return taskRunManager;
    }

    /**
     * Creates the task described by the SUBMIT TASK statement, kicks off its
     * first run, and returns a one-row result set of (TaskName, Status).
     *
     * @throws DdlException if the task already exists or creation fails
     */
    public ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException {
        Task task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get());
        // Primitive long: avoids pointless boxing of the sentinel codes.
        long createResult = this.createTask(task);
        String taskName = task.getName();
        if (createResult < 0) {
            if (createResult == TASK_EXISTS) {
                throw new DdlException("Task " + taskName + " already exist.");
            }
            throw new DdlException("Failed to create Task: " + taskName + ", ErrorCode: " + createResult);
        }
        // Kick off the run; the returned query id was previously stored in an
        // unused local and is intentionally not surfaced here.
        this.executeTask(taskName);
        ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();
        builder.addColumn(new Column("TaskName", ScalarType.createVarchar(40)));
        builder.addColumn(new Column("Status", ScalarType.createVarchar(10)));
        List<String> item = ImmutableList.of(taskName, "Submitted");
        List<List<String>> result = ImmutableList.of(item);
        return new ShowResultSet(builder.build(), result);
    }
}
|
Removes the Supplier function previously used to provide the TableSchema and instead stores the TableSchema directly in a class field.
|
/**
 * Decodes the next Avro record and hands it, together with the table schema,
 * to the user-supplied parse function.
 */
public T read(T reuse, Decoder in) throws IOException {
    GenericRecord avroRecord = (GenericRecord) reader.read(reuse, in);
    return parseFn.apply(new SchemaAndRecord(avroRecord, tableSchema));
}
|
return parseFn.apply(new SchemaAndRecord(record, this.tableSchema));
|
/**
 * Decodes the next Avro record and hands it, together with the table schema,
 * to the user-supplied parse function.
 */
public T read(T reuse, Decoder in) throws IOException {
    GenericRecord avroRecord = (GenericRecord) reader.read(reuse, in);
    return parseFn.apply(new SchemaAndRecord(avroRecord, tableSchema));
}
|
// Adapts an Avro DatumReader so each decoded GenericRecord is transformed to T
// by a user-supplied parse function, carrying the BigQuery TableSchema along.
class GenericDatumTransformer<T> implements DatumReader<T> {
    private final SerializableFunction<SchemaAndRecord, T> parseFn;
    private final TableSchema tableSchema;
    // Rebuilt whenever the writer schema changes (see setSchema).
    private GenericDatumReader<T> reader;
    private org.apache.avro.Schema writerSchema;

    // Convenience constructor: parses the JSON-encoded table schema string.
    public GenericDatumTransformer(
            SerializableFunction<SchemaAndRecord, T> parseFn,
            String tableSchema,
            org.apache.avro.Schema writer) {
        this.parseFn = parseFn;
        this.tableSchema = new TableSchemaFunction().apply(tableSchema);
        this.writerSchema = writer;
        this.reader = new GenericDatumReader<>(this.writerSchema);
    }

    public GenericDatumTransformer(
            SerializableFunction<SchemaAndRecord, T> parseFn,
            TableSchema tableSchema,
            org.apache.avro.Schema writer) {
        this.parseFn = parseFn;
        this.tableSchema = tableSchema;
        this.writerSchema = writer;
        this.reader = new GenericDatumReader<>(this.writerSchema);
    }

    @Override
    public void setSchema(org.apache.avro.Schema schema) {
        // Avoid rebuilding the reader when the schema is unchanged.
        if (this.writerSchema.equals(schema)) {
            return;
        }
        this.writerSchema = schema;
        this.reader = new GenericDatumReader<>(this.writerSchema);
    }

    // NOTE(review): dangling @Override with no following method — the read(...)
    // override appears truncated in this excerpt; confirm against the full file.
    @Override
}
|
// Adapts an Avro DatumReader so each decoded GenericRecord is transformed to T
// by a user-supplied parse function, carrying the BigQuery TableSchema along.
class GenericDatumTransformer<T> implements DatumReader<T> {
    private final SerializableFunction<SchemaAndRecord, T> parseFn;
    private final TableSchema tableSchema;
    // Rebuilt whenever the writer schema changes (see setSchema).
    private GenericDatumReader<T> reader;
    private org.apache.avro.Schema writerSchema;

    public GenericDatumTransformer(
            SerializableFunction<SchemaAndRecord, T> parseFn,
            TableSchema tableSchema,
            org.apache.avro.Schema writer) {
        this.parseFn = parseFn;
        this.tableSchema = tableSchema;
        this.writerSchema = writer;
        this.reader = new GenericDatumReader<>(this.writerSchema);
    }

    @Override
    public void setSchema(org.apache.avro.Schema schema) {
        // Avoid rebuilding the reader when the schema is unchanged.
        if (this.writerSchema.equals(schema)) {
            return;
        }
        this.writerSchema = schema;
        this.reader = new GenericDatumReader<>(this.writerSchema);
    }

    // NOTE(review): dangling @Override with no following method — the read(...)
    // override appears truncated in this excerpt; confirm against the full file.
    @Override
}
|
Rather than making this configurable directly, it would make more sense to allow the test to customize the `SmallRyeConfigBuilder`. That would be a more advanced feature than `QuarkusComponentTest#configConverters()`, and it should be usable only from the `QuarkusComponentTestExtensionBuilder`.
|
/**
 * Starts the ArC container and installs a dedicated SmallRye Config for the
 * test. Runs only when the requested lifecycle matches the test class's
 * lifecycle, so the container starts once per class or once per method.
 */
private void startContainer(ExtensionContext context, Lifecycle testInstanceLifecycle) throws Exception {
    if (testInstanceLifecycle.equals(context.getTestInstanceLifecycle().orElse(Lifecycle.PER_METHOD))) {
        Arc.initialize();
        // Class-level config, optionally refined by method-level annotations.
        QuarkusComponentTestConfiguration configuration = context.getRoot().getStore(NAMESPACE)
                .get(KEY_TEST_CLASS_CONFIG, QuarkusComponentTestConfiguration.class);
        Optional<Method> testMethod = context.getTestMethod();
        if (testMethod.isPresent()) {
            configuration = configuration.update(testMethod.get());
        }
        // Swap in our own ConfigProviderResolver; the old one is stashed so it
        // can be restored when the container stops.
        ConfigProviderResolver oldConfigProviderResolver = ConfigProviderResolver.instance();
        context.getRoot().getStore(NAMESPACE).put(KEY_OLD_CONFIG_PROVIDER_RESOLVER, oldConfigProviderResolver);
        SmallRyeConfigProviderResolver smallRyeConfigProviderResolver = new SmallRyeConfigProviderResolver();
        ConfigProviderResolver.setInstance(smallRyeConfigProviderResolver);
        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
        SmallRyeConfigBuilder configBuilder = new SmallRyeConfigBuilder().forClassLoader(tccl)
                .addDefaultInterceptors()
                .withConverters(configuration.configConverters.toArray(new Converter<?>[] {}))
                .addDefaultSources()
                .withSources(new ApplicationPropertiesConfigSourceLoader.InFileSystem())
                .withSources(new ApplicationPropertiesConfigSourceLoader.InClassPath())
                .withSources(
                        new QuarkusComponentTestConfigSource(configuration.configProperties,
                                configuration.configSourceOrdinal));
        // Register any @ConfigMapping classes discovered during container build.
        @SuppressWarnings("unchecked")
        Set<ConfigClassWithPrefix> configMappings = context.getRoot().getStore(NAMESPACE).get(KEY_CONFIG_MAPPINGS,
                Set.class);
        if (configMappings != null) {
            for (ConfigClassWithPrefix mapping : configMappings) {
                configBuilder.withMapping(mapping.getKlass(), mapping.getPrefix());
            }
        }
        SmallRyeConfig config = configBuilder.build();
        smallRyeConfigProviderResolver.registerConfig(config, tccl);
        context.getRoot().getStore(NAMESPACE).put(KEY_CONFIG, config);
        ConfigBeanCreator.setClassLoader(tccl);
        // Inject @Inject/@InjectMock fields on the test instance; remember them
        // so they can be unset on shutdown.
        Object testInstance = context.getRequiredTestInstance();
        context.getRoot().getStore(NAMESPACE).put(KEY_INJECTED_FIELDS,
                injectFields(context.getRequiredTestClass(), testInstance));
        context.getRoot().getStore(NAMESPACE).put(KEY_INJECTED_PARAMS, new CopyOnWriteArrayList<>());
    }
}
|
.addDefaultInterceptors()
|
/**
 * Starts the ArC container and installs a dedicated SmallRye Config for the
 * test. Runs only when the requested lifecycle matches the test class's
 * lifecycle, so the container starts once per class or once per method.
 */
private void startContainer(ExtensionContext context, Lifecycle testInstanceLifecycle) throws Exception {
    if (testInstanceLifecycle.equals(context.getTestInstanceLifecycle().orElse(Lifecycle.PER_METHOD))) {
        Arc.initialize();
        // Class-level config, optionally refined by method-level annotations.
        QuarkusComponentTestConfiguration configuration = store(context).get(KEY_TEST_CLASS_CONFIG,
                QuarkusComponentTestConfiguration.class);
        Optional<Method> testMethod = context.getTestMethod();
        if (testMethod.isPresent()) {
            configuration = configuration.update(testMethod.get());
        }
        // Swap in our own ConfigProviderResolver; the old one is stashed so it
        // can be restored when the container stops.
        ConfigProviderResolver oldConfigProviderResolver = ConfigProviderResolver.instance();
        store(context).put(KEY_OLD_CONFIG_PROVIDER_RESOLVER, oldConfigProviderResolver);
        SmallRyeConfigProviderResolver smallRyeConfigProviderResolver = new SmallRyeConfigProviderResolver();
        ConfigProviderResolver.setInstance(smallRyeConfigProviderResolver);
        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
        SmallRyeConfigBuilder configBuilder = new SmallRyeConfigBuilder().forClassLoader(tccl)
                .addDefaultInterceptors()
                .withConverters(configuration.configConverters.toArray(new Converter<?>[] {}))
                .addDefaultSources()
                .withSources(new ApplicationPropertiesConfigSourceLoader.InFileSystem())
                .withSources(new ApplicationPropertiesConfigSourceLoader.InClassPath())
                .withSources(
                        new QuarkusComponentTestConfigSource(configuration.configProperties,
                                configuration.configSourceOrdinal));
        // Register any @ConfigMapping classes discovered during container build.
        @SuppressWarnings("unchecked")
        Set<ConfigClassWithPrefix> configMappings = store(context).get(KEY_CONFIG_MAPPINGS, Set.class);
        if (configMappings != null) {
            for (ConfigClassWithPrefix mapping : configMappings) {
                configBuilder.withMapping(mapping.getKlass(), mapping.getPrefix());
            }
        }
        // User-supplied customization runs last so it can override everything.
        if (configuration.configBuilderCustomizer != null) {
            configuration.configBuilderCustomizer.accept(configBuilder);
        }
        SmallRyeConfig config = configBuilder.build();
        smallRyeConfigProviderResolver.registerConfig(config, tccl);
        store(context).put(KEY_CONFIG, config);
        ConfigBeanCreator.setClassLoader(tccl);
        // Inject @Inject/@InjectMock fields on the test instance; remember them
        // so they can be unset on shutdown.
        Object testInstance = context.getRequiredTestInstance();
        store(context).put(KEY_INJECTED_FIELDS, injectFields(context.getRequiredTestClass(), testInstance));
        store(context).put(KEY_INJECTED_PARAMS, new CopyOnWriteArrayList<>());
    }
}
|
class QuarkusComponentTestExtension
implements BeforeAllCallback, AfterAllCallback, BeforeEachCallback, AfterEachCallback, TestInstancePostProcessor,
ParameterResolver {
/** Returns a new builder for configuring a {@link QuarkusComponentTestExtension}. */
public static QuarkusComponentTestExtensionBuilder builder() {
    return new QuarkusComponentTestExtensionBuilder();
}
private static final Logger LOG = Logger.getLogger(QuarkusComponentTestExtension.class);
private static final ExtensionContext.Namespace NAMESPACE = ExtensionContext.Namespace
.create(QuarkusComponentTestExtension.class);
private static final String KEY_OLD_TCCL = "oldTccl";
private static final String KEY_OLD_CONFIG_PROVIDER_RESOLVER = "oldConfigProviderResolver";
private static final String KEY_GENERATED_RESOURCES = "generatedResources";
private static final String KEY_INJECTED_FIELDS = "injectedFields";
private static final String KEY_INJECTED_PARAMS = "injectedParams";
private static final String KEY_TEST_INSTANCE = "testInstance";
private static final String KEY_CONFIG = "config";
private static final String KEY_TEST_CLASS_CONFIG = "testClassConfig";
private static final String KEY_CONFIG_MAPPINGS = "configMappings";
private static final String QUARKUS_TEST_COMPONENT_OUTPUT_DIRECTORY = "quarkus.test.component.output-directory";
private final QuarkusComponentTestConfiguration baseConfiguration;
/** Creates an extension with the default configuration. */
public QuarkusComponentTestExtension() {
    this(QuarkusComponentTestConfiguration.DEFAULT);
}
/**
 * The initial set of components under test is derived from the test class. The types of all fields annotated with
 * {@link jakarta.inject.Inject} are considered the component types.
 *
 * @param additionalComponentClasses component classes to add on top of those derived from the test class
 */
public QuarkusComponentTestExtension(Class<?>... additionalComponentClasses) {
    this(new QuarkusComponentTestConfiguration(Map.of(), List.of(additionalComponentClasses),
            List.of(), false, true, QuarkusComponentTestExtensionBuilder.DEFAULT_CONFIG_SOURCE_ORDINAL,
            List.of(), List.of()));
}
// Package-private: used by QuarkusComponentTestExtensionBuilder.
QuarkusComponentTestExtension(QuarkusComponentTestConfiguration baseConfiguration) {
    this.baseConfiguration = baseConfiguration;
}
@Override
public void beforeAll(ExtensionContext context) throws Exception {
    // Build the synthetic bean archive and start the container once per class.
    long startNanos = System.nanoTime();
    buildContainer(context);
    startContainer(context, Lifecycle.PER_CLASS);
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    LOG.debugf("beforeAll: %s ms", elapsedMs);
}
@Override
public void afterAll(ExtensionContext context) throws Exception {
    // Stop the class-scoped container and delete generated resources.
    long startNanos = System.nanoTime();
    stopContainer(context, Lifecycle.PER_CLASS);
    cleanup(context);
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    LOG.debugf("afterAll: %s ms", elapsedMs);
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
    // Start a method-scoped container (if applicable) and activate the request context.
    long startNanos = System.nanoTime();
    startContainer(context, Lifecycle.PER_METHOD);
    Arc.container().requestContext().activate();
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    LOG.debugf("beforeEach: %s ms", elapsedMs);
}
@Override
public void afterEach(ExtensionContext context) throws Exception {
    // Terminate the request context, destroy @Dependent test-method params,
    // then stop the method-scoped container (if applicable) — in that order.
    long startNanos = System.nanoTime();
    Arc.container().requestContext().terminate();
    destroyDependentTestMethodParams(context);
    stopContainer(context, Lifecycle.PER_METHOD);
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    LOG.debugf("afterEach: %s ms", elapsedMs);
}
@Override
public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception {
    // Stash the test instance so later callbacks can reach it via the store.
    long startNanos = System.nanoTime();
    context.getRoot().getStore(NAMESPACE).put(KEY_TEST_INSTANCE, testInstance);
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    LOG.debugf("postProcessTestInstance: %s ms", elapsedMs);
}
// Matches parameters that JUnit itself resolves (@TempDir, TestInfo,
// RepetitionInfo, TestReporter) — these must never be CDI-injected.
static final Predicate<Parameter> BUILTIN_PARAMETER = parameter -> {
    if (parameter.isAnnotationPresent(TempDir.class)) {
        return true;
    }
    java.lang.reflect.Type type = parameter.getParameterizedType();
    return type.equals(TestInfo.class) || type.equals(RepetitionInfo.class) || type.equals(TestReporter.class);
};
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
        throws ParameterResolutionException {
    // A test method parameter is CDI-resolvable unless opted out via
    // @SkipInject/@Mock or it is one of JUnit's built-in parameter types.
    if (
    parameterContext.getTarget().isPresent()
            && isTestMethod(parameterContext.getDeclaringExecutable())
            && !parameterContext.isAnnotated(SkipInject.class)
            && !parameterContext.getDeclaringExecutable().isAnnotationPresent(SkipInject.class)
            && !parameterContext.isAnnotated(Mock.class)
            && !BUILTIN_PARAMETER.test(parameterContext.getParameter())) {
        BeanManager beanManager = Arc.container().beanManager();
        java.lang.reflect.Type requiredType = parameterContext.getParameter().getParameterizedType();
        Annotation[] qualifiers = getQualifiers(parameterContext.getAnnotatedElement(), beanManager);
        if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
            // @All injection requires a List<...> parameter type.
            if (isListRequiredType(requiredType)) {
                return true;
            } else {
                throw new IllegalStateException("Invalid injection point type: " + parameterContext.getParameter());
            }
        } else {
            try {
                Bean<?> bean = beanManager.resolve(beanManager.getBeans(requiredType, qualifiers));
                if (bean == null) {
                    String msg = String.format("No matching bean found for the type [%s] and qualifiers %s",
                            requiredType, Arrays.toString(qualifiers));
                    // Fail hard only when injection was clearly intended
                    // (@InjectMock or explicit qualifiers); otherwise just
                    // decline to resolve and hint at @SkipInject.
                    if (parameterContext.isAnnotated(InjectMock.class) || qualifiers.length > 0) {
                        throw new IllegalStateException(msg);
                    } else {
                        LOG.info(msg + " - consider annotating the parameter with @SkipInject");
                        return false;
                    }
                }
                return true;
            } catch (AmbiguousResolutionException e) {
                String msg = String.format(
                        "Multiple matching beans found for the type [%s] and qualifiers %s\n\t- if this parameter should not be resolved by CDI then use @SkipInject\n\t- found beans: %s",
                        requiredType, Arrays.toString(qualifiers), e.getMessage());
                throw new IllegalStateException(msg);
            }
        }
    }
    return false;
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext context)
        throws ParameterResolutionException {
    // Every resolved handle is recorded so @Dependent instances can be
    // destroyed in afterEach (see destroyDependentTestMethodParams).
    @SuppressWarnings("unchecked")
    List<InstanceHandle<?>> injectedParams = context.getRoot().getStore(NAMESPACE).get(KEY_INJECTED_PARAMS, List.class);
    ArcContainer container = Arc.container();
    BeanManager beanManager = container.beanManager();
    java.lang.reflect.Type requiredType = parameterContext.getParameter().getParameterizedType();
    Annotation[] qualifiers = getQualifiers(parameterContext.getAnnotatedElement(), beanManager);
    if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
        // @All List<...> injection point.
        return handleListAll(requiredType, qualifiers, container, injectedParams);
    } else {
        InstanceHandle<?> handle = container.instance(requiredType, qualifiers);
        injectedParams.add(handle);
        return handle.get();
    }
}
/**
 * Destroys all {@code @Dependent} bean instances that were injected as test
 * method parameters during the last test, then clears the tracking list.
 * Destruction failures are logged, never rethrown.
 */
private void destroyDependentTestMethodParams(ExtensionContext context) {
    @SuppressWarnings("unchecked")
    List<InstanceHandle<?>> injectedParams = context.getRoot().getStore(NAMESPACE).get(KEY_INJECTED_PARAMS, List.class);
    for (InstanceHandle<?> paramHandle : injectedParams) {
        boolean isDependent = paramHandle.getBean() != null
                && paramHandle.getBean().getScope().equals(Dependent.class);
        if (!isDependent) {
            continue;
        }
        try {
            paramHandle.destroy();
        } catch (Exception e) {
            LOG.errorf(e, "Unable to destroy the injected %s", paramHandle.getBean());
        }
    }
    injectedParams.clear();
}
/**
 * Derives the test-class configuration, initializes the ArC container from it,
 * and stashes both the configuration and the previous TCCL in the root store.
 */
private void buildContainer(ExtensionContext context) {
    QuarkusComponentTestConfiguration classConfig = baseConfiguration
            .update(context.getRequiredTestClass());
    context.getRoot().getStore(NAMESPACE).put(KEY_TEST_CLASS_CONFIG, classConfig);
    // initArcContainer swaps the TCCL; keep the old one for restoration in cleanup().
    ClassLoader previousTccl = initArcContainer(context, classConfig);
    context.getRoot().getStore(NAMESPACE).put(KEY_OLD_TCCL, previousTccl);
}
/**
 * Restores the original TCCL, clears stored config mappings, and deletes all
 * resources generated for the synthetic bean archive. Deletion failures are
 * logged and do not abort the cleanup.
 */
@SuppressWarnings("unchecked")
private void cleanup(ExtensionContext context) {
    ClassLoader oldTccl = context.getRoot().getStore(NAMESPACE).get(KEY_OLD_TCCL, ClassLoader.class);
    Thread.currentThread().setContextClassLoader(oldTccl);
    context.getRoot().getStore(NAMESPACE).remove(KEY_CONFIG_MAPPINGS);
    Set<Path> generatedResources = context.getRoot().getStore(NAMESPACE).get(KEY_GENERATED_RESOURCES, Set.class);
    for (Path path : generatedResources) {
        try {
            LOG.debugf("Delete generated %s", path);
            Files.deleteIfExists(path);
        } catch (IOException e) {
            // FIX: the format string had a single %s but two arguments, so the
            // error message was dropped and the log line ended in a dangling ": ".
            LOG.errorf("Unable to delete the generated resource %s: %s", path, e.getMessage());
        }
    }
}
/**
 * Shuts down the container for the matching lifecycle: un-sets injected test
 * fields, shuts ArC down, clears creator registries, and restores the previous
 * config provider resolver.
 */
@SuppressWarnings("unchecked")
private void stopContainer(ExtensionContext context, Lifecycle testInstanceLifecycle) throws Exception {
    if (testInstanceLifecycle.equals(context.getTestInstanceLifecycle().orElse(Lifecycle.PER_METHOD))) {
        for (FieldInjector fieldInjector : (List<FieldInjector>) context.getRoot().getStore(NAMESPACE)
                .get(KEY_INJECTED_FIELDS, List.class)) {
            fieldInjector.unset(context.getRequiredTestInstance());
        }
        try {
            Arc.shutdown();
        } catch (Exception e) {
            // FIX: was LOG.error("An error occured during ArC shutdown: " + e) —
            // typo and string concatenation that discarded the stack trace.
            LOG.error("An error occurred during ArC shutdown", e);
        }
        MockBeanCreator.clear();
        ConfigBeanCreator.clear();
        InterceptorMethodCreator.clear();
        // Release our config and restore the resolver that was active before.
        SmallRyeConfig config = context.getRoot().getStore(NAMESPACE).get(KEY_CONFIG, SmallRyeConfig.class);
        ConfigProviderResolver.instance().releaseConfig(config);
        ConfigProviderResolver
                .setInstance(context.getRoot().getStore(NAMESPACE).get(KEY_OLD_CONFIG_PROVIDER_RESOLVER,
                        ConfigProviderResolver.class));
    }
}
/**
 * Builds a BeanRegistrar that registers a synthetic bean mirroring the given
 * mock configuration (scope, types, qualifiers, name, alternative/priority,
 * default-bean flag). The mock's create function is registered under a random
 * key so the generated bean's creator can look it up at runtime.
 */
private BeanRegistrar registrarForMock(MockBeanConfiguratorImpl<?> mock) {
    return new BeanRegistrar() {

        @Override
        public void register(RegistrationContext context) {
            BeanConfigurator<Object> configurator = context.configure(mock.beanClass);
            configurator.scope(mock.scope);
            mock.jandexTypes().forEach(configurator::addType);
            mock.jandexQualifiers().forEach(configurator::addQualifier);
            if (mock.name != null) {
                configurator.name(mock.name);
            }
            configurator.alternative(mock.alternative);
            if (mock.priority != null) {
                configurator.priority(mock.priority);
            }
            if (mock.defaultBean) {
                configurator.defaultBean();
            }
            // The key links this synthetic bean to its create function, which
            // MockBeanCreator resolves when the bean is instantiated.
            String key = UUID.randomUUID().toString();
            MockBeanCreator.registerCreate(key, cast(mock.create));
            configurator.creator(MockBeanCreator.class).param(MockBeanCreator.CREATE_KEY, key).done();
        }
    };
}
/**
 * Returns the CDI qualifier annotations declared directly on the element,
 * as determined by {@link BeanManager#isQualifier(Class)}.
 */
private static Annotation[] getQualifiers(AnnotatedElement element, BeanManager beanManager) {
    List<Annotation> qualifiers = new ArrayList<>();
    for (Annotation candidate : element.getDeclaredAnnotations()) {
        if (beanManager.isQualifier(candidate.annotationType())) {
            qualifiers.add(candidate);
        }
    }
    return qualifiers.toArray(new Annotation[0]);
}
/**
 * Returns Jandex annotation instances for the element's declared annotations
 * whose types appear in the given qualifier-name collection.
 */
private static Set<AnnotationInstance> getQualifiers(AnnotatedElement element, Collection<DotName> qualifiers) {
    Set<AnnotationInstance> matched = new HashSet<>();
    for (Annotation declared : element.getDeclaredAnnotations()) {
        if (qualifiers.contains(DotName.createSimple(declared.annotationType()))) {
            matched.add(Annotations.jandexAnnotation(declared));
        }
    }
    return matched;
}
/**
 * Generates and starts a new ArC container for the given test configuration and replaces the
 * thread context class loader (TCCL) with a loader that can see the generated ComponentsProvider.
 *
 * @param extensionContext the JUnit extension context (provides the test class and root store)
 * @param configuration the merged test configuration
 * @return the original TCCL, so that the caller can restore it during cleanup
 */
private ClassLoader initArcContainer(ExtensionContext extensionContext, QuarkusComponentTestConfiguration configuration) {
if (configuration.componentClasses.isEmpty()) {
throw new IllegalStateException("No component classes to test");
}
// Stop any previously running container before generating a new one
try {
Arc.shutdown();
} catch (Exception e) {
throw new IllegalStateException("An error occured during ArC shutdown: " + e);
}
// Build an immutable bean archive index from the component classes plus @ConfigProperty
IndexView index;
try {
Indexer indexer = new Indexer();
for (Class<?> componentClass : configuration.componentClasses) {
indexComponentClass(indexer, componentClass);
}
indexer.indexClass(ConfigProperty.class);
index = BeanArchives.buildImmutableBeanArchiveIndex(indexer.complete());
} catch (IOException e) {
throw new IllegalStateException("Failed to create index", e);
}
Class<?> testClass = extensionContext.getRequiredTestClass();
ClassLoader testClassClassLoader = testClass.getClassLoader();
// A continuous-testing run loads the test class with a QuarkusClassLoader
boolean isContinuousTesting = testClassClassLoader instanceof QuarkusClassLoader;
ClassLoader oldTccl = Thread.currentThread().getContextClassLoader();
IndexView computingIndex = BeanArchives.buildComputingBeanArchiveIndex(oldTccl,
new ConcurrentHashMap<>(), index);
try {
// qualifiers/interceptorBindings/beanResolver are populated later, after registerCustomContexts()
List<DotName> qualifiers = new ArrayList<>();
Set<String> interceptorBindings = new HashSet<>();
AtomicReference<BeanResolver> beanResolver = new AtomicReference<>();
List<Field> injectFields = findInjectFields(testClass);
List<Parameter> injectParams = findInjectParams(testClass);
BeanProcessor.Builder builder = BeanProcessor.builder()
.setName(testClass.getName().replace('.', '_'))
// Beans injected into the test class or test method parameters must never be removed as unused
.addRemovalExclusion(b -> {
if (b.getTarget().isPresent()
&& b.getTarget().get().hasDeclaredAnnotation(Unremovable.class)) {
return true;
}
for (Field injectionPoint : injectFields) {
if (beanResolver.get().matches(b, Types.jandexType(injectionPoint.getGenericType()),
getQualifiers(injectionPoint, qualifiers))) {
return true;
}
}
for (Parameter param : injectParams) {
if (beanResolver.get().matches(b, Types.jandexType(param.getParameterizedType()),
getQualifiers(param, qualifiers))) {
return true;
}
}
return false;
})
.setImmutableBeanArchiveIndex(index)
.setComputingBeanArchiveIndex(computingIndex)
.setRemoveUnusedBeans(true);
// Where the generated classes/resources are written depends on the run mode
Set<Path> generatedResources;
File componentsProviderFile = getComponentsProviderFile(testClass);
if (isContinuousTesting) {
// Continuous testing: feed generated classes directly into the QuarkusClassLoader
generatedResources = Set.of();
Map<String, byte[]> classes = new HashMap<>();
builder.setOutput(new ResourceOutput() {
@Override
public void writeResource(Resource resource) throws IOException {
switch (resource.getType()) {
case JAVA_CLASS:
classes.put(resource.getName() + ".class", resource.getData());
((QuarkusClassLoader) testClass.getClassLoader()).reset(classes, Map.of());
break;
case SERVICE_PROVIDER:
if (resource.getName()
.endsWith(ComponentsProvider.class.getName())) {
componentsProviderFile.getParentFile()
.mkdirs();
try (FileOutputStream out = new FileOutputStream(componentsProviderFile)) {
out.write(resource.getData());
}
}
break;
default:
throw new IllegalArgumentException("Unsupported resource type: " + resource.getType());
}
}
});
} else {
// Regular run: write generated classes to the test output directory; remember them for cleanup
generatedResources = new HashSet<>();
File testOutputDirectory = getTestOutputDirectory(testClass);
builder.setOutput(new ResourceOutput() {
@Override
public void writeResource(Resource resource) throws IOException {
switch (resource.getType()) {
case JAVA_CLASS:
generatedResources.add(resource.writeTo(testOutputDirectory).toPath());
break;
case SERVICE_PROVIDER:
if (resource.getName()
.endsWith(ComponentsProvider.class.getName())) {
componentsProviderFile.getParentFile()
.mkdirs();
try (FileOutputStream out = new FileOutputStream(componentsProviderFile)) {
out.write(resource.getData());
}
}
break;
default:
throw new IllegalArgumentException("Unsupported resource type: " + resource.getType());
}
}
});
}
extensionContext.getRoot().getStore(NAMESPACE).put(KEY_GENERATED_RESOURCES, generatedResources);
// Fields annotated with a qualifier but without @Inject are treated as injection points
builder.addAnnotationTransformation(AnnotationsTransformer.appliedToField().whenContainsAny(qualifiers)
.whenContainsNone(DotName.createSimple(Inject.class)).thenTransform(t -> t.add(Inject.class)));
builder.addAnnotationTransformation(new JaxrsSingletonTransformer());
for (AnnotationsTransformer transformer : configuration.annotationsTransformers) {
builder.addAnnotationTransformation(transformer);
}
// Register synthetic beans for unsatisfied injection points, Config objects and config mappings
builder.addBeanRegistrar(new BeanRegistrar() {
@Override
public void register(RegistrationContext registrationContext) {
long start = System.nanoTime();
List<BeanInfo> beans = registrationContext.beans().collect();
BeanDeployment beanDeployment = registrationContext.get(Key.DEPLOYMENT);
Set<TypeAndQualifiers> unsatisfiedInjectionPoints = new HashSet<>();
boolean configInjectionPoint = false;
Set<TypeAndQualifiers> configPropertyInjectionPoints = new HashSet<>();
Map<String, Set<String>> prefixToConfigMappings = new HashMap<>();
DotName configDotName = DotName.createSimple(Config.class);
DotName configPropertyDotName = DotName.createSimple(ConfigProperty.class);
DotName configMappingDotName = DotName.createSimple(ConfigMapping.class);
// Analyze all injection points and categorize them
for (InjectionPointInfo injectionPoint : registrationContext.getInjectionPoints()) {
if (injectionPoint.getRequiredType().name().equals(configDotName)
&& injectionPoint.hasDefaultedQualifier()) {
configInjectionPoint = true;
continue;
}
if (injectionPoint.getRequiredQualifier(configPropertyDotName) != null) {
configPropertyInjectionPoints.add(new TypeAndQualifiers(injectionPoint.getRequiredType(),
injectionPoint.getRequiredQualifiers()));
continue;
}
// Built-in beans other than Instance and @All List are always satisfied
BuiltinBean builtin = BuiltinBean.resolve(injectionPoint);
if (builtin != null && builtin != BuiltinBean.INSTANCE && builtin != BuiltinBean.LIST) {
continue;
}
Type requiredType = injectionPoint.getRequiredType();
Set<AnnotationInstance> requiredQualifiers = injectionPoint.getRequiredQualifiers();
if (builtin == BuiltinBean.LIST) {
// For @All List<T> match the element type T instead of the List itself
requiredType = requiredType.asParameterizedType().arguments().get(0);
requiredQualifiers = new HashSet<>(requiredQualifiers);
requiredQualifiers.removeIf(q -> q.name().equals(DotNames.ALL));
if (requiredQualifiers.isEmpty()) {
requiredQualifiers.add(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
}
if (requiredType.kind() == Kind.CLASS) {
// Interfaces annotated with @ConfigMapping get dedicated synthetic beans, grouped by prefix
ClassInfo clazz = computingIndex.getClassByName(requiredType.name());
if (clazz != null && clazz.isInterface()) {
AnnotationInstance configMapping = clazz.declaredAnnotation(configMappingDotName);
if (configMapping != null) {
AnnotationValue prefixValue = configMapping.value("prefix");
String prefix = prefixValue == null ? "" : prefixValue.asString();
Set<String> mappingClasses = prefixToConfigMappings.computeIfAbsent(prefix,
k -> new HashSet<>());
mappingClasses.add(clazz.name().toString());
}
}
}
if (isSatisfied(requiredType, requiredQualifiers, injectionPoint, beans, beanDeployment,
configuration)) {
continue;
}
if (requiredType.kind() == Kind.PRIMITIVE || requiredType.kind() == Kind.ARRAY) {
throw new IllegalStateException(
"Found an unmockable unsatisfied injection point: " + injectionPoint.getTargetInfo());
}
unsatisfiedInjectionPoints.add(new TypeAndQualifiers(requiredType, requiredQualifiers));
LOG.debugf("Unsatisfied injection point found: %s", injectionPoint.getTargetInfo());
}
// Test class fields annotated with @InjectMock are unsatisfied by definition
for (Field field : findFields(testClass, List.of(InjectMock.class))) {
Set<AnnotationInstance> requiredQualifiers = getQualifiers(field, qualifiers);
if (requiredQualifiers.isEmpty()) {
requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
unsatisfiedInjectionPoints
.add(new TypeAndQualifiers(Types.jandexType(field.getGenericType()), requiredQualifiers));
}
// The same applies to @InjectMock test method parameters
for (Parameter param : findInjectMockParams(testClass)) {
Set<AnnotationInstance> requiredQualifiers = getQualifiers(param, qualifiers);
if (requiredQualifiers.isEmpty()) {
requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
unsatisfiedInjectionPoints
.add(new TypeAndQualifiers(Types.jandexType(param.getParameterizedType()), requiredQualifiers));
}
// Register a synthetic @Singleton default bean for every unsatisfied injection point
for (TypeAndQualifiers unsatisfied : unsatisfiedInjectionPoints) {
ClassInfo implementationClass = computingIndex.getClassByName(unsatisfied.type.name());
BeanConfigurator<Object> configurator = registrationContext.configure(implementationClass.name())
.scope(Singleton.class)
.addType(unsatisfied.type);
unsatisfied.qualifiers.forEach(configurator::addQualifier);
configurator.param("implementationClass", implementationClass)
.creator(MockBeanCreator.class)
.defaultBean()
.identifier("dummy")
.done();
}
if (configInjectionPoint) {
registrationContext.configure(Config.class)
.addType(Config.class)
.creator(ConfigBeanCreator.class)
.done();
}
if (!configPropertyInjectionPoints.isEmpty()) {
// A single synthetic bean covers all @ConfigProperty injection points
BeanConfigurator<Object> configPropertyConfigurator = registrationContext.configure(Object.class)
.identifier("configProperty")
.addQualifier(ConfigProperty.class)
.param("useDefaultConfigProperties", configuration.useDefaultConfigProperties)
.addInjectionPoint(ClassType.create(InjectionPoint.class))
.creator(ConfigPropertyBeanCreator.class);
for (TypeAndQualifiers configPropertyInjectionPoint : configPropertyInjectionPoints) {
configPropertyConfigurator.addType(configPropertyInjectionPoint.type);
}
configPropertyConfigurator.done();
}
if (!prefixToConfigMappings.isEmpty()) {
// One synthetic bean per @ConfigMapping interface; remember the mappings for container start
Set<ConfigClassWithPrefix> configMappings = new HashSet<>();
for (Entry<String, Set<String>> e : prefixToConfigMappings.entrySet()) {
for (String mapping : e.getValue()) {
DotName mappingName = DotName.createSimple(mapping);
registrationContext.configure(mappingName)
.addType(mappingName)
.creator(ConfigMappingBeanCreator.class)
.param("mappingClass", mapping)
.param("prefix", e.getKey())
.done();
configMappings.add(ConfigClassWithPrefix
.configClassWithPrefix(ConfigMappingBeanCreator.tryLoad(mapping), e.getKey()));
}
}
extensionContext.getRoot().getStore(NAMESPACE).put(KEY_CONFIG_MAPPINGS, configMappings);
}
LOG.debugf("Test injection points analyzed in %s ms [found: %s, mocked: %s]",
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start),
registrationContext.getInjectionPoints().size(),
unsatisfiedInjectionPoints.size());
// Interceptor methods declared directly on the test class
processTestInterceptorMethods(testClass, extensionContext, registrationContext, interceptorBindings);
}
});
for (MockBeanConfiguratorImpl<?> mockConfigurator : configuration.mockConfigurators) {
builder.addBeanRegistrar(registrarForMock(mockConfigurator));
}
// Run the build-time phases of the bean processor and generate the container classes
BeanProcessor beanProcessor = builder.build();
try {
// Bytecode transformations are not supported in this test mode
Consumer<BytecodeTransformer> unsupportedBytecodeTransformer = new Consumer<BytecodeTransformer>() {
@Override
public void accept(BytecodeTransformer transformer) {
throw new UnsupportedOperationException();
}
};
ContextRegistrar.RegistrationContext registrationContext = beanProcessor.registerCustomContexts();
qualifiers.addAll(registrationContext.get(Key.QUALIFIERS).keySet());
for (DotName binding : registrationContext.get(Key.INTERCEPTOR_BINDINGS).keySet()) {
interceptorBindings.add(binding.toString());
}
beanResolver.set(registrationContext.get(Key.DEPLOYMENT).getBeanResolver());
beanProcessor.registerScopes();
beanProcessor.registerBeans();
beanProcessor.getBeanDeployment().initBeanByTypeMap();
beanProcessor.registerSyntheticObservers();
beanProcessor.initialize(unsupportedBytecodeTransformer, Collections.emptyList());
ValidationContext validationContext = beanProcessor.validate(unsupportedBytecodeTransformer);
beanProcessor.processValidationErrors(validationContext);
ExecutorService executor = Executors.newCachedThreadPool();
beanProcessor.generateResources(null, new HashSet<>(), unsupportedBytecodeTransformer, true, executor);
executor.shutdown();
} catch (IOException e) {
throw new IllegalStateException("Error generating resources", e);
}
// From now on the generated ComponentsProvider is loadable via the TCCL
QuarkusComponentTestClassLoader testClassLoader = new QuarkusComponentTestClassLoader(
isContinuousTesting ? testClassClassLoader : oldTccl,
componentsProviderFile,
null);
Thread.currentThread().setContextClassLoader(testClassLoader);
} catch (Throwable e) {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
} else {
throw new RuntimeException(e);
}
}
return oldTccl;
}
/**
 * Registers a synthetic interceptor for every interceptor method declared on the test class.
 * Supported annotations: @AroundInvoke, @PostConstruct, @PreDestroy, @AroundConstruct.
 */
private void processTestInterceptorMethods(Class<?> testClass, ExtensionContext extensionContext,
BeanRegistrar.RegistrationContext registrationContext, Set<String> interceptorBindings) {
List<Class<? extends Annotation>> annotations = List.of(AroundInvoke.class, PostConstruct.class, PreDestroy.class,
AroundConstruct.class);
Predicate<Method> predicate = m -> {
for (Class<? extends Annotation> annotation : annotations) {
if (m.isAnnotationPresent(annotation)) {
return true;
}
}
return false;
};
for (Method method : findMethods(testClass, predicate)) {
// An interceptor method must declare at least one interceptor binding
Set<Annotation> bindings = findBindings(method, interceptorBindings);
if (bindings.isEmpty()) {
throw new IllegalStateException("No bindings declared on a test interceptor method: " + method);
}
validateTestInterceptorMethod(method);
String key = UUID.randomUUID().toString();
// The interceptor callback invokes the method reflectively - on the current test
// instance unless the method is static
InterceptorMethodCreator.registerCreate(key, ctx -> {
return ic -> {
Object instance = null;
if (!Modifier.isStatic(method.getModifiers())) {
Object testInstance = extensionContext.getRoot().getStore(NAMESPACE).get(KEY_TEST_INSTANCE);
if (testInstance == null) {
throw new IllegalStateException("Test instance not available");
}
instance = testInstance;
if (!method.canAccess(instance)) {
method.setAccessible(true);
}
}
return method.invoke(instance, ic);
};
});
InterceptionType interceptionType;
if (method.isAnnotationPresent(AroundInvoke.class)) {
interceptionType = InterceptionType.AROUND_INVOKE;
} else if (method.isAnnotationPresent(PostConstruct.class)) {
interceptionType = InterceptionType.POST_CONSTRUCT;
} else if (method.isAnnotationPresent(PreDestroy.class)) {
interceptionType = InterceptionType.PRE_DESTROY;
} else if (method.isAnnotationPresent(AroundConstruct.class)) {
interceptionType = InterceptionType.AROUND_CONSTRUCT;
} else {
throw new IllegalStateException("No interceptor annotation declared on: " + method);
}
// @Priority is optional; 1 is the default
int priority = 1;
Priority priorityAnnotation = method.getAnnotation(Priority.class);
if (priorityAnnotation != null) {
priority = priorityAnnotation.value();
}
registrationContext.configureInterceptor(interceptionType)
.identifier(key)
.priority(priority)
.bindings(bindings.stream().map(Annotations::jandexAnnotation)
.toArray(AnnotationInstance[]::new))
.param(InterceptorMethodCreator.CREATE_KEY, key)
.creator(InterceptorMethodCreator.class);
}
}
/**
 * Verifies that a test interceptor method declares exactly one {@code InvocationContext}
 * (or subtype) parameter; throws {@link IllegalStateException} otherwise.
 */
private void validateTestInterceptorMethod(Method method) {
    Parameter[] params = method.getParameters();
    boolean validSignature = params.length == 1
            && InvocationContext.class.isAssignableFrom(params[0].getType());
    if (!validSignature) {
        throw new IllegalStateException("A test interceptor method must declare exactly one InvocationContext parameter:"
                + Arrays.toString(params));
    }
}
// Returns all annotations of the method whose type names appear in the known interceptor bindings.
private Set<Annotation> findBindings(Method method, Set<String> bindings) {
    Set<Annotation> result = new HashSet<>();
    for (Annotation annotation : method.getAnnotations()) {
        if (bindings.contains(annotation.annotationType().getName())) {
            result.add(annotation);
        }
    }
    return result;
}
/**
 * Indexes the given class, its annotations, the annotations of its fields/methods/parameters,
 * its interfaces (recursively) and all its superclasses.
 */
private void indexComponentClass(Indexer indexer, Class<?> componentClass) {
try {
// Walks up the class hierarchy; the parameter is intentionally reassigned so that the
// exception message below reports the class that actually failed to index
while (componentClass != null) {
indexer.indexClass(componentClass);
for (Annotation annotation : componentClass.getAnnotations()) {
indexer.indexClass(annotation.annotationType());
}
for (Field field : componentClass.getDeclaredFields()) {
indexAnnotatedElement(indexer, field);
}
for (Method method : componentClass.getDeclaredMethods()) {
indexAnnotatedElement(indexer, method);
for (Parameter param : method.getParameters()) {
indexAnnotatedElement(indexer, param);
}
}
for (Class<?> iface : componentClass.getInterfaces()) {
indexComponentClass(indexer, iface);
}
componentClass = componentClass.getSuperclass();
}
} catch (IOException e) {
throw new IllegalStateException("Failed to index:" + componentClass, e);
}
}
// Indexes the annotation types of all annotations present on the given element.
private void indexAnnotatedElement(Indexer indexer, AnnotatedElement element) throws IOException {
for (Annotation annotation : element.getAnnotations()) {
indexer.indexClass(annotation.annotationType());
}
}
/**
 * Returns {@code true} if the injection point is satisfied either by one of the discovered
 * beans or by one of the programmatically registered mock configurators.
 */
private boolean isSatisfied(Type requiredType, Set<AnnotationInstance> qualifiers, InjectionPointInfo injectionPoint,
        Iterable<BeanInfo> beans, BeanDeployment beanDeployment, QuarkusComponentTestConfiguration configuration) {
    // First check the real beans discovered during registration...
    for (BeanInfo candidate : beans) {
        if (Beans.matches(candidate, requiredType, qualifiers)) {
            LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(), candidate.toString());
            return true;
        }
    }
    // ...then the mocks registered via the test configuration
    for (MockBeanConfiguratorImpl<?> mockCandidate : configuration.mockConfigurators) {
        if (mockCandidate.matches(beanDeployment.getBeanResolver(), requiredType, qualifiers)) {
            LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(), mockCandidate);
            return true;
        }
    }
    return false;
}
// Converts a package name (dot-separated) to a relative file system path.
private String nameToPath(String name) {
    char separator = File.separatorChar;
    return name.replace('.', separator);
}
// Unchecked helper cast used to adapt generic callbacks; the caller guarantees type safety.
@SuppressWarnings("unchecked")
static <T> T cast(Object obj) {
return (T) obj;
}
// Creates a FieldInjector (which performs the actual injection in its constructor) for every
// @Inject/@InjectMock field of the test class hierarchy.
private List<FieldInjector> injectFields(Class<?> testClass, Object testInstance) throws Exception {
List<FieldInjector> injectedFields = new ArrayList<>();
for (Field field : findInjectFields(testClass)) {
injectedFields.add(new FieldInjector(field, testInstance));
}
return injectedFields;
}
/**
 * Finds all test class fields annotated with @Inject, @InjectMock or - when available on the
 * class path - the deprecated {@code io.quarkus.test.junit.mockito.InjectMock}.
 */
private List<Field> findInjectFields(Class<?> testClass) {
    Class<? extends Annotation> deprecatedInjectMock = loadDeprecatedInjectMock();
    List<Class<? extends Annotation>> injectAnnotations = deprecatedInjectMock == null
            ? List.of(Inject.class, InjectMock.class)
            : List.of(Inject.class, InjectMock.class, deprecatedInjectMock);
    return findFields(testClass, injectAnnotations);
}
/**
 * Finds all test method parameters eligible for CDI injection, i.e. everything except
 * JUnit built-in parameters and parameters annotated with @SkipInject.
 */
private List<Parameter> findInjectParams(Class<?> testClass) {
    List<Parameter> result = new ArrayList<>();
    for (Method testMethod : findMethods(testClass, QuarkusComponentTestExtension::isTestMethod)) {
        for (Parameter parameter : testMethod.getParameters()) {
            boolean excluded = BUILTIN_PARAMETER.test(parameter)
                    || parameter.isAnnotationPresent(SkipInject.class);
            if (!excluded) {
                result.add(parameter);
            }
        }
    }
    return result;
}
// Finds all non-built-in test method parameters annotated with @InjectMock.
private List<Parameter> findInjectMockParams(Class<?> testClass) {
    return findMethods(testClass, QuarkusComponentTestExtension::isTestMethod).stream()
            .flatMap(method -> Arrays.stream(method.getParameters()))
            .filter(param -> param.isAnnotationPresent(InjectMock.class)
                    && !BUILTIN_PARAMETER.test(param))
            .collect(Collectors.toCollection(ArrayList::new));
}
// Returns true if the executable is a JUnit 5 test method (@Test, @ParameterizedTest or @RepeatedTest).
static boolean isTestMethod(Executable method) {
    if (method.isAnnotationPresent(Test.class)) {
        return true;
    }
    if (method.isAnnotationPresent(ParameterizedTest.class)) {
        return true;
    }
    return method.isAnnotationPresent(RepeatedTest.class);
}
/**
 * Collects all declared fields of the test class hierarchy (excluding java.lang.Object)
 * that carry at least one of the given annotations.
 */
private List<Field> findFields(Class<?> testClass, List<Class<? extends Annotation>> annotations) {
    List<Field> found = new ArrayList<>();
    for (Class<?> clazz = testClass; clazz.getSuperclass() != null; clazz = clazz.getSuperclass()) {
        for (Field field : clazz.getDeclaredFields()) {
            if (annotations.stream().anyMatch(field::isAnnotationPresent)) {
                found.add(field);
            }
        }
    }
    return found;
}
/**
 * Collects all declared methods of the test class hierarchy (excluding java.lang.Object)
 * that match the given predicate.
 */
private List<Method> findMethods(Class<?> testClass, Predicate<Method> methodPredicate) {
    List<Method> found = new ArrayList<>();
    for (Class<?> clazz = testClass; clazz.getSuperclass() != null; clazz = clazz.getSuperclass()) {
        for (Method method : clazz.getDeclaredMethods()) {
            if (methodPredicate.test(method)) {
                found.add(method);
            }
        }
    }
    return found;
}
/**
 * Injects a single test class field in the constructor and remembers the contextual handles
 * so that dependent instances can be destroyed and the field reset in {@link #unset(Object)}.
 */
static class FieldInjector {
private final Field field;
// Handles whose dependent instances must be destroyed when the test finishes
private final List<InstanceHandle<?>> unsetHandles;
public FieldInjector(Field field, Object testInstance) throws Exception {
this.field = field;
ArcContainer container = Arc.container();
BeanManager beanManager = container.beanManager();
java.lang.reflect.Type requiredType = field.getGenericType();
Annotation[] qualifiers = getQualifiers(field, beanManager);
Object injectedInstance;
if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
// @All List<T> injection - collect all matching handles
if (isListRequiredType(requiredType)) {
unsetHandles = new ArrayList<>();
injectedInstance = handleListAll(requiredType, qualifiers, container, unsetHandles);
} else {
throw new IllegalStateException("Invalid injection point type: " + field);
}
} else {
InstanceHandle<?> handle = container.instance(requiredType, qualifiers);
if (field.isAnnotationPresent(Inject.class)) {
// @Inject must resolve to a real (non-synthetic) component
// NOTE(review): the format string below has a single %s but is passed two extra
// arguments - the handle.getBean() argument is silently ignored
if (!handle.isAvailable()) {
throw new IllegalStateException(String
.format("The injected field [%s] expects a real component; but no matching component was registered",
field,
handle.getBean()));
}
if (handle.getBean().getKind() == io.quarkus.arc.InjectableBean.Kind.SYNTHETIC) {
throw new IllegalStateException(String
.format("The injected field [%s] expects a real component; but obtained: %s", field,
handle.getBean()));
}
} else {
// @InjectMock must resolve to a synthetic (mock) bean
if (!handle.isAvailable()) {
throw new IllegalStateException(String
.format("The injected field [%s] expects a mocked bean; but obtained null", field));
}
if (handle.getBean().getKind() != io.quarkus.arc.InjectableBean.Kind.SYNTHETIC) {
throw new IllegalStateException(String
.format("The injected field [%s] expects a mocked bean; but obtained: %s", field,
handle.getBean()));
}
}
injectedInstance = handle.get();
unsetHandles = List.of(handle);
}
if (!field.canAccess(testInstance)) {
field.setAccessible(true);
}
field.set(testInstance, injectedInstance);
}
// Destroys all @Dependent instances obtained for this field and resets the field to null.
void unset(Object testInstance) throws Exception {
for (InstanceHandle<?> handle : unsetHandles) {
if (handle.getBean() != null && handle.getBean().getScope().equals(Dependent.class)) {
try {
handle.destroy();
} catch (Exception e) {
LOG.errorf(e, "Unable to destroy the injected %s", handle.getBean());
}
}
}
field.set(testInstance, null);
}
}
/**
 * Resolves an {@code @All List<T>} injection point: strips the {@code @All} marker (falling
 * back to {@code @Default} if nothing remains), obtains all matching handles and returns either
 * the handles themselves (for {@code List<InstanceHandle<T>>}) or the contextual instances.
 */
private static Object handleListAll(java.lang.reflect.Type requiredType, Annotation[] qualifiers, ArcContainer container,
        Collection<InstanceHandle<?>> cleanupHandles) {
    Set<Annotation> remaining = new HashSet<>(Arrays.asList(qualifiers));
    remaining.remove(All.Literal.INSTANCE);
    Annotation[] effectiveQualifiers = remaining.isEmpty()
            ? new Annotation[] { Default.Literal.INSTANCE }
            : remaining.toArray(new Annotation[0]);
    List<InstanceHandle<Object>> handles = container.listAll(getListRequiredType(requiredType), effectiveQualifiers);
    // Remember the handles so that dependent instances can be destroyed later
    cleanupHandles.addAll(handles);
    if (isTypeArgumentInstanceHandle(requiredType)) {
        return handles;
    }
    return handles.stream().map(InstanceHandle::get).collect(Collectors.toUnmodifiableList());
}
/**
 * Loads the deprecated {@code io.quarkus.test.junit.mockito.InjectMock} annotation if it is
 * present on the class path; returns {@code null} otherwise (the dependency is optional).
 */
@SuppressWarnings("unchecked")
private Class<? extends Annotation> loadDeprecatedInjectMock() {
try {
return (Class<? extends Annotation>) Class.forName("io.quarkus.test.junit.mockito.InjectMock");
} catch (Throwable e) {
// Throwable on purpose - NoClassDefFoundError etc. simply mean "not available"
return null;
}
}
// Returns true if the given type is a parameterized java.util.List.
private static boolean isListRequiredType(java.lang.reflect.Type type) {
    if (!(type instanceof ParameterizedType)) {
        return false;
    }
    return List.class.equals(((ParameterizedType) type).getRawType());
}
// Extracts T from a List<T> type; returns null when the type is not a parameterized List.
private static java.lang.reflect.Type getListRequiredType(java.lang.reflect.Type requiredType) {
    if (!(requiredType instanceof ParameterizedType)) {
        return null;
    }
    ParameterizedType parameterized = (ParameterizedType) requiredType;
    if (!List.class.equals(parameterized.getRawType())) {
        return null;
    }
    return parameterized.getActualTypeArguments()[0];
}
// Returns true if the first type argument of the given parameterized type is InstanceHandle<...>.
// The caller is expected to pass a ParameterizedType (e.g. a List<...> injection point type).
private static boolean isTypeArgumentInstanceHandle(java.lang.reflect.Type type) {
    java.lang.reflect.Type argument = ((ParameterizedType) type).getActualTypeArguments()[0];
    if (!(argument instanceof ParameterizedType)) {
        return false;
    }
    return InstanceHandle.class.equals(((ParameterizedType) argument).getRawType());
}
/**
 * Determines the directory to which generated test resources are written. The
 * "quarkus.test.component.output-directory" system property wins; otherwise the root of the
 * class path element that contains the test class is derived from its resource URL.
 */
private File getTestOutputDirectory(Class<?> testClass) {
String outputDirectory = System.getProperty(QUARKUS_TEST_COMPONENT_OUTPUT_DIRECTORY);
File testOutputDirectory;
if (outputDirectory != null) {
testOutputDirectory = new File(outputDirectory);
} else {
// Locate the test class resource and strip the class-relative part (plus the separator)
// from the end of its URL to obtain the class path root
String testClassResourceName = fromClassNameToResourceName(testClass.getName());
String testPath = testClass.getClassLoader().getResource(testClassResourceName).toString();
String testClassesRootPath = testPath.substring(0, testPath.length() - testClassResourceName.length() - 1);
testOutputDirectory = new File(URI.create(testClassesRootPath));
}
if (!testOutputDirectory.canWrite()) {
throw new IllegalStateException("Invalid test output directory: " + testOutputDirectory);
}
return testOutputDirectory;
}
/**
 * Computes the location of the generated ComponentsProvider service file. Prefers Maven's
 * "target" directory, then Gradle's "build", then a generic fallback directory.
 */
private File getComponentsProviderFile(Class<?> testClass) {
    File generatedSourcesDirectory;
    File targetDir = new File("target");
    if (targetDir.canWrite()) {
        generatedSourcesDirectory = new File(targetDir, "generated-arc-sources");
    } else {
        File buildDir = new File("build");
        generatedSourcesDirectory = buildDir.canWrite()
                ? new File(buildDir, "generated-arc-sources")
                : new File("quarkus-component-test/generated-arc-sources");
    }
    File packageDirectory = new File(generatedSourcesDirectory, nameToPath(testClass.getPackage().getName()));
    return new File(packageDirectory, ComponentsProvider.class.getSimpleName());
}
}
|
class QuarkusComponentTestExtension
implements BeforeAllCallback, AfterAllCallback, BeforeEachCallback, AfterEachCallback, TestInstancePostProcessor,
ParameterResolver {
/**
 * @return a new builder for fluent configuration of the extension
 */
public static QuarkusComponentTestExtensionBuilder builder() {
return new QuarkusComponentTestExtensionBuilder();
}
private static final Logger LOG = Logger.getLogger(QuarkusComponentTestExtension.class);
// Extension-specific namespace within the JUnit root store
private static final ExtensionContext.Namespace NAMESPACE = ExtensionContext.Namespace
.create(QuarkusComponentTestExtension.class);
// Keys used to stash state in the root store between lifecycle callbacks
private static final String KEY_OLD_TCCL = "oldTccl";
private static final String KEY_OLD_CONFIG_PROVIDER_RESOLVER = "oldConfigProviderResolver";
private static final String KEY_GENERATED_RESOURCES = "generatedResources";
private static final String KEY_INJECTED_FIELDS = "injectedFields";
private static final String KEY_INJECTED_PARAMS = "injectedParams";
private static final String KEY_TEST_INSTANCE = "testInstance";
private static final String KEY_CONFIG = "config";
private static final String KEY_TEST_CLASS_CONFIG = "testClassConfig";
private static final String KEY_CONFIG_MAPPINGS = "configMappings";
// System property that overrides where generated test resources are written
private static final String QUARKUS_TEST_COMPONENT_OUTPUT_DIRECTORY = "quarkus.test.component.output-directory";
// Configuration supplied at construction time; refined per test class in buildContainer()
private final QuarkusComponentTestConfiguration baseConfiguration;
/**
 * Creates an extension with the default configuration.
 */
public QuarkusComponentTestExtension() {
this(QuarkusComponentTestConfiguration.DEFAULT);
}
/**
 * The initial set of components under test is derived from the test class. The types of all fields annotated with
 * {@link jakarta.inject.Inject} are considered the component types.
 *
 * @param additionalComponentClasses component classes to add on top of those derived from the test class
 */
public QuarkusComponentTestExtension(Class<?>... additionalComponentClasses) {
this(new QuarkusComponentTestConfiguration(Map.of(), List.of(additionalComponentClasses),
List.of(), false, true, QuarkusComponentTestExtensionBuilder.DEFAULT_CONFIG_SOURCE_ORDINAL,
List.of(), List.of(), null));
}
// Package-private: used by the builder to pass a fully assembled configuration.
QuarkusComponentTestExtension(QuarkusComponentTestConfiguration baseConfiguration) {
this.baseConfiguration = baseConfiguration;
}
@Override
public void beforeAll(ExtensionContext context) throws Exception {
long start = System.nanoTime();
// Build the container once per test class; start it immediately for PER_CLASS lifecycle
buildContainer(context);
startContainer(context, Lifecycle.PER_CLASS);
LOG.debugf("beforeAll: %s ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
@Override
public void afterAll(ExtensionContext context) throws Exception {
long start = System.nanoTime();
// Stop a PER_CLASS container and then dispose of generated resources and restore the TCCL
stopContainer(context, Lifecycle.PER_CLASS);
cleanup(context);
LOG.debugf("afterAll: %s ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
long start = System.nanoTime();
// Start a PER_METHOD container (no-op for PER_CLASS) and activate the CDI request context
startContainer(context, Lifecycle.PER_METHOD);
Arc.container().requestContext().activate();
LOG.debugf("beforeEach: %s ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
@Override
public void afterEach(ExtensionContext context) throws Exception {
long start = System.nanoTime();
// Terminate the request context, destroy dependent method parameters, then stop a PER_METHOD container
Arc.container().requestContext().terminate();
destroyDependentTestMethodParams(context);
stopContainer(context, Lifecycle.PER_METHOD);
LOG.debugf("afterEach: %s ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
@Override
public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception {
long start = System.nanoTime();
// Stash the test instance in the root store so that other callbacks can access it
store(context).put(KEY_TEST_INSTANCE, testInstance);
LOG.debugf("postProcessTestInstance: %s ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
// Matches parameters that JUnit itself resolves (@TempDir, TestInfo, RepetitionInfo,
// TestReporter) - this extension must never attempt to inject them.
static final Predicate<Parameter> BUILTIN_PARAMETER = parameter -> {
    if (parameter.isAnnotationPresent(TempDir.class)) {
        return true;
    }
    java.lang.reflect.Type type = parameter.getParameterizedType();
    return type.equals(TestInfo.class) || type.equals(RepetitionInfo.class) || type.equals(TestReporter.class);
};
/**
 * Decides whether a test method parameter should be resolved by this extension: the parameter
 * must belong to a test method, must not be skipped (@SkipInject, @Mock) and must not be a
 * JUnit built-in parameter. An @All List parameter is always supported; otherwise a matching
 * bean must be resolvable.
 */
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
if (
parameterContext.getTarget().isPresent()
&& isTestMethod(parameterContext.getDeclaringExecutable())
&& !parameterContext.isAnnotated(SkipInject.class)
&& !parameterContext.getDeclaringExecutable().isAnnotationPresent(SkipInject.class)
&& !parameterContext.isAnnotated(Mock.class)
&& !BUILTIN_PARAMETER.test(parameterContext.getParameter())) {
BeanManager beanManager = Arc.container().beanManager();
java.lang.reflect.Type requiredType = parameterContext.getParameter().getParameterizedType();
Annotation[] qualifiers = getQualifiers(parameterContext.getAnnotatedElement(), beanManager);
if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
// @All is only valid on List<...> parameters
if (isListRequiredType(requiredType)) {
return true;
} else {
throw new IllegalStateException("Invalid injection point type: " + parameterContext.getParameter());
}
} else {
try {
Bean<?> bean = beanManager.resolve(beanManager.getBeans(requiredType, qualifiers));
if (bean == null) {
String msg = String.format("No matching bean found for the type [%s] and qualifiers %s",
requiredType, Arrays.toString(qualifiers));
// Fail hard for @InjectMock or qualified parameters; otherwise leave
// the parameter to other resolvers
if (parameterContext.isAnnotated(InjectMock.class) || qualifiers.length > 0) {
throw new IllegalStateException(msg);
} else {
LOG.info(msg + " - consider annotating the parameter with @SkipInject");
return false;
}
}
return true;
} catch (AmbiguousResolutionException e) {
String msg = String.format(
"Multiple matching beans found for the type [%s] and qualifiers %s\n\t- if this parameter should not be resolved by CDI then use @SkipInject\n\t- found beans: %s",
requiredType, Arrays.toString(qualifiers), e.getMessage());
throw new IllegalStateException(msg);
}
}
}
return false;
}
/**
 * Resolves a supported test method parameter from the running container. All obtained handles
 * are recorded so that dependent instances can be destroyed after the test method.
 */
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext context)
throws ParameterResolutionException {
@SuppressWarnings("unchecked")
List<InstanceHandle<?>> injectedParams = store(context).get(KEY_INJECTED_PARAMS, List.class);
ArcContainer container = Arc.container();
BeanManager beanManager = container.beanManager();
java.lang.reflect.Type requiredType = parameterContext.getParameter().getParameterizedType();
Annotation[] qualifiers = getQualifiers(parameterContext.getAnnotatedElement(), beanManager);
if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
// @All List<...> parameter
return handleListAll(requiredType, qualifiers, container, injectedParams);
} else {
InstanceHandle<?> handle = container.instance(requiredType, qualifiers);
injectedParams.add(handle);
return handle.get();
}
}
// Destroys all @Dependent beans that were injected into test method parameters and clears the
// recorded handles so they are not processed again.
private void destroyDependentTestMethodParams(ExtensionContext context) {
@SuppressWarnings("unchecked")
List<InstanceHandle<?>> injectedParams = store(context).get(KEY_INJECTED_PARAMS, List.class);
for (InstanceHandle<?> handle : injectedParams) {
if (handle.getBean() != null && handle.getBean().getScope().equals(Dependent.class)) {
try {
handle.destroy();
} catch (Exception e) {
LOG.errorf(e, "Unable to destroy the injected %s", handle.getBean());
}
}
}
injectedParams.clear();
}
private void buildContainer(ExtensionContext context) {
    // Merges the base configuration with the per-test-class configuration, then boots
    // the ArC container. The previous thread context classloader is stored so that
    // cleanup() can restore it once the test is finished.
    QuarkusComponentTestConfiguration testClassConfiguration = baseConfiguration
            .update(context.getRequiredTestClass());
    store(context).put(KEY_TEST_CLASS_CONFIG, testClassConfiguration);
    ClassLoader oldTccl = initArcContainer(context, testClassConfiguration);
    store(context).put(KEY_OLD_TCCL, oldTccl);
}
@SuppressWarnings("unchecked")
private void cleanup(ExtensionContext context) {
    // Restores the TCCL that was active before the test classloader was installed and
    // removes all resources generated for this test run.
    ClassLoader oldTccl = store(context).get(KEY_OLD_TCCL, ClassLoader.class);
    Thread.currentThread().setContextClassLoader(oldTccl);
    store(context).remove(KEY_CONFIG_MAPPINGS);
    Set<Path> generatedResources = store(context).get(KEY_GENERATED_RESOURCES, Set.class);
    for (Path path : generatedResources) {
        try {
            LOG.debugf("Delete generated %s", path);
            Files.deleteIfExists(path);
        } catch (IOException e) {
            // Fix: the previous format string ("... %s: ") had one placeholder but two
            // arguments, so the cause was silently dropped; pass the exception as the
            // first argument (consistent with the other errorf call sites in this file)
            // so the full stack trace is logged.
            LOG.errorf(e, "Unable to delete the generated resource %s", path);
        }
    }
}
@SuppressWarnings("unchecked")
private void stopContainer(ExtensionContext context, Lifecycle testInstanceLifecycle) throws Exception {
    // Shuts down ArC and restores the previous config state, but only when the given
    // lifecycle matches the actual one (PER_METHOD vs PER_CLASS), so the container is
    // stopped exactly once per test instance.
    if (testInstanceLifecycle.equals(context.getTestInstanceLifecycle().orElse(Lifecycle.PER_METHOD))) {
        // Null out the injected test fields and destroy their dependent instances first
        for (FieldInjector fieldInjector : (List<FieldInjector>) store(context)
                .get(KEY_INJECTED_FIELDS, List.class)) {
            fieldInjector.unset(context.getRequiredTestInstance());
        }
        try {
            Arc.shutdown();
        } catch (Exception e) {
            // Fix: corrected the "occured" typo and pass the exception to the logger so
            // the stack trace is preserved instead of just the toString() of the cause.
            LOG.error("An error occurred during ArC shutdown", e);
        }
        MockBeanCreator.clear();
        ConfigBeanCreator.clear();
        InterceptorMethodCreator.clear();
        // Release the test config and restore the original config provider resolver
        SmallRyeConfig config = store(context).get(KEY_CONFIG, SmallRyeConfig.class);
        ConfigProviderResolver.instance().releaseConfig(config);
        ConfigProviderResolver
                .setInstance(store(context).get(KEY_OLD_CONFIG_PROVIDER_RESOLVER,
                        ConfigProviderResolver.class));
    }
}
private Store store(ExtensionContext context) {
    // Always use the root context's store so state is shared across nested contexts
    // (class-level and method-level callbacks see the same entries).
    ExtensionContext rootContext = context.getRoot();
    return rootContext.getStore(NAMESPACE);
}
private BeanRegistrar registrarForMock(MockBeanConfiguratorImpl<?> mock) {
    // Turns a programmatic mock configuration into a BeanRegistrar that registers a
    // synthetic bean. The actual instance is produced by MockBeanCreator, which looks
    // up the create function by the random key registered below.
    return new BeanRegistrar() {
        @Override
        public void register(RegistrationContext context) {
            BeanConfigurator<Object> configurator = context.configure(mock.beanClass);
            configurator.scope(mock.scope);
            mock.jandexTypes().forEach(configurator::addType);
            mock.jandexQualifiers().forEach(configurator::addQualifier);
            if (mock.name != null) {
                configurator.name(mock.name);
            }
            configurator.alternative(mock.alternative);
            if (mock.priority != null) {
                configurator.priority(mock.priority);
            }
            if (mock.defaultBean) {
                configurator.defaultBean();
            }
            // The key links this synthetic bean to its create function at runtime
            String key = UUID.randomUUID().toString();
            MockBeanCreator.registerCreate(key, cast(mock.create));
            configurator.creator(MockBeanCreator.class).param(MockBeanCreator.CREATE_KEY, key).done();
        }
    };
}
private static Annotation[] getQualifiers(AnnotatedElement element, BeanManager beanManager) {
    // Keep only the declared annotations that the CDI BeanManager recognizes as qualifiers.
    return Arrays.stream(element.getDeclaredAnnotations())
            .filter(candidate -> beanManager.isQualifier(candidate.annotationType()))
            .toArray(Annotation[]::new);
}
private static Set<AnnotationInstance> getQualifiers(AnnotatedElement element, Collection<DotName> qualifiers) {
    // Converts the declared annotations whose names appear in the known qualifier set
    // into Jandex AnnotationInstances.
    Set<AnnotationInstance> result = new HashSet<>();
    for (Annotation declared : element.getDeclaredAnnotations()) {
        DotName declaredName = DotName.createSimple(declared.annotationType());
        if (qualifiers.contains(declaredName)) {
            result.add(Annotations.jandexAnnotation(declared));
        }
    }
    return result;
}
private ClassLoader initArcContainer(ExtensionContext extensionContext, QuarkusComponentTestConfiguration configuration) {
    // Bootstraps a fresh ArC container for the configured component classes:
    //  1. builds a Jandex index of the components,
    //  2. configures a BeanProcessor that auto-mocks unsatisfied injection points and
    //     synthesizes Config/@ConfigProperty/@ConfigMapping beans,
    //  3. generates the bean metadata (to the test output dir, or in-memory for
    //     continuous testing) and installs a dedicated TCCL.
    // Returns the previous TCCL so cleanup() can restore it.
    if (configuration.componentClasses.isEmpty()) {
        throw new IllegalStateException("No component classes to test");
    }
    // Make sure no previous container is still running
    try {
        Arc.shutdown();
    } catch (Exception e) {
        throw new IllegalStateException("An error occured during ArC shutdown: " + e);
    }
    // Phase 1: index the component classes (plus ConfigProperty, which is always needed)
    IndexView index;
    try {
        Indexer indexer = new Indexer();
        for (Class<?> componentClass : configuration.componentClasses) {
            indexComponentClass(indexer, componentClass);
        }
        indexer.indexClass(ConfigProperty.class);
        index = BeanArchives.buildImmutableBeanArchiveIndex(indexer.complete());
    } catch (IOException e) {
        throw new IllegalStateException("Failed to create index", e);
    }
    Class<?> testClass = extensionContext.getRequiredTestClass();
    ClassLoader testClassClassLoader = testClass.getClassLoader();
    // In continuous testing the test class is loaded by a QuarkusClassLoader and the
    // generated classes must be fed back into it instead of written to disk
    boolean isContinuousTesting = testClassClassLoader instanceof QuarkusClassLoader;
    ClassLoader oldTccl = Thread.currentThread().getContextClassLoader();
    IndexView computingIndex = BeanArchives.buildComputingBeanArchiveIndex(oldTccl,
            new ConcurrentHashMap<>(), index);
    try {
        // Filled in later (during registerCustomContexts) but needed by the removal
        // exclusion and the bean registrar below - hence the mutable holders
        List<DotName> qualifiers = new ArrayList<>();
        Set<String> interceptorBindings = new HashSet<>();
        AtomicReference<BeanResolver> beanResolver = new AtomicReference<>();
        List<Field> injectFields = findInjectFields(testClass);
        List<Parameter> injectParams = findInjectParams(testClass);
        // Phase 2: configure the bean processor; beans injected into the test must
        // never be removed as "unused"
        BeanProcessor.Builder builder = BeanProcessor.builder()
                .setName(testClass.getName().replace('.', '_'))
                .addRemovalExclusion(b -> {
                    if (b.getTarget().isPresent()
                            && b.getTarget().get().hasDeclaredAnnotation(Unremovable.class)) {
                        return true;
                    }
                    for (Field injectionPoint : injectFields) {
                        if (beanResolver.get().matches(b, Types.jandexType(injectionPoint.getGenericType()),
                                getQualifiers(injectionPoint, qualifiers))) {
                            return true;
                        }
                    }
                    for (Parameter param : injectParams) {
                        if (beanResolver.get().matches(b, Types.jandexType(param.getParameterizedType()),
                                getQualifiers(param, qualifiers))) {
                            return true;
                        }
                    }
                    return false;
                })
                .setImmutableBeanArchiveIndex(index)
                .setComputingBeanArchiveIndex(computingIndex)
                .setRemoveUnusedBeans(true);
        // Where the generated bean metadata goes depends on the execution mode
        Set<Path> generatedResources;
        File componentsProviderFile = getComponentsProviderFile(testClass);
        if (isContinuousTesting) {
            generatedResources = Set.of();
            Map<String, byte[]> classes = new HashMap<>();
            builder.setOutput(new ResourceOutput() {
                @Override
                public void writeResource(Resource resource) throws IOException {
                    switch (resource.getType()) {
                        case JAVA_CLASS:
                            // Push generated classes straight into the QuarkusClassLoader
                            classes.put(resource.getName() + ".class", resource.getData());
                            ((QuarkusClassLoader) testClass.getClassLoader()).reset(classes, Map.of());
                            break;
                        case SERVICE_PROVIDER:
                            if (resource.getName()
                                    .endsWith(ComponentsProvider.class.getName())) {
                                componentsProviderFile.getParentFile()
                                        .mkdirs();
                                try (FileOutputStream out = new FileOutputStream(componentsProviderFile)) {
                                    out.write(resource.getData());
                                }
                            }
                            break;
                        default:
                            throw new IllegalArgumentException("Unsupported resource type: " + resource.getType());
                    }
                }
            });
        } else {
            generatedResources = new HashSet<>();
            File testOutputDirectory = getTestOutputDirectory(testClass);
            builder.setOutput(new ResourceOutput() {
                @Override
                public void writeResource(Resource resource) throws IOException {
                    switch (resource.getType()) {
                        case JAVA_CLASS:
                            // Track the generated files so cleanup() can delete them
                            generatedResources.add(resource.writeTo(testOutputDirectory).toPath());
                            break;
                        case SERVICE_PROVIDER:
                            if (resource.getName()
                                    .endsWith(ComponentsProvider.class.getName())) {
                                componentsProviderFile.getParentFile()
                                        .mkdirs();
                                try (FileOutputStream out = new FileOutputStream(componentsProviderFile)) {
                                    out.write(resource.getData());
                                }
                            }
                            break;
                        default:
                            throw new IllegalArgumentException("Unsupported resource type: " + resource.getType());
                    }
                }
            });
        }
        store(extensionContext).put(KEY_GENERATED_RESOURCES, generatedResources);
        // Treat any field annotated with a qualifier as if it were @Inject-annotated
        builder.addAnnotationTransformation(AnnotationsTransformer.appliedToField().whenContainsAny(qualifiers)
                .whenContainsNone(DotName.createSimple(Inject.class)).thenTransform(t -> t.add(Inject.class)));
        builder.addAnnotationTransformation(new JaxrsSingletonTransformer());
        for (AnnotationsTransformer transformer : configuration.annotationsTransformers) {
            builder.addAnnotationTransformation(transformer);
        }
        // Phase 3: register synthetic beans for unsatisfied injection points (mocks),
        // Config, @ConfigProperty and @ConfigMapping
        builder.addBeanRegistrar(new BeanRegistrar() {
            @Override
            public void register(RegistrationContext registrationContext) {
                long start = System.nanoTime();
                List<BeanInfo> beans = registrationContext.beans().collect();
                BeanDeployment beanDeployment = registrationContext.get(Key.DEPLOYMENT);
                Set<TypeAndQualifiers> unsatisfiedInjectionPoints = new HashSet<>();
                boolean configInjectionPoint = false;
                Set<TypeAndQualifiers> configPropertyInjectionPoints = new HashSet<>();
                Map<String, Set<String>> prefixToConfigMappings = new HashMap<>();
                DotName configDotName = DotName.createSimple(Config.class);
                DotName configPropertyDotName = DotName.createSimple(ConfigProperty.class);
                DotName configMappingDotName = DotName.createSimple(ConfigMapping.class);
                for (InjectionPointInfo injectionPoint : registrationContext.getInjectionPoints()) {
                    // Plain Config injection is handled by a dedicated synthetic bean
                    if (injectionPoint.getRequiredType().name().equals(configDotName)
                            && injectionPoint.hasDefaultedQualifier()) {
                        configInjectionPoint = true;
                        continue;
                    }
                    if (injectionPoint.getRequiredQualifier(configPropertyDotName) != null) {
                        configPropertyInjectionPoints.add(new TypeAndQualifiers(injectionPoint.getRequiredType(),
                                injectionPoint.getRequiredQualifiers()));
                        continue;
                    }
                    // Built-in beans (Event, BeanManager, ...) need no mock, except
                    // Instance/@All List whose element type still has to be satisfied
                    BuiltinBean builtin = BuiltinBean.resolve(injectionPoint);
                    if (builtin != null && builtin != BuiltinBean.INSTANCE && builtin != BuiltinBean.LIST) {
                        continue;
                    }
                    Type requiredType = injectionPoint.getRequiredType();
                    Set<AnnotationInstance> requiredQualifiers = injectionPoint.getRequiredQualifiers();
                    if (builtin == BuiltinBean.LIST) {
                        // For @All List<X> the element type X is what must be resolvable
                        requiredType = requiredType.asParameterizedType().arguments().get(0);
                        requiredQualifiers = new HashSet<>(requiredQualifiers);
                        requiredQualifiers.removeIf(q -> q.name().equals(DotNames.ALL));
                        if (requiredQualifiers.isEmpty()) {
                            requiredQualifiers.add(AnnotationInstance.builder(DotNames.DEFAULT).build());
                        }
                    }
                    if (requiredType.kind() == Kind.CLASS) {
                        // Collect @ConfigMapping interfaces so matching beans can be synthesized
                        ClassInfo clazz = computingIndex.getClassByName(requiredType.name());
                        if (clazz != null && clazz.isInterface()) {
                            AnnotationInstance configMapping = clazz.declaredAnnotation(configMappingDotName);
                            if (configMapping != null) {
                                AnnotationValue prefixValue = configMapping.value("prefix");
                                String prefix = prefixValue == null ? "" : prefixValue.asString();
                                Set<String> mappingClasses = prefixToConfigMappings.computeIfAbsent(prefix,
                                        k -> new HashSet<>());
                                mappingClasses.add(clazz.name().toString());
                            }
                        }
                    }
                    if (isSatisfied(requiredType, requiredQualifiers, injectionPoint, beans, beanDeployment,
                            configuration)) {
                        continue;
                    }
                    // Primitives/arrays cannot be mocked - fail fast with a clear message
                    if (requiredType.kind() == Kind.PRIMITIVE || requiredType.kind() == Kind.ARRAY) {
                        throw new IllegalStateException(
                                "Found an unmockable unsatisfied injection point: " + injectionPoint.getTargetInfo());
                    }
                    unsatisfiedInjectionPoints.add(new TypeAndQualifiers(requiredType, requiredQualifiers));
                    LOG.debugf("Unsatisfied injection point found: %s", injectionPoint.getTargetInfo());
                }
                // @InjectMock fields/params always get a synthetic mock bean
                for (Field field : findFields(testClass, List.of(InjectMock.class))) {
                    Set<AnnotationInstance> requiredQualifiers = getQualifiers(field, qualifiers);
                    if (requiredQualifiers.isEmpty()) {
                        requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
                    }
                    unsatisfiedInjectionPoints
                            .add(new TypeAndQualifiers(Types.jandexType(field.getGenericType()), requiredQualifiers));
                }
                for (Parameter param : findInjectMockParams(testClass)) {
                    Set<AnnotationInstance> requiredQualifiers = getQualifiers(param, qualifiers);
                    if (requiredQualifiers.isEmpty()) {
                        requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
                    }
                    unsatisfiedInjectionPoints
                            .add(new TypeAndQualifiers(Types.jandexType(param.getParameterizedType()), requiredQualifiers));
                }
                // Register a synthetic singleton default bean for each unsatisfied point
                for (TypeAndQualifiers unsatisfied : unsatisfiedInjectionPoints) {
                    ClassInfo implementationClass = computingIndex.getClassByName(unsatisfied.type.name());
                    BeanConfigurator<Object> configurator = registrationContext.configure(implementationClass.name())
                            .scope(Singleton.class)
                            .addType(unsatisfied.type);
                    unsatisfied.qualifiers.forEach(configurator::addQualifier);
                    configurator.param("implementationClass", implementationClass)
                            .creator(MockBeanCreator.class)
                            .defaultBean()
                            .identifier("dummy")
                            .done();
                }
                if (configInjectionPoint) {
                    registrationContext.configure(Config.class)
                            .addType(Config.class)
                            .creator(ConfigBeanCreator.class)
                            .done();
                }
                if (!configPropertyInjectionPoints.isEmpty()) {
                    // One synthetic bean covering all @ConfigProperty injection point types
                    BeanConfigurator<Object> configPropertyConfigurator = registrationContext.configure(Object.class)
                            .identifier("configProperty")
                            .addQualifier(ConfigProperty.class)
                            .param("useDefaultConfigProperties", configuration.useDefaultConfigProperties)
                            .addInjectionPoint(ClassType.create(InjectionPoint.class))
                            .creator(ConfigPropertyBeanCreator.class);
                    for (TypeAndQualifiers configPropertyInjectionPoint : configPropertyInjectionPoints) {
                        configPropertyConfigurator.addType(configPropertyInjectionPoint.type);
                    }
                    configPropertyConfigurator.done();
                }
                if (!prefixToConfigMappings.isEmpty()) {
                    Set<ConfigClassWithPrefix> configMappings = new HashSet<>();
                    for (Entry<String, Set<String>> e : prefixToConfigMappings.entrySet()) {
                        for (String mapping : e.getValue()) {
                            DotName mappingName = DotName.createSimple(mapping);
                            registrationContext.configure(mappingName)
                                    .addType(mappingName)
                                    .creator(ConfigMappingBeanCreator.class)
                                    .param("mappingClass", mapping)
                                    .param("prefix", e.getKey())
                                    .done();
                            configMappings.add(ConfigClassWithPrefix
                                    .configClassWithPrefix(ConfigMappingBeanCreator.tryLoad(mapping), e.getKey()));
                        }
                    }
                    // The mappings are registered with the SmallRye config later on
                    store(extensionContext).put(KEY_CONFIG_MAPPINGS, configMappings);
                }
                LOG.debugf("Test injection points analyzed in %s ms [found: %s, mocked: %s]",
                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start),
                        registrationContext.getInjectionPoints().size(),
                        unsatisfiedInjectionPoints.size());
                processTestInterceptorMethods(testClass, extensionContext, registrationContext, interceptorBindings);
            }
        });
        for (MockBeanConfiguratorImpl<?> mockConfigurator : configuration.mockConfigurators) {
            builder.addBeanRegistrar(registrarForMock(mockConfigurator));
        }
        // Phase 4: run the processor pipeline and generate the bean metadata classes
        BeanProcessor beanProcessor = builder.build();
        try {
            // Bytecode transformations are not supported in component tests
            Consumer<BytecodeTransformer> unsupportedBytecodeTransformer = new Consumer<BytecodeTransformer>() {
                @Override
                public void accept(BytecodeTransformer transformer) {
                    throw new UnsupportedOperationException();
                }
            };
            ContextRegistrar.RegistrationContext registrationContext = beanProcessor.registerCustomContexts();
            qualifiers.addAll(registrationContext.get(Key.QUALIFIERS).keySet());
            for (DotName binding : registrationContext.get(Key.INTERCEPTOR_BINDINGS).keySet()) {
                interceptorBindings.add(binding.toString());
            }
            beanResolver.set(registrationContext.get(Key.DEPLOYMENT).getBeanResolver());
            beanProcessor.registerScopes();
            beanProcessor.registerBeans();
            beanProcessor.getBeanDeployment().initBeanByTypeMap();
            beanProcessor.registerSyntheticObservers();
            beanProcessor.initialize(unsupportedBytecodeTransformer, Collections.emptyList());
            ValidationContext validationContext = beanProcessor.validate(unsupportedBytecodeTransformer);
            beanProcessor.processValidationErrors(validationContext);
            ExecutorService executor = Executors.newCachedThreadPool();
            beanProcessor.generateResources(null, new HashSet<>(), unsupportedBytecodeTransformer, true, executor);
            executor.shutdown();
        } catch (IOException e) {
            throw new IllegalStateException("Error generating resources", e);
        }
        // Phase 5: install a TCCL that can see the generated ComponentsProvider
        QuarkusComponentTestClassLoader testClassLoader = new QuarkusComponentTestClassLoader(
                isContinuousTesting ? testClassClassLoader : oldTccl,
                componentsProviderFile,
                null);
        Thread.currentThread().setContextClassLoader(testClassLoader);
    } catch (Throwable e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        } else {
            throw new RuntimeException(e);
        }
    }
    return oldTccl;
}
private void processTestInterceptorMethods(Class<?> testClass, ExtensionContext context,
        BeanRegistrar.RegistrationContext registrationContext, Set<String> interceptorBindings) {
    // Registers every interceptor method declared on the test class (annotated with
    // @AroundInvoke/@PostConstruct/@PreDestroy/@AroundConstruct) as a synthetic
    // interceptor. At runtime the generated interceptor dispatches back to the test
    // method via InterceptorMethodCreator, using the random key as the link.
    List<Class<? extends Annotation>> annotations = List.of(AroundInvoke.class, PostConstruct.class, PreDestroy.class,
            AroundConstruct.class);
    Predicate<Method> predicate = m -> {
        for (Class<? extends Annotation> annotation : annotations) {
            if (m.isAnnotationPresent(annotation)) {
                return true;
            }
        }
        return false;
    };
    for (Method method : findMethods(testClass, predicate)) {
        Set<Annotation> bindings = findBindings(method, interceptorBindings);
        if (bindings.isEmpty()) {
            throw new IllegalStateException("No bindings declared on a test interceptor method: " + method);
        }
        validateTestInterceptorMethod(method);
        String key = UUID.randomUUID().toString();
        InterceptorMethodCreator.registerCreate(key, ctx -> {
            return ic -> {
                // Non-static interceptor methods are invoked on the current test instance
                Object instance = null;
                if (!Modifier.isStatic(method.getModifiers())) {
                    Object testInstance = store(context).get(KEY_TEST_INSTANCE);
                    if (testInstance == null) {
                        throw new IllegalStateException("Test instance not available");
                    }
                    instance = testInstance;
                    if (!method.canAccess(instance)) {
                        method.setAccessible(true);
                    }
                }
                return method.invoke(instance, ic);
            };
        });
        // Map the declared annotation to the corresponding interception type
        InterceptionType interceptionType;
        if (method.isAnnotationPresent(AroundInvoke.class)) {
            interceptionType = InterceptionType.AROUND_INVOKE;
        } else if (method.isAnnotationPresent(PostConstruct.class)) {
            interceptionType = InterceptionType.POST_CONSTRUCT;
        } else if (method.isAnnotationPresent(PreDestroy.class)) {
            interceptionType = InterceptionType.PRE_DESTROY;
        } else if (method.isAnnotationPresent(AroundConstruct.class)) {
            interceptionType = InterceptionType.AROUND_CONSTRUCT;
        } else {
            throw new IllegalStateException("No interceptor annotation declared on: " + method);
        }
        // @Priority is honored; default priority is 1
        int priority = 1;
        Priority priorityAnnotation = method.getAnnotation(Priority.class);
        if (priorityAnnotation != null) {
            priority = priorityAnnotation.value();
        }
        registrationContext.configureInterceptor(interceptionType)
                .identifier(key)
                .priority(priority)
                .bindings(bindings.stream().map(Annotations::jandexAnnotation)
                        .toArray(AnnotationInstance[]::new))
                .param(InterceptorMethodCreator.CREATE_KEY, key)
                .creator(InterceptorMethodCreator.class);
    }
}
private void validateTestInterceptorMethod(Method method) {
    // A test interceptor method must declare exactly one parameter, assignable from
    // InvocationContext; anything else is a user error reported eagerly.
    Parameter[] params = method.getParameters();
    if (params.length != 1 || !InvocationContext.class.isAssignableFrom(params[0].getType())) {
        // Fix: added the missing space after the colon so the parameter dump is not
        // fused with the preceding word in the error message.
        throw new IllegalStateException("A test interceptor method must declare exactly one InvocationContext parameter: "
                + Arrays.toString(params));
    }
}
private Set<Annotation> findBindings(Method method, Set<String> bindings) {
    // Collect the annotations on the method whose type name is a known interceptor binding.
    Set<Annotation> found = new HashSet<>();
    for (Annotation annotation : method.getAnnotations()) {
        if (bindings.contains(annotation.annotationType().getName())) {
            found.add(annotation);
        }
    }
    return found;
}
private void indexComponentClass(Indexer indexer, Class<?> componentClass) {
    // Indexes the component class, all annotations used on its members (fields,
    // methods, method parameters), its interfaces (recursively) and its superclasses,
    // so that the bean archive index is self-contained.
    try {
        while (componentClass != null) {
            indexer.indexClass(componentClass);
            for (Annotation annotation : componentClass.getAnnotations()) {
                indexer.indexClass(annotation.annotationType());
            }
            for (Field field : componentClass.getDeclaredFields()) {
                indexAnnotatedElement(indexer, field);
            }
            for (Method method : componentClass.getDeclaredMethods()) {
                indexAnnotatedElement(indexer, method);
                for (Parameter param : method.getParameters()) {
                    indexAnnotatedElement(indexer, param);
                }
            }
            for (Class<?> iface : componentClass.getInterfaces()) {
                indexComponentClass(indexer, iface);
            }
            componentClass = componentClass.getSuperclass();
        }
    } catch (IOException e) {
        // Fix: added the missing space after the colon in the error message.
        throw new IllegalStateException("Failed to index: " + componentClass, e);
    }
}
private void indexAnnotatedElement(Indexer indexer, AnnotatedElement element) throws IOException {
    // Indexes the annotation types used on the given element so that Jandex can
    // resolve them when building the bean archive index.
    for (Annotation annotation : element.getAnnotations()) {
        indexer.indexClass(annotation.annotationType());
    }
}
private boolean isSatisfied(Type requiredType, Set<AnnotationInstance> qualifiers, InjectionPointInfo injectionPoint,
        Iterable<BeanInfo> beans, BeanDeployment beanDeployment, QuarkusComponentTestConfiguration configuration) {
    // An injection point is satisfied either by a real bean or by one of the
    // programmatically registered mock configurators.
    for (BeanInfo candidate : beans) {
        if (Beans.matches(candidate, requiredType, qualifiers)) {
            LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(),
                    candidate.toString());
            return true;
        }
    }
    for (MockBeanConfiguratorImpl<?> mockConfigurator : configuration.mockConfigurators) {
        if (mockConfigurator.matches(beanDeployment.getBeanResolver(), requiredType, qualifiers)) {
            LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(),
                    mockConfigurator);
            return true;
        }
    }
    return false;
}
private String nameToPath(String name) {
    // Converts a package name to a platform-specific relative path
    // (e.g. "io.quarkus.test" -> "io/quarkus/test" on POSIX).
    char separator = File.separatorChar;
    return name.replace('.', separator);
}
@SuppressWarnings("unchecked")
static <T> T cast(Object obj) {
    // Unchecked cast helper - centralizes the @SuppressWarnings so call sites stay clean.
    // The caller is responsible for the cast actually being safe at runtime.
    return (T) obj;
}
private List<FieldInjector> injectFields(Class<?> testClass, Object testInstance) throws Exception {
    // Creates one FieldInjector per @Inject/@InjectMock field; the FieldInjector
    // constructor performs the actual injection into the test instance.
    List<FieldInjector> result = new ArrayList<>();
    for (Field injectField : findInjectFields(testClass)) {
        result.add(new FieldInjector(injectField, testInstance));
    }
    return result;
}
private List<Field> findInjectFields(Class<?> testClass) {
    // @Inject and @InjectMock are always recognized; the deprecated
    // io.quarkus.test.junit.mockito.InjectMock is honored when present on the classpath.
    List<Class<? extends Annotation>> injectAnnotations = new ArrayList<>();
    injectAnnotations.add(Inject.class);
    injectAnnotations.add(InjectMock.class);
    Class<? extends Annotation> deprecatedInjectMock = loadDeprecatedInjectMock();
    if (deprecatedInjectMock != null) {
        injectAnnotations.add(deprecatedInjectMock);
    }
    return findFields(testClass, injectAnnotations);
}
private List<Parameter> findInjectParams(Class<?> testClass) {
    // Collects every test method parameter that should be resolved from the container,
    // i.e. is neither a built-in JUnit parameter nor explicitly @SkipInject-annotated.
    List<Parameter> result = new ArrayList<>();
    for (Method testMethod : findMethods(testClass, QuarkusComponentTestExtension::isTestMethod)) {
        for (Parameter param : testMethod.getParameters()) {
            boolean skipped = BUILTIN_PARAMETER.test(param)
                    || param.isAnnotationPresent(SkipInject.class);
            if (!skipped) {
                result.add(param);
            }
        }
    }
    return result;
}
private List<Parameter> findInjectMockParams(Class<?> testClass) {
    // Collects the test method parameters annotated with @InjectMock, excluding
    // built-in JUnit parameters.
    List<Parameter> result = new ArrayList<>();
    for (Method testMethod : findMethods(testClass, QuarkusComponentTestExtension::isTestMethod)) {
        for (Parameter param : testMethod.getParameters()) {
            if (!param.isAnnotationPresent(InjectMock.class) || BUILTIN_PARAMETER.test(param)) {
                continue;
            }
            result.add(param);
        }
    }
    return result;
}
static boolean isTestMethod(Executable method) {
    // A method counts as a test method if it carries any of the JUnit 5 test annotations.
    for (Class<? extends Annotation> testAnnotation : List.of(Test.class, ParameterizedTest.class,
            RepeatedTest.class)) {
        if (method.isAnnotationPresent(testAnnotation)) {
            return true;
        }
    }
    return false;
}
private List<Field> findFields(Class<?> testClass, List<Class<? extends Annotation>> annotations) {
    // Walks the class hierarchy (excluding java.lang.Object) and collects every
    // declared field annotated with at least one of the given annotations.
    List<Field> fields = new ArrayList<>();
    for (Class<?> current = testClass; current.getSuperclass() != null; current = current.getSuperclass()) {
        for (Field field : current.getDeclaredFields()) {
            if (annotations.stream().anyMatch(field::isAnnotationPresent)) {
                fields.add(field);
            }
        }
    }
    return fields;
}
private List<Method> findMethods(Class<?> testClass, Predicate<Method> methodPredicate) {
    // Walks the class hierarchy (excluding java.lang.Object) and collects every
    // declared method matching the predicate.
    List<Method> methods = new ArrayList<>();
    for (Class<?> current = testClass; current.getSuperclass() != null; current = current.getSuperclass()) {
        for (Method method : current.getDeclaredMethods()) {
            if (methodPredicate.test(method)) {
                methods.add(method);
            }
        }
    }
    return methods;
}
static class FieldInjector {
    // Injects a single test class field from the ArC container and remembers the
    // handles so unset() can destroy dependent instances and null the field again.
    private final Field field;
    // Handles to destroy (if @Dependent) when the test instance is torn down
    private final List<InstanceHandle<?>> unsetHandles;
    public FieldInjector(Field field, Object testInstance) throws Exception {
        this.field = field;
        ArcContainer container = Arc.container();
        BeanManager beanManager = container.beanManager();
        java.lang.reflect.Type requiredType = field.getGenericType();
        Annotation[] qualifiers = getQualifiers(field, beanManager);
        Object injectedInstance;
        if (qualifiers.length > 0 && Arrays.stream(qualifiers).anyMatch(All.Literal.INSTANCE::equals)) {
            // @All injection is only valid on List<...> fields
            if (isListRequiredType(requiredType)) {
                unsetHandles = new ArrayList<>();
                injectedInstance = handleListAll(requiredType, qualifiers, container, unsetHandles);
            } else {
                throw new IllegalStateException("Invalid injection point type: " + field);
            }
        } else {
            InstanceHandle<?> handle = container.instance(requiredType, qualifiers);
            if (field.isAnnotationPresent(Inject.class)) {
                // @Inject fields must resolve to a real (non-synthetic) component
                if (!handle.isAvailable()) {
                    throw new IllegalStateException(String
                            .format("The injected field [%s] expects a real component; but no matching component was registered",
                                    field,
                                    handle.getBean()));
                }
                if (handle.getBean().getKind() == io.quarkus.arc.InjectableBean.Kind.SYNTHETIC) {
                    throw new IllegalStateException(String
                            .format("The injected field [%s] expects a real component; but obtained: %s", field,
                                    handle.getBean()));
                }
            } else {
                // @InjectMock fields must resolve to a synthetic (mocked) bean
                if (!handle.isAvailable()) {
                    throw new IllegalStateException(String
                            .format("The injected field [%s] expects a mocked bean; but obtained null", field));
                }
                if (handle.getBean().getKind() != io.quarkus.arc.InjectableBean.Kind.SYNTHETIC) {
                    throw new IllegalStateException(String
                            .format("The injected field [%s] expects a mocked bean; but obtained: %s", field,
                                    handle.getBean()));
                }
            }
            injectedInstance = handle.get();
            unsetHandles = List.of(handle);
        }
        if (!field.canAccess(testInstance)) {
            field.setAccessible(true);
        }
        field.set(testInstance, injectedInstance);
    }
    void unset(Object testInstance) throws Exception {
        // Destroy dependent instances (best-effort) and clear the field reference
        for (InstanceHandle<?> handle : unsetHandles) {
            if (handle.getBean() != null && handle.getBean().getScope().equals(Dependent.class)) {
                try {
                    handle.destroy();
                } catch (Exception e) {
                    LOG.errorf(e, "Unable to destroy the injected %s", handle.getBean());
                }
            }
        }
        field.set(testInstance, null);
    }
}
private static Object handleListAll(java.lang.reflect.Type requiredType, Annotation[] qualifiers, ArcContainer container,
        Collection<InstanceHandle<?>> cleanupHandles) {
    // Resolves an @All List<X> (or List<InstanceHandle<X>>) injection point. The @All
    // marker is stripped from the qualifier set; if nothing remains, @Default is used.
    Set<Annotation> qualifiersSet = new HashSet<>();
    Collections.addAll(qualifiersSet, qualifiers);
    qualifiersSet.remove(All.Literal.INSTANCE);
    if (qualifiersSet.isEmpty()) {
        qualifiers = new Annotation[] { Default.Literal.INSTANCE };
    } else {
        qualifiers = qualifiersSet.toArray(new Annotation[] {});
    }
    List<InstanceHandle<Object>> handles = container.listAll(getListRequiredType(requiredType), qualifiers);
    // Track the handles so dependent instances can be destroyed later
    cleanupHandles.addAll(handles);
    // List<InstanceHandle<X>> gets the raw handles; List<X> gets the contextual instances
    return isTypeArgumentInstanceHandle(requiredType) ? handles
            : handles.stream().map(InstanceHandle::get).collect(Collectors.toUnmodifiableList());
}
@SuppressWarnings("unchecked")
private Class<? extends Annotation> loadDeprecatedInjectMock() {
    // Tries to load the deprecated @InjectMock annotation from quarkus-junit5-mockito;
    // returns null when it is not on the classpath (Throwable is caught on purpose -
    // linkage errors are possible here, not just ClassNotFoundException).
    try {
        return (Class<? extends Annotation>) Class.forName("io.quarkus.test.junit.mockito.InjectMock");
    } catch (Throwable e) {
        return null;
    }
}
private static boolean isListRequiredType(java.lang.reflect.Type type) {
    // Only a parameterized java.util.List (e.g. List<Foo>) qualifies; a raw List or
    // any other type does not.
    if (!(type instanceof ParameterizedType)) {
        return false;
    }
    return List.class.equals(((ParameterizedType) type).getRawType());
}
private static java.lang.reflect.Type getListRequiredType(java.lang.reflect.Type requiredType) {
    // Returns the element type of a parameterized List (List<X> -> X), or null when
    // the given type is not a parameterized java.util.List.
    if (!(requiredType instanceof ParameterizedType)) {
        return null;
    }
    ParameterizedType parameterized = (ParameterizedType) requiredType;
    return List.class.equals(parameterized.getRawType())
            ? parameterized.getActualTypeArguments()[0]
            : null;
}
private static boolean isTypeArgumentInstanceHandle(java.lang.reflect.Type type) {
    // Assumes `type` is a ParameterizedType (callers check this); true when the first
    // type argument is itself a parameterized InstanceHandle, i.e. List<InstanceHandle<X>>.
    java.lang.reflect.Type firstArgument = ((ParameterizedType) type).getActualTypeArguments()[0];
    if (!(firstArgument instanceof ParameterizedType)) {
        return false;
    }
    return InstanceHandle.class.equals(((ParameterizedType) firstArgument).getRawType());
}
private File getTestOutputDirectory(Class<?> testClass) {
    // Determines where generated classes are written: an explicit system property
    // wins; otherwise the root of the classpath entry that contains the test class
    // (derived from the test class resource URL) is used.
    String outputDirectory = System.getProperty(QUARKUS_TEST_COMPONENT_OUTPUT_DIRECTORY);
    File testOutputDirectory;
    if (outputDirectory != null) {
        testOutputDirectory = new File(outputDirectory);
    } else {
        String testClassResourceName = fromClassNameToResourceName(testClass.getName());
        String testPath = testClass.getClassLoader().getResource(testClassResourceName).toString();
        // Strip "<package path>/<ClassName>.class" (plus the separator) from the URL
        String testClassesRootPath = testPath.substring(0, testPath.length() - testClassResourceName.length() - 1);
        testOutputDirectory = new File(URI.create(testClassesRootPath));
    }
    if (!testOutputDirectory.canWrite()) {
        throw new IllegalStateException("Invalid test output directory: " + testOutputDirectory);
    }
    return testOutputDirectory;
}
private File getComponentsProviderFile(Class<?> testClass) {
    // Computes the file that will hold the generated ComponentsProvider service
    // descriptor. Prefers Maven's "target", then Gradle's "build", falling back to a
    // relative "quarkus-component-test" directory when neither is writable.
    File generatedSourcesDirectory;
    File targetDir = new File("target");
    if (targetDir.canWrite()) {
        // Maven build
        generatedSourcesDirectory = new File(targetDir, "generated-arc-sources");
    } else {
        File buildDir = new File("build");
        if (buildDir.canWrite()) {
            // Gradle build
            generatedSourcesDirectory = new File(buildDir, "generated-arc-sources");
        } else {
            generatedSourcesDirectory = new File("quarkus-component-test/generated-arc-sources");
        }
    }
    return new File(new File(generatedSourcesDirectory, nameToPath(testClass.getPackage().getName())),
            ComponentsProvider.class.getSimpleName());
}
}
|
Thanks for pointing this out. After rethinking the semantics of `read()`, I think changing [NoFetchingInput#readBytes()](https://github.com/apache/flink/blob/9d2ae5572897f3e2d9089414261a250cfc2a2ab8/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/NoFetchingInput.java#L115-L140) is the better way to go. As https://docs.oracle.com/javase/8/docs/api/java/io/FileInputStream.html#read-- explains,
|
public int read(byte[] b, int off, int len) throws IOException {
    // Follows the InputStream.read(byte[], int, int) contract: copies up to `len`
    // bytes into `b` starting at `off` and returns the number of bytes copied.
    final int bytesLeft = data.length - index;
    if (bytesLeft > 0) {
        final int bytesToCopy = Math.min(len, bytesLeft);
        System.arraycopy(data, index, b, off, bytesToCopy);
        index += bytesToCopy;
        return bytesToCopy;
    } else {
        // At end of data: a zero-length request must return 0, otherwise -1 (EOF)
        return len == 0 ? 0 : -1;
    }
}
|
return len == 0 ? 0 : -1;
|
public int read(byte[] b, int off, int len) throws IOException {
    // Copies up to `len` bytes of the backing array into `b` starting at `off`,
    // honoring the InputStream.read contract (0 for a zero-length request, -1 at EOF).
    int remaining = data.length - index;
    if (remaining <= 0) {
        return len == 0 ? 0 : -1;
    }
    int copied = Math.min(len, remaining);
    System.arraycopy(data, index, b, off, copied);
    index += copied;
    return copied;
}
|
class ByteStateHandleInputStream extends FSDataInputStream {
    // In-memory seekable stream over a byte[] snapshot; `index` is the read position.
    private final byte[] data;
    private int index;
    public ByteStateHandleInputStream(byte[] data) {
        this.data = data;
    }
    @Override
    public void seek(long desired) throws IOException {
        // Seeking to data.length (one past the last byte) is allowed: it positions at EOF
        if (desired >= 0 && desired <= data.length) {
            index = (int) desired;
        } else {
            throw new IOException("position out of bounds");
        }
    }
    @Override
    public long getPos() throws IOException {
        return index;
    }
    @Override
    public int read() throws IOException {
        // Mask with 0xFF so bytes come back as unsigned values (0-255); -1 at EOF
        return index < data.length ? data[index++] & 0xFF : -1;
    }
    @Override
    // NOTE(review): this trailing @Override has no method attached - the
    // read(byte[], int, int) override appears to have been elided from this fragment;
    // as written the class will not compile. Restore the method or drop the annotation.
}
|
class ByteStateHandleInputStream extends FSDataInputStream {
    // In-memory seekable stream over a byte[] snapshot; `index` is the read position.
    private final byte[] data;
    private int index;
    public ByteStateHandleInputStream(byte[] data) {
        this.data = data;
    }
    @Override
    public void seek(long desired) throws IOException {
        // Seeking to data.length (one past the last byte) is allowed: it positions at EOF
        if (desired >= 0 && desired <= data.length) {
            index = (int) desired;
        } else {
            throw new IOException("position out of bounds");
        }
    }
    @Override
    public long getPos() throws IOException {
        return index;
    }
    @Override
    public int read() throws IOException {
        // Mask with 0xFF so bytes come back as unsigned values (0-255); -1 at EOF
        return index < data.length ? data[index++] & 0xFF : -1;
    }
    @Override
    // NOTE(review): this trailing @Override has no method attached - the
    // read(byte[], int, int) override appears to have been elided from this fragment;
    // as written the class will not compile. Restore the method or drop the annotation.
}
|
These changes are not covered by tests. Consider adding a test in `PrintConnectorITCase` that verifies the printed result includes this `printIdentifier` (including the appended static partition key/value pairs).
|
public SinkRuntimeProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
    DataStructureConverter converter = context.createDataStructureConverter(type);
    // Fix: build the identifier in a local variable instead of mutating the
    // `printIdentifier` field inside the lambda. The previous version appended the
    // static partitions to the field on every call, so invoking this method more than
    // once (or calling copy() afterwards) produced a duplicated/garbled identifier.
    String identifier = printIdentifier;
    for (Map.Entry<String, String> partition : staticPartitions.entrySet()) {
        identifier = (null != identifier ? identifier + ":" : "")
                + partition.getKey() + "=" + partition.getValue();
    }
    return SinkFunctionProvider.of(
            new RowDataPrintFunction(converter, identifier, stdErr), parallelism);
}
|
});
|
public SinkRuntimeProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
    // Builds the runtime print function; the identifier is extended with the static
    // partition key/value pairs in insertion order (staticPartitions is a LinkedHashMap).
    DataStructureConverter converter = context.createDataStructureConverter(type);
    staticPartitions.forEach(
            (key, value) -> {
                // NOTE(review): this mutates the printIdentifier field, so calling this
                // method more than once appends the partitions again - consider
                // accumulating into a local variable instead.
                printIdentifier = null != printIdentifier ? printIdentifier + ":" : "";
                printIdentifier += key + "=" + value;
            });
    return SinkFunctionProvider.of(
            new RowDataPrintFunction(converter, printIdentifier, stdErr), parallelism);
}
|
class PrintSink implements DynamicTableSink, SupportsPartitioning {
    // Table sink that prints every row to stdout/stderr, optionally prefixed with a
    // print identifier extended by the applied static partition values.
    private final DataType type;
    // Mutable: getSinkRuntimeProvider() appends static partition key/value pairs
    private String printIdentifier;
    private final boolean stdErr;
    private final @Nullable Integer parallelism;
    private final List<String> partitionKeys;
    // LinkedHashMap keeps the declared partition key order
    private Map<String, String> staticPartitions = new LinkedHashMap<>();
    private PrintSink(
            DataType type,
            List<String> partitionKeys,
            String printIdentifier,
            boolean stdErr,
            Integer parallelism) {
        this.type = type;
        this.partitionKeys = partitionKeys;
        this.printIdentifier = printIdentifier;
        this.stdErr = stdErr;
        this.parallelism = parallelism;
    }
    @Override
    public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
        // Accepts whatever changelog mode the planner requests (prints all row kinds)
        return requestedMode;
    }
    @Override
    // NOTE(review): duplicated @Override - the getSinkRuntimeProvider(Context) method
    // appears to have been elided from this fragment between these two annotations;
    // as written the class will not compile.
    @Override
    public DynamicTableSink copy() {
        return new PrintSink(type, partitionKeys, printIdentifier, stdErr, parallelism);
    }
    @Override
    public String asSummaryString() {
        return "Print to " + (stdErr ? "System.err" : "System.out");
    }
    @Override
    public void applyStaticPartition(Map<String, String> partition) {
        // Keep only the declared partition keys, preserving their declared order
        staticPartitions = new LinkedHashMap<>();
        for (String partitionCol : partitionKeys) {
            if (partition.containsKey(partitionCol)) {
                staticPartitions.put(partitionCol, partition.get(partitionCol));
            }
        }
    }
}
|
class PrintSink implements DynamicTableSink, SupportsPartitioning {
private final DataType type;
private String printIdentifier;
private final boolean stdErr;
private final @Nullable Integer parallelism;
private final List<String> partitionKeys;
private Map<String, String> staticPartitions = new LinkedHashMap<>();
private PrintSink(
DataType type,
List<String> partitionKeys,
String printIdentifier,
boolean stdErr,
Integer parallelism) {
this.type = type;
this.partitionKeys = partitionKeys;
this.printIdentifier = printIdentifier;
this.stdErr = stdErr;
this.parallelism = parallelism;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
return requestedMode;
}
@Override
@Override
public DynamicTableSink copy() {
return new PrintSink(type, partitionKeys, printIdentifier, stdErr, parallelism);
}
@Override
public String asSummaryString() {
return "Print to " + (stdErr ? "System.err" : "System.out");
}
@Override
public void applyStaticPartition(Map<String, String> partition) {
staticPartitions = new LinkedHashMap<>();
for (String partitionCol : partitionKeys) {
if (partition.containsKey(partitionCol)) {
staticPartitions.put(partitionCol, partition.get(partitionCol));
}
}
}
}
|
Can we consider adding an integration test to cover this line? Alternatively, we could add a test in the `remote.management` module.
|
public Object getDetail(String detailKey) {
return details.getOrDefault(detailKey, null);
}
|
return details.getOrDefault(detailKey, null);
|
public Object getDetail(String detailKey) {
return details.getOrDefault(detailKey, null);
}
|
class ArtifactImpl extends Artifact {
private final Map<String, Object> details;
public ArtifactImpl(String name, ArtifactType type) {
super(name, type);
this.details = new HashMap<>();
}
private void addDetail(String detailsKey, Object value) {
this.details.put(detailsKey, value);
}
@Override
}
|
class ArtifactImpl extends Artifact {
private final Map<String, Object> details;
public ArtifactImpl(String name, ArtifactType type) {
super(name, type);
this.details = new HashMap<>();
}
private void addDetail(String detailsKey, Object value) {
this.details.put(detailsKey, value);
}
@Override
@Override
public Map<String, Object> getAllDetails() {
return Collections.unmodifiableMap(details);
}
}
|
@mosche, what about switching from `AtomicInteger` to `Integer` as @adude3141 suggested? Indeed, this counter is always accessed in a synchronised block, so there is no need to add an extra sync mechanism. And by the way it will also allow us to remove the `SuppressWarning`
|
private void setupSharedProducer() {
synchronized (producerRefCount) {
if (producer == null) {
producer =
spec.getAWSClientsProvider().createKinesisProducer(spec.producerConfiguration());
producerRefCount.set(0);
}
}
producerRefCount.incrementAndGet();
}
|
}
|
private void setupSharedProducer() {
synchronized (KinesisWriterFn.class) {
if (producer == null) {
producer =
spec.getAWSClientsProvider()
.createKinesisProducer(spec.createProducerConfiguration());
producerRefCount = 0;
}
producerRefCount++;
}
}
|
class KinesisWriterFn extends DoFn<byte[], Void> {
private static final int MAX_NUM_FAILURES = 10;
/** Usage count of static, shared Kinesis producer. */
private static final AtomicInteger producerRefCount = new AtomicInteger();
/** Static, shared Kinesis producer. */
private static IKinesisProducer producer;
private final KinesisIO.Write spec;
private transient KinesisPartitioner partitioner;
private transient LinkedBlockingDeque<KinesisWriteException> failures;
private transient List<Future<UserRecordResult>> putFutures;
KinesisWriterFn(KinesisIO.Write spec) {
this.spec = spec;
}
/**
* Initialize statically shared Kinesis producer if required and count usage.
*
* <p>NOTE: If there is, for whatever reasons, another instance of a {@link KinesisWriterFn}
* with different producer properties or even a different implementation of {@link
* AWSClientsProvider}, these changes will be silently discarded in favor of an existing
* producer instance.
*/
@SuppressFBWarnings("JLM_JSR166_UTILCONCURRENT_MONITORENTER")
/**
* Discard statically shared producer if it is not used anymore according to the usage count.
*/
@SuppressFBWarnings("JLM_JSR166_UTILCONCURRENT_MONITORENTER")
private void teardownSharedProducer() {
synchronized (producerRefCount) {
if (producerRefCount.decrementAndGet() == 0) {
if (producer == null) {
return;
}
if (producer.getOutstandingRecordsCount() > 0) {
producer.flushSync();
}
producer.destroy();
producer = null;
}
}
}
@Setup
public void setup() {
setupSharedProducer();
if (spec.getPartitioner() != null) {
partitioner = spec.getPartitioner();
}
}
@StartBundle
public void startBundle() {
putFutures = Collections.synchronizedList(new ArrayList<>());
/** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */
failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES);
}
/**
* It adds a record asynchronously which then should be delivered by Kinesis producer in
* background (Kinesis producer forks native processes to do this job).
*
* <p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL
* supports two types of batching - aggregation and collection - and they can be configured by
* producer properties.
*
* <p>More details can be found here: <a
* href="https:
* Concepts</a> and <a
* href="https:
* the KPL</a>
*/
@ProcessElement
public void processElement(ProcessContext c) {
ByteBuffer data = ByteBuffer.wrap(c.element());
String partitionKey = spec.getPartitionKey();
String explicitHashKey = null;
if (partitioner != null) {
partitionKey = partitioner.getPartitionKey(c.element());
explicitHashKey = partitioner.getExplicitHashKey(c.element());
}
ListenableFuture<UserRecordResult> f =
producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data);
putFutures.add(f);
}
@FinishBundle
public void finishBundle() throws Exception {
flushBundle();
}
/**
* Flush outstanding records until the total number of failed records will be less than 0 or
* the number of retries will be exhausted. The retry timeout starts from 1 second and it
* doubles on every iteration.
*/
private void flushBundle() throws InterruptedException, ExecutionException, IOException {
int retries = spec.getRetries();
int numFailedRecords;
int retryTimeout = 1000;
String message = "";
do {
numFailedRecords = 0;
producer.flush();
for (Future<UserRecordResult> f : putFutures) {
UserRecordResult result = f.get();
if (!result.isSuccessful()) {
numFailedRecords++;
}
}
Thread.sleep(retryTimeout);
retryTimeout *= 2;
} while (numFailedRecords > 0 && retries-- > 0);
if (numFailedRecords > 0) {
for (Future<UserRecordResult> f : putFutures) {
UserRecordResult result = f.get();
if (!result.isSuccessful()) {
failures.offer(
new KinesisWriteException(
"Put record was not successful.", new UserRecordFailedException(result)));
}
}
message =
String.format(
"After [%d] retries, number of failed records [%d] is still greater than 0",
spec.getRetries(), numFailedRecords);
LOG.error(message);
}
checkForFailures(message);
}
/** If any write has asynchronously failed, fail the bundle with a useful error. */
private void checkForFailures(String message) throws IOException {
if (failures.isEmpty()) {
return;
}
StringBuilder logEntry = new StringBuilder();
logEntry.append(message).append(System.lineSeparator());
int i = 0;
while (!failures.isEmpty()) {
i++;
KinesisWriteException exc = failures.remove();
logEntry.append(System.lineSeparator()).append(exc.getMessage());
Throwable cause = exc.getCause();
if (cause != null) {
logEntry.append(": ").append(cause.getMessage());
if (cause instanceof UserRecordFailedException) {
List<Attempt> attempts =
((UserRecordFailedException) cause).getResult().getAttempts();
for (Attempt attempt : attempts) {
if (attempt.getErrorMessage() != null) {
logEntry.append(System.lineSeparator()).append(attempt.getErrorMessage());
}
}
}
}
}
String errorMessage =
String.format(
"Some errors occurred writing to Kinesis. First %d errors: %s",
i, logEntry.toString());
throw new IOException(errorMessage);
}
@Teardown
public void teardown() throws Exception {
teardownSharedProducer();
}
}
|
class KinesisWriterFn extends DoFn<byte[], Void> {
private static final int MAX_NUM_FAILURES = 10;
/** Usage count of static, shared Kinesis producer. */
private static int producerRefCount = 0;
/** Static, shared Kinesis producer. */
private static IKinesisProducer producer;
private final KinesisIO.Write spec;
private transient KinesisPartitioner partitioner;
private transient LinkedBlockingDeque<KinesisWriteException> failures;
private transient List<Future<UserRecordResult>> putFutures;
KinesisWriterFn(KinesisIO.Write spec) {
this.spec = spec;
}
/**
* Initialize statically shared Kinesis producer if required and count usage.
*
* <p>NOTE: If there is, for whatever reasons, another instance of a {@link KinesisWriterFn}
* with different producer properties or even a different implementation of {@link
* AWSClientsProvider}, these changes will be silently discarded in favor of an existing
* producer instance.
*/
/**
* Discard statically shared producer if it is not used anymore according to the usage count.
*/
private void teardownSharedProducer() {
IKinesisProducer obsolete = null;
synchronized (KinesisWriterFn.class) {
if (--producerRefCount == 0) {
obsolete = producer;
producer = null;
}
}
if (obsolete != null) {
obsolete.flushSync();
obsolete.destroy();
}
}
@Setup
public void setup() {
setupSharedProducer();
if (spec.getPartitioner() != null) {
partitioner = spec.getPartitioner();
}
}
@StartBundle
public void startBundle() {
putFutures = Collections.synchronizedList(new ArrayList<>());
/** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */
failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES);
}
/**
* It adds a record asynchronously which then should be delivered by Kinesis producer in
* background (Kinesis producer forks native processes to do this job).
*
* <p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL
* supports two types of batching - aggregation and collection - and they can be configured by
* producer properties.
*
* <p>More details can be found here: <a
* href="https:
* Concepts</a> and <a
* href="https:
* the KPL</a>
*/
@ProcessElement
public void processElement(ProcessContext c) {
ByteBuffer data = ByteBuffer.wrap(c.element());
String partitionKey = spec.getPartitionKey();
String explicitHashKey = null;
if (partitioner != null) {
partitionKey = partitioner.getPartitionKey(c.element());
explicitHashKey = partitioner.getExplicitHashKey(c.element());
}
ListenableFuture<UserRecordResult> f =
producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data);
putFutures.add(f);
}
@FinishBundle
public void finishBundle() throws Exception {
flushBundle();
}
/**
* Flush outstanding records until the total number of failed records will be less than 0 or
* the number of retries will be exhausted. The retry timeout starts from 1 second and it
* doubles on every iteration.
*/
private void flushBundle() throws InterruptedException, ExecutionException, IOException {
int retries = spec.getRetries();
int numFailedRecords;
int retryTimeout = 1000;
String message = "";
do {
numFailedRecords = 0;
producer.flush();
for (Future<UserRecordResult> f : putFutures) {
UserRecordResult result = f.get();
if (!result.isSuccessful()) {
numFailedRecords++;
}
}
Thread.sleep(retryTimeout);
retryTimeout *= 2;
} while (numFailedRecords > 0 && retries-- > 0);
if (numFailedRecords > 0) {
for (Future<UserRecordResult> f : putFutures) {
UserRecordResult result = f.get();
if (!result.isSuccessful()) {
failures.offer(
new KinesisWriteException(
"Put record was not successful.", new UserRecordFailedException(result)));
}
}
message =
String.format(
"After [%d] retries, number of failed records [%d] is still greater than 0",
spec.getRetries(), numFailedRecords);
LOG.error(message);
}
checkForFailures(message);
}
/** If any write has asynchronously failed, fail the bundle with a useful error. */
private void checkForFailures(String message) throws IOException {
if (failures.isEmpty()) {
return;
}
StringBuilder logEntry = new StringBuilder();
logEntry.append(message).append(System.lineSeparator());
int i = 0;
while (!failures.isEmpty()) {
i++;
KinesisWriteException exc = failures.remove();
logEntry.append(System.lineSeparator()).append(exc.getMessage());
Throwable cause = exc.getCause();
if (cause != null) {
logEntry.append(": ").append(cause.getMessage());
if (cause instanceof UserRecordFailedException) {
List<Attempt> attempts =
((UserRecordFailedException) cause).getResult().getAttempts();
for (Attempt attempt : attempts) {
if (attempt.getErrorMessage() != null) {
logEntry.append(System.lineSeparator()).append(attempt.getErrorMessage());
}
}
}
}
}
String errorMessage =
String.format(
"Some errors occurred writing to Kinesis. First %d errors: %s",
i, logEntry.toString());
throw new IOException(errorMessage);
}
@Teardown
public void teardown() throws Exception {
teardownSharedProducer();
}
}
|
This code appears to be substituting calls to `getQualifiedUser()` with calls to `getUser()`. The review of such a change depends upon the exact implementation of these two methods, which is not included in the snippet provided. Broadly, this might imply a significant shift from using "qualified" (potentially containing additional attributes or information) to just basic username. However, given what we have here, here are some general points to consider: 1. **Consistency**: This change is consistently applied throughout the code. It's good to see that all instances of the old method are replaced with the new ones, indicating thoroughness. 2. **Exception Messages**: In your exception messages, you directly print out the username. Consider if this may lead to privacy and security issues - depending on who can access these logs, you might be unintentionally leaking user information. 3. **Testing**: After this significant change, it would be important to immediately verify that the application still behaves as expected for various scenarios: existing users, new users who need to be created, handling errors, duplicates, etc. Remember, before making this kind of modification, it's crucial to understand fully how `getQualifiedUser()` and `getUser()` differ in their implementation and use. Validate that replacing one with the other won't unintentionally remove any required functionality or introduce unexpected behavior. Finally, while there doesn't appear to be any direct syntax errors introduced by the changes, without context and broader understanding of the entire codebase, it's impossible to guarantee that these modifications won't introduce logic or functionality errors. Always make sure to conduct comprehensive testing after changes like this.
|
private boolean hasUserNameNoLock(String userName) {
for (UserIdentity userIdentity : userToAuthenticationInfo.keySet()) {
if (userIdentity.getUser().equals(userName)) {
return true;
}
}
return false;
}
|
}
|
private boolean hasUserNameNoLock(String userName) {
for (UserIdentity userIdentity : userToAuthenticationInfo.keySet()) {
if (userIdentity.getUser().equals(userName)) {
return true;
}
}
return false;
}
|
class UserAuthInfoTreeMap extends TreeMap<UserIdentity, UserAuthenticationInfo> {
public UserAuthInfoTreeMap() {
super((o1, o2) -> {
int compareHostScore = scoreUserIdentityHost(o1).compareTo(scoreUserIdentityHost(o2));
if (compareHostScore != 0) {
return compareHostScore;
}
int compareByHost = o1.getHost().compareTo(o2.getHost());
if (compareByHost != 0) {
return compareByHost;
}
return o1.getUser().compareTo(o2.getUser());
});
}
/**
* If someone log in from 10.1.1.1 with name "test_user", the matching UserIdentity
* can be sorted in the below order,
* 1. test_user@10.1.1.1
* 2. test_user@["hostname"], in which "hostname" can be resolved to 10.1.1.1.
* If multiple hostnames match the login ip, just return one randomly.
* 3. test_user@%, as a fallback.
*/
private static Integer scoreUserIdentityHost(UserIdentity userIdentity) {
if (userIdentity.isDomain()) {
return 2;
}
if (userIdentity.getHost().equals(UserAuthenticationInfo.ANY_HOST)) {
return 3;
}
return 1;
}
}
|
class UserAuthInfoTreeMap extends TreeMap<UserIdentity, UserAuthenticationInfo> {
public UserAuthInfoTreeMap() {
super((o1, o2) -> {
int compareHostScore = scoreUserIdentityHost(o1).compareTo(scoreUserIdentityHost(o2));
if (compareHostScore != 0) {
return compareHostScore;
}
int compareByHost = o1.getHost().compareTo(o2.getHost());
if (compareByHost != 0) {
return compareByHost;
}
return o1.getUser().compareTo(o2.getUser());
});
}
/**
* If someone log in from 10.1.1.1 with name "test_user", the matching UserIdentity
* can be sorted in the below order,
* 1. test_user@10.1.1.1
* 2. test_user@["hostname"], in which "hostname" can be resolved to 10.1.1.1.
* If multiple hostnames match the login ip, just return one randomly.
* 3. test_user@%, as a fallback.
*/
private static Integer scoreUserIdentityHost(UserIdentity userIdentity) {
if (userIdentity.isDomain()) {
return 2;
}
if (userIdentity.getHost().equals(UserAuthenticationInfo.ANY_HOST)) {
return 3;
}
return 1;
}
}
|
also worth adding the low-bound comparison, e.g. `duration.getSeconds > timeout based on the httpTimeoutPolicy`. By comparing only the high bound, the test will not catch the issue
|
private void validateDataPlaneRetryPolicyResponseTimeouts(CosmosDiagnostics cosmosDiagnostics) {
List<ClientSideRequestStatistics.GatewayStatistics> gatewayStatisticsList = diagnosticsAccessor.getClientSideRequestStatistics(cosmosDiagnostics)
.stream()
.map(ClientSideRequestStatistics::getGatewayStatisticsList)
.flatMap(Collection::stream)
.collect(Collectors.toList());
for (ClientSideRequestStatistics.GatewayStatistics gs : gatewayStatisticsList) {
for (RequestTimeline.Event event : gs.getRequestTimeline()) {
Duration durationInMillis = event.getDuration();
if (durationInMillis != null) {
assertThat(durationInMillis.getSeconds()).isLessThanOrEqualTo(62);
}
}
}
}
|
assertThat(durationInMillis.getSeconds()).isLessThanOrEqualTo(62);
|
private void validateDataPlaneRetryPolicyResponseTimeouts(CosmosDiagnostics cosmosDiagnostics) {
List<ClientSideRequestStatistics.GatewayStatistics> gatewayStatisticsList = diagnosticsAccessor.getClientSideRequestStatistics(cosmosDiagnostics)
.stream()
.map(ClientSideRequestStatistics::getGatewayStatisticsList)
.flatMap(Collection::stream)
.collect(Collectors.toList());
for (ClientSideRequestStatistics.GatewayStatistics gs : gatewayStatisticsList) {
if (gs.getStatusCode() == HttpConstants.StatusCodes.REQUEST_TIMEOUT) {
for (RequestTimeline.Event event : gs.getRequestTimeline()) {
Duration durationInMillis = event.getDuration();
if (durationInMillis != null) {
assertThat(durationInMillis.getSeconds()).isLessThanOrEqualTo(62);
assertThat(durationInMillis.getSeconds()).isGreaterThanOrEqualTo(60);
}
}
}
}
}
|
class WebExceptionRetryPolicyE2ETests extends TestSuiteBase {
private final static
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagnosticsAccessor =
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
private CosmosAsyncClient cosmosAsyncClient;
private CosmosAsyncContainer cosmosAsyncContainer;
@Factory(dataProvider = "clientBuildersWithSessionConsistency")
public WebExceptionRetryPolicyE2ETests(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
this.subscriberValidationTimeout = TIMEOUT;
}
@BeforeClass(groups = {"multi-master"}, timeOut = TIMEOUT)
public void beforeClass() {
this.cosmosAsyncClient = getClientBuilder().buildAsyncClient();
this.cosmosAsyncContainer = getSharedMultiPartitionCosmosContainerWithIdAsPartitionKey(cosmosAsyncClient);
}
@AfterClass(groups = {"multi-master"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(cosmosAsyncClient);
}
@DataProvider(name = "operationTypeProvider")
public static Object[][] operationTypeProvider() {
return new Object[][]{
{FaultInjectionOperationType.READ_ITEM, OperationType.Read}
};
}
@Test(groups = {"multi-master"}, timeOut = TIMEOUT)
public void addressRefreshHttpTimeout() {
if (BridgeInternal
.getContextClient(this.cosmosAsyncClient)
.getConnectionPolicy()
.getConnectionMode() != ConnectionMode.DIRECT) {
throw new SkipException("addressRefreshHttpTimeout() is only meant for DIRECT mode");
}
TestItem newItem = TestItem.createNewItem();
this.cosmosAsyncContainer.createItem(newItem).block();
FaultInjectionRule addressRefreshDelayRule = new FaultInjectionRuleBuilder("addressRefreshDelayRule")
.condition(
new FaultInjectionConditionBuilder()
.operationType(FaultInjectionOperationType.METADATA_REQUEST_ADDRESS_REFRESH)
.build())
.result(
FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.RESPONSE_DELAY)
.delay(Duration.ofSeconds(14))
.times(4)
.build()
)
.build();
FaultInjectionRule serverGoneRule = new FaultInjectionRuleBuilder("serverGoneRule")
.condition(
new FaultInjectionConditionBuilder()
.operationType(FaultInjectionOperationType.READ_ITEM)
.build())
.result(
FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.GONE)
.times(4)
.build()
)
.build();
CosmosFaultInjectionHelper
.configureFaultInjectionRules(
cosmosAsyncContainer,
Arrays.asList(addressRefreshDelayRule, serverGoneRule)).block();
try {
cosmosAsyncContainer
.readItem(newItem.getId(), new PartitionKey(newItem.getId()), TestItem.class)
.block();
fail("addressRefreshHttpTimeout() should fail due to addressRefresh timeout");
} catch (CosmosException e) {
System.out.println("dataPlaneRequestHttpTimeout() Diagnostics " + " " + e.getDiagnostics());
assertThat(e.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.REQUEST_TIMEOUT);
assertThat(e.getSubStatusCode()).isEqualTo(HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
validateAddressRefreshRetryPolicyResponseTimeouts(e.getDiagnostics());
} finally {
addressRefreshDelayRule.disable();
serverGoneRule.disable();
}
}
@Test(groups = {"multi-master"}, dataProvider = "operationTypeProvider", timeOut = 8 * TIMEOUT)
public void dataPlaneRequestHttpTimeout(
FaultInjectionOperationType faultInjectionOperationType,
OperationType operationType) {
if (BridgeInternal
.getContextClient(this.cosmosAsyncClient)
.getConnectionPolicy()
.getConnectionMode() != ConnectionMode.GATEWAY) {
throw new SkipException("queryPlanHttpTimeoutWillNotMarkRegionUnavailable() is only meant for GATEWAY mode");
}
TestItem newItem = TestItem.createNewItem();
this.cosmosAsyncContainer.createItem(newItem).block();
FaultInjectionRule requestHttpTimeoutRule = new FaultInjectionRuleBuilder("requestHttpTimeoutRule" + UUID.randomUUID())
.condition(
new FaultInjectionConditionBuilder()
.operationType(faultInjectionOperationType)
.connectionType(FaultInjectionConnectionType.GATEWAY)
.build())
.result(
FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.RESPONSE_DELAY)
.delay(Duration.ofSeconds(66))
.times(4)
.build()
)
.build();
CosmosFaultInjectionHelper.configureFaultInjectionRules(this.cosmosAsyncContainer, Arrays.asList(requestHttpTimeoutRule)).block();
try {
CosmosDiagnostics cosmosDiagnostics =
this.performDocumentOperation(cosmosAsyncContainer, operationType, newItem).block();
System.out.println("dataPlaneRequestHttpTimeout() Diagnostics " + " " + cosmosDiagnostics);
validateDataPlaneRetryPolicyResponseTimeouts(cosmosDiagnostics);
} catch (Exception e) {
fail("dataPlaneRequestHttpTimeout() should succeed for operationType " + operationType, e);
} finally {
requestHttpTimeoutRule.disable();
}
}
private void validateAddressRefreshRetryPolicyResponseTimeouts(CosmosDiagnostics cosmosDiagnostics) {
List<ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsList = diagnosticsAccessor.getClientSideRequestStatistics(cosmosDiagnostics)
.stream()
.map(ClientSideRequestStatistics::getAddressResolutionStatistics)
.flatMap(m -> m.values().stream())
.sorted(Comparator.comparing(ClientSideRequestStatistics.AddressResolutionStatistics::getStartTimeUTC))
.collect(Collectors.toList());
assertThat(MILLIS.between(addressResolutionStatisticsList.get(0).getStartTimeUTC(), addressResolutionStatisticsList.get(0).getEndTimeUTC())).isLessThanOrEqualTo(600);
assertThat(MILLIS.between(addressResolutionStatisticsList.get(1).getStartTimeUTC(), addressResolutionStatisticsList.get(1).getEndTimeUTC())).isLessThanOrEqualTo(600);
assertThat(SECONDS.between(addressResolutionStatisticsList.get(2).getStartTimeUTC(), addressResolutionStatisticsList.get(2).getEndTimeUTC())).isLessThanOrEqualTo(6);
assertThat(SECONDS.between(addressResolutionStatisticsList.get(3).getStartTimeUTC(), addressResolutionStatisticsList.get(3).getEndTimeUTC())).isLessThanOrEqualTo(11);
}
private Mono<CosmosDiagnostics> performDocumentOperation(
CosmosAsyncContainer cosmosAsyncContainer,
OperationType operationType,
TestItem createdItem) {
if (operationType == OperationType.Query) {
CosmosQueryRequestOptions queryRequestOptions = new CosmosQueryRequestOptions();
String query = String.format("SELECT * from c where c.id = '%s'", createdItem.getId());
FeedResponse<TestItem> itemFeedResponse =
cosmosAsyncContainer.queryItems(query, queryRequestOptions, TestItem.class).byPage().blockFirst();
return Mono.just(itemFeedResponse.getCosmosDiagnostics());
}
if (operationType == OperationType.Read
|| operationType == OperationType.Delete
|| operationType == OperationType.Replace
|| operationType == OperationType.Create
|| operationType == OperationType.Patch
|| operationType == OperationType.Upsert) {
if (operationType == OperationType.Read) {
return cosmosAsyncContainer
.readItem(
createdItem.getId(),
new PartitionKey(createdItem.getId()),
TestItem.class
)
.map(itemResponse -> itemResponse.getDiagnostics());
}
if (operationType == OperationType.Replace) {
return cosmosAsyncContainer
.replaceItem(
createdItem,
createdItem.getId(),
new PartitionKey(createdItem.getId()))
.map(itemResponse -> itemResponse.getDiagnostics());
}
if (operationType == OperationType.Delete) {
return cosmosAsyncContainer.deleteItem(createdItem, null).map(itemResponse -> itemResponse.getDiagnostics());
}
if (operationType == OperationType.Create) {
return cosmosAsyncContainer.createItem(TestItem.createNewItem()).map(itemResponse -> itemResponse.getDiagnostics());
}
if (operationType == OperationType.Upsert) {
return cosmosAsyncContainer.upsertItem(TestItem.createNewItem()).map(itemResponse -> itemResponse.getDiagnostics());
}
if (operationType == OperationType.Patch) {
CosmosPatchOperations patchOperations =
CosmosPatchOperations
.create()
.add("newPath", "newPath");
return cosmosAsyncContainer
.patchItem(createdItem.getId(), new PartitionKey(createdItem.getId()), patchOperations, TestItem.class)
.map(itemResponse -> itemResponse.getDiagnostics());
}
}
if (operationType == OperationType.ReadFeed) {
List<FeedRange> feedRanges = cosmosAsyncContainer.getFeedRanges().block();
CosmosChangeFeedRequestOptions changeFeedRequestOptions =
CosmosChangeFeedRequestOptions.createForProcessingFromBeginning(feedRanges.get(0));
FeedResponse<TestItem> firstPage = cosmosAsyncContainer
.queryChangeFeed(changeFeedRequestOptions, TestItem.class)
.byPage()
.blockFirst();
return Mono.just(firstPage.getCosmosDiagnostics());
}
throw new IllegalArgumentException("The operation type is not supported");
}
}
|
class WebExceptionRetryPolicyE2ETests extends TestSuiteBase {
private final static Logger logger = LoggerFactory.getLogger(WebExceptionRetryPolicyE2ETests.class);
private final static
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagnosticsAccessor =
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
private CosmosAsyncClient cosmosAsyncClient;
private CosmosAsyncContainer cosmosAsyncContainer;
@Factory(dataProvider = "clientBuildersWithSessionConsistency")
public WebExceptionRetryPolicyE2ETests(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
this.subscriberValidationTimeout = TIMEOUT;
}
@BeforeClass(groups = {"multi-master"}, timeOut = TIMEOUT)
public void beforeClass() {
this.cosmosAsyncClient = getClientBuilder().buildAsyncClient();
this.cosmosAsyncContainer = getSharedMultiPartitionCosmosContainerWithIdAsPartitionKey(cosmosAsyncClient);
}
@AfterClass(groups = {"multi-master"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(cosmosAsyncClient);
}
@DataProvider(name = "operationTypeProvider")
public static Object[][] operationTypeProvider() {
return new Object[][]{
{FaultInjectionOperationType.READ_ITEM, OperationType.Read},
{FaultInjectionOperationType.QUERY_ITEM, OperationType.Query}
};
}
@Test(groups = {"multi-master"}, timeOut = TIMEOUT)
public void addressRefreshHttpTimeout() {
if (BridgeInternal
.getContextClient(this.cosmosAsyncClient)
.getConnectionPolicy()
.getConnectionMode() != ConnectionMode.DIRECT) {
throw new SkipException("addressRefreshHttpTimeout() is only meant for DIRECT mode");
}
TestItem newItem = TestItem.createNewItem();
this.cosmosAsyncContainer.createItem(newItem).block();
FaultInjectionRule addressRefreshDelayRule = new FaultInjectionRuleBuilder("addressRefreshDelayRule")
.condition(
new FaultInjectionConditionBuilder()
.operationType(FaultInjectionOperationType.METADATA_REQUEST_ADDRESS_REFRESH)
.build())
.result(
FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.RESPONSE_DELAY)
.delay(Duration.ofSeconds(14))
.times(4)
.build()
)
.build();
FaultInjectionRule serverGoneRule = new FaultInjectionRuleBuilder("serverGoneRule")
.condition(
new FaultInjectionConditionBuilder()
.operationType(FaultInjectionOperationType.READ_ITEM)
.build())
.result(
FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.GONE)
.times(4)
.build()
)
.build();
CosmosFaultInjectionHelper
.configureFaultInjectionRules(
cosmosAsyncContainer,
Arrays.asList(addressRefreshDelayRule, serverGoneRule)).block();
try {
cosmosAsyncContainer
.readItem(newItem.getId(), new PartitionKey(newItem.getId()), TestItem.class)
.block();
fail("addressRefreshHttpTimeout() should fail due to addressRefresh timeout");
} catch (CosmosException e) {
logger.info("dataPlaneRequestHttpTimeout() Diagnostics " + " " + e.getDiagnostics());
assertThat(e.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.REQUEST_TIMEOUT);
assertThat(e.getSubStatusCode()).isEqualTo(HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
validateAddressRefreshRetryPolicyResponseTimeouts(e.getDiagnostics());
} finally {
addressRefreshDelayRule.disable();
serverGoneRule.disable();
}
}
@Test(groups = {"multi-master"}, dataProvider = "operationTypeProvider", timeOut = 8 * TIMEOUT)
public void dataPlaneRequestHttpTimeout(
    FaultInjectionOperationType faultInjectionOperationType,
    OperationType operationType) {

    // This test exercises gateway HTTP timeouts; skip in DIRECT mode.
    if (BridgeInternal
        .getContextClient(this.cosmosAsyncClient)
        .getConnectionPolicy()
        .getConnectionMode() != ConnectionMode.GATEWAY) {
        // Fixed: the skip message previously named a different test method.
        throw new SkipException("dataPlaneRequestHttpTimeout() is only meant for GATEWAY mode");
    }

    TestItem newItem = TestItem.createNewItem();
    this.cosmosAsyncContainer.createItem(newItem).block();

    // Delay the first 3 gateway responses past the HTTP timeout; the retry
    // policy is expected to absorb them so the operation still succeeds.
    FaultInjectionRule requestHttpTimeoutRule = new FaultInjectionRuleBuilder("requestHttpTimeoutRule" + UUID.randomUUID())
        .condition(
            new FaultInjectionConditionBuilder()
                .operationType(faultInjectionOperationType)
                .connectionType(FaultInjectionConnectionType.GATEWAY)
                .build())
        .result(
            FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.RESPONSE_DELAY)
                .delay(Duration.ofSeconds(66))
                .times(3)
                .build()
        )
        .build();

    CosmosFaultInjectionHelper.configureFaultInjectionRules(this.cosmosAsyncContainer, Arrays.asList(requestHttpTimeoutRule)).block();

    try {
        CosmosDiagnostics cosmosDiagnostics =
            this.performDocumentOperation(cosmosAsyncContainer, operationType, newItem).block();
        logger.info("dataPlaneRequestHttpTimeout() Diagnostics " + " " + cosmosDiagnostics);
        validateDataPlaneRetryPolicyResponseTimeouts(cosmosDiagnostics);
    } catch (Exception e) {
        fail("dataPlaneRequestHttpTimeout() should succeed for operationType " + operationType, e);
    } finally {
        requestHttpTimeoutRule.disable();
    }
}
@Test(groups = {"multi-master"}, timeOut = 8 * TIMEOUT)
public void writeOperationRequestHttpTimeout() {
    // This test exercises gateway HTTP timeouts; skip in DIRECT mode.
    if (BridgeInternal
        .getContextClient(this.cosmosAsyncClient)
        .getConnectionPolicy()
        .getConnectionMode() != ConnectionMode.GATEWAY) {
        // Fixed: the skip message previously named a different test method.
        throw new SkipException("writeOperationRequestHttpTimeout() is only meant for GATEWAY mode");
    }

    TestItem newItem = TestItem.createNewItem();
    this.cosmosAsyncContainer.createItem(newItem).block();

    // Delay the first 2 gateway responses past the HTTP timeout. The create is
    // expected to surface a 408 — presumably because writes are not retried on
    // response timeouts (idempotency cannot be assumed); TODO confirm.
    FaultInjectionRule requestHttpTimeoutRule = new FaultInjectionRuleBuilder("requestHttpTimeoutRule" + UUID.randomUUID())
        .condition(
            new FaultInjectionConditionBuilder()
                .operationType(FaultInjectionOperationType.CREATE_ITEM)
                .connectionType(FaultInjectionConnectionType.GATEWAY)
                .build())
        .result(
            FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.RESPONSE_DELAY)
                .delay(Duration.ofSeconds(66))
                .times(2)
                .build()
        )
        .build();

    CosmosFaultInjectionHelper.configureFaultInjectionRules(this.cosmosAsyncContainer, Arrays.asList(requestHttpTimeoutRule)).block();

    try {
        // Result intentionally discarded: reaching the next line is the failure.
        this.performDocumentOperation(cosmosAsyncContainer, OperationType.Create, newItem).block();
        fail("writeOperationRequestHttpTimeout() should fail for operationType " + OperationType.Create);
    } catch (CosmosException e) {
        logger.info("writeOperationRequestHttpTimeout() Diagnostics " + " " + e.getDiagnostics());
        assertThat(e.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.REQUEST_TIMEOUT);
    } finally {
        requestHttpTimeoutRule.disable();
    }
}
@Test(groups = {"multi-master"}, timeOut = 8 * TIMEOUT)
public void writeOperationConnectionTimeout() {
    // This test exercises gateway connection timeouts; skip in DIRECT mode.
    if (BridgeInternal
        .getContextClient(this.cosmosAsyncClient)
        .getConnectionPolicy()
        .getConnectionMode() != ConnectionMode.GATEWAY) {
        // Fixed: the skip message previously named a different test method.
        throw new SkipException("writeOperationConnectionTimeout() is only meant for GATEWAY mode");
    }

    TestItem newItem = TestItem.createNewItem();
    this.cosmosAsyncContainer.createItem(newItem).block();

    // Delay connection establishment (not the response) for the first 3
    // attempts. The create is expected to succeed — presumably connect
    // failures occur before the request is sent, so retries are safe even for
    // writes; TODO confirm.
    FaultInjectionRule requestHttpTimeoutRule = new FaultInjectionRuleBuilder("requestHttpTimeoutRule" + UUID.randomUUID())
        .condition(
            new FaultInjectionConditionBuilder()
                .operationType(FaultInjectionOperationType.CREATE_ITEM)
                .connectionType(FaultInjectionConnectionType.GATEWAY)
                .build())
        .result(
            FaultInjectionResultBuilders.getResultBuilder(FaultInjectionServerErrorType.CONNECTION_DELAY)
                .delay(Duration.ofSeconds(66))
                .times(3)
                .build()
        )
        .build();

    CosmosFaultInjectionHelper.configureFaultInjectionRules(this.cosmosAsyncContainer, Arrays.asList(requestHttpTimeoutRule)).block();

    try {
        CosmosDiagnostics cosmosDiagnostics =
            this.performDocumentOperation(cosmosAsyncContainer, OperationType.Create, newItem).block();
        logger.info("writeOperationConnectionTimeout() Diagnostics " + " " + cosmosDiagnostics);
    } catch (CosmosException e) {
        // Fixed: the original had an assertThat after fail(), which was
        // unreachable (fail throws). Attach the exception as the failure
        // cause instead so the diagnostics are preserved in the report.
        fail("writeOperationConnectionTimeout() should pass for operationType " + OperationType.Create, e);
    } finally {
        requestHttpTimeoutRule.disable();
    }
}
// Verifies that the recorded address-resolution attempts used progressively
// larger timeouts, as expected from the address-refresh retry policy.
private void validateAddressRefreshRetryPolicyResponseTimeouts(CosmosDiagnostics cosmosDiagnostics) {
// Flatten every address-resolution attempt from all client-side request
// statistics and order them by start time so retries line up chronologically.
List<ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsList = diagnosticsAccessor.getClientSideRequestStatistics(cosmosDiagnostics)
.stream()
.map(ClientSideRequestStatistics::getAddressResolutionStatistics)
.flatMap(m -> m.values().stream())
.sorted(Comparator.comparing(ClientSideRequestStatistics.AddressResolutionStatistics::getStartTimeUTC))
.collect(Collectors.toList())
// Upper bounds with slack: first attempt under 600ms, second under 6s,
// third under 11s — presumably matching escalating per-attempt timeouts of
// ~0.5s/5s/10s in the retry policy; TODO confirm against the policy config.
// NOTE(review): assumes at least three attempts were recorded; get(0..2)
// throws IndexOutOfBoundsException otherwise.
;
assertThat(MILLIS.between(addressResolutionStatisticsList.get(0).getStartTimeUTC(), addressResolutionStatisticsList.get(0).getEndTimeUTC())).isLessThanOrEqualTo(600);
assertThat(SECONDS.between(addressResolutionStatisticsList.get(1).getStartTimeUTC(), addressResolutionStatisticsList.get(1).getEndTimeUTC())).isLessThanOrEqualTo(6);
assertThat(SECONDS.between(addressResolutionStatisticsList.get(2).getStartTimeUTC(), addressResolutionStatisticsList.get(2).getEndTimeUTC())).isLessThanOrEqualTo(11);
}
// Executes the requested document operation against the container and returns
// its diagnostics. Single entry point used by the fault-injection tests above
// to drive every supported operation type.
// NOTE(review): the Query and ReadFeed branches call blockFirst()/block()
// eagerly instead of staying reactive, so those operations run before the
// returned Mono is subscribed — confirm this is intentional for the tests.
private Mono<CosmosDiagnostics> performDocumentOperation(
CosmosAsyncContainer cosmosAsyncContainer,
OperationType operationType,
TestItem createdItem) {
switch(operationType) {
case Query:
CosmosQueryRequestOptions queryRequestOptions = new CosmosQueryRequestOptions();
String query = String.format("SELECT * from c where c.id = '%s'", createdItem.getId());
// Only the first page is needed for diagnostics.
FeedResponse<TestItem> itemFeedResponse =
cosmosAsyncContainer.queryItems(query, queryRequestOptions, TestItem.class).byPage().blockFirst();
return Mono.just(itemFeedResponse.getCosmosDiagnostics());
case Read:
return cosmosAsyncContainer
.readItem(
createdItem.getId(),
new PartitionKey(createdItem.getId()),
TestItem.class
)
.map(itemResponse -> itemResponse.getDiagnostics());
case Replace:
return cosmosAsyncContainer
.replaceItem(
createdItem,
createdItem.getId(),
new PartitionKey(createdItem.getId()))
.map(itemResponse -> itemResponse.getDiagnostics());
case Delete:
return cosmosAsyncContainer.deleteItem(createdItem, null).map(itemResponse -> itemResponse.getDiagnostics());
case Create:
// Create/Upsert use a fresh item so they do not conflict with createdItem.
return cosmosAsyncContainer.createItem(TestItem.createNewItem()).map(itemResponse -> itemResponse.getDiagnostics());
case Upsert:
return cosmosAsyncContainer.upsertItem(TestItem.createNewItem()).map(itemResponse -> itemResponse.getDiagnostics());
case Patch:
CosmosPatchOperations patchOperations =
CosmosPatchOperations
.create()
.add("newPath", "newPath");
return cosmosAsyncContainer
.patchItem(createdItem.getId(), new PartitionKey(createdItem.getId()), patchOperations, TestItem.class)
.map(itemResponse -> itemResponse.getDiagnostics());
case ReadFeed:
// Change feed over the first feed range only, read from the beginning.
List<FeedRange> feedRanges = cosmosAsyncContainer.getFeedRanges().block();
CosmosChangeFeedRequestOptions changeFeedRequestOptions =
CosmosChangeFeedRequestOptions.createForProcessingFromBeginning(feedRanges.get(0));
FeedResponse<TestItem> firstPage = cosmosAsyncContainer
.queryChangeFeed(changeFeedRequestOptions, TestItem.class)
.byPage()
.blockFirst();
return Mono.just(firstPage.getCosmosDiagnostics());
}
throw new IllegalArgumentException("The operation type is not supported");
}
}
|
Couldn't you inject the Agroal datasource directly? I see Flyway has a method for this: https://github.com/flyway/flyway/blob/master/flyway-core/src/main/java/org/flywaydb/core/api/configuration/FluentConfiguration.java#L735 .
|
// Builds a Flyway instance from the default Agroal datasource settings plus
// the Flyway-specific runtime and build-time configuration.
public Flyway produceFlyway() {
FluentConfiguration configure = Flyway.configure();
// Fail fast when no default datasource has been configured.
String url = agroalRuntimeConfig.defaultDataSource.url
.orElseThrow(() -> new IllegalStateException(
"No default datasource configured. Please specify the quarkus.datasource.* properties"));
String username = agroalRuntimeConfig.defaultDataSource.username.orElse(null);
String password = agroalRuntimeConfig.defaultDataSource.password.orElse(null);
configure.dataSource(url, username, password);
// Apply optional Flyway settings only when present in the configuration.
flywayRuntimeConfig.connectRetries.ifPresent(configure::connectRetries);
flywayRuntimeConfig.schemas.ifPresent(configure::schemas);
flywayRuntimeConfig.table.ifPresent(configure::table);
// NOTE(review): locations is split on "," without filtering blank segments —
// confirm whether empty entries are acceptable to Flyway here.
flywayBuildConfig.locations.map(it -> it.split(",")).ifPresent(configure::locations);
flywayRuntimeConfig.sqlMigrationPrefix.ifPresent(configure::sqlMigrationPrefix);
flywayRuntimeConfig.repeatableSqlMigrationPrefix.ifPresent(configure::repeatableSqlMigrationPrefix);
return configure.load();
}
|
flywayRuntimeConfig.repeatableSqlMigrationPrefix.ifPresent(configure::repeatableSqlMigrationPrefix);
|
// Builds a Flyway instance backed by the injected Agroal datasource, applying
// any Flyway-specific runtime and build-time settings that are present.
public Flyway produceFlyway() {
    FluentConfiguration flywayConfiguration = Flyway.configure().dataSource(dataSource);

    flywayRuntimeConfig.connectRetries.ifPresent(flywayConfiguration::connectRetries);

    // Only pass schemas/locations through when at least one non-blank entry
    // remains after filtering.
    List<String> schemas = filterBlanks(flywayRuntimeConfig.schemas);
    if (!schemas.isEmpty()) {
        flywayConfiguration.schemas(schemas.toArray(new String[0]));
    }

    flywayRuntimeConfig.table.ifPresent(flywayConfiguration::table);

    List<String> locations = filterBlanks(flywayBuildConfig.locations);
    if (!locations.isEmpty()) {
        flywayConfiguration.locations(locations.toArray(new String[0]));
    }

    flywayRuntimeConfig.sqlMigrationPrefix.ifPresent(flywayConfiguration::sqlMigrationPrefix);
    flywayRuntimeConfig.repeatableSqlMigrationPrefix.ifPresent(flywayConfiguration::repeatableSqlMigrationPrefix);

    return flywayConfiguration.load();
}
|
// CDI producer for Flyway. Holds the Agroal and Flyway configuration objects
// that the producer method combines into a Flyway instance.
class FlywayProducer {
private AgroalRuntimeConfig agroalRuntimeConfig;
private FlywayRuntimeConfig flywayRuntimeConfig;
private FlywayBuildConfig flywayBuildConfig;
// NOTE(review): @Produces/@Dependent normally annotate the produceFlyway()
// method, which appears to have been elided from this excerpt; on a void
// setter they would be invalid.
@Produces
@Dependent
public void setAgroalRuntimeConfig(AgroalRuntimeConfig agroalRuntimeConfig) {
this.agroalRuntimeConfig = agroalRuntimeConfig;
}
public void setFlywayRuntimeConfig(FlywayRuntimeConfig flywayRuntimeConfig) {
this.flywayRuntimeConfig = flywayRuntimeConfig;
}
public void setFlywayBuildConfig(FlywayBuildConfig flywayBuildConfig) {
this.flywayBuildConfig = flywayBuildConfig;
}
}
|
// CDI producer for Flyway. The datasource is injected directly instead of
// being rebuilt from URL/username/password configuration.
class FlywayProducer {
@Inject
AgroalDataSource dataSource;
private FlywayRuntimeConfig flywayRuntimeConfig;
private FlywayBuildConfig flywayBuildConfig;
// NOTE(review): @Produces/@Dependent normally annotate the produceFlyway()
// method, which appears to have been elided from this excerpt.
@Produces
@Dependent
// Drops null and empty entries so blank schema/location values are never
// handed to Flyway.
private List<String> filterBlanks(List<String> values) {
return values.stream().filter(it -> it != null && !"".equals(it))
.collect(Collectors.toList());
}
public void setFlywayRuntimeConfig(FlywayRuntimeConfig flywayRuntimeConfig) {
this.flywayRuntimeConfig = flywayRuntimeConfig;
}
public void setFlywayBuildConfig(FlywayBuildConfig flywayBuildConfig) {
this.flywayBuildConfig = flywayBuildConfig;
}
}
|
configRegistry will never be null here, right? `init()` always assigns it before `getTracer` can be called, so this null check looks like dead code.
|
// Builds a Jaeger tracer for the given service using the sampler and reporter
// settings read in init(). The tracerName parameter is not used here.
public Tracer getTracer(String tracerName, String serviceName) {
    // Guard against use before init() has populated the configuration fields.
    if (configRegistry == null) {
        throw new IllegalStateException("Tracer not initialized with configurations");
    }
    Configuration.SamplerConfiguration samplerConfiguration =
            new Configuration.SamplerConfiguration(samplerType, samplerParam);
    Configuration.ReporterConfiguration reporterConfiguration =
            new Configuration.ReporterConfiguration(
                    Boolean.FALSE, hostname, port, reporterFlushInterval, reporterBufferSize);
    Configuration tracerConfiguration =
            new Configuration(serviceName, samplerConfiguration, reporterConfiguration);
    return tracerConfiguration
            .getTracerBuilder()
            .withScopeManager(NoOpScopeManager.INSTANCE)
            .build();
}
|
if (Objects.isNull(configRegistry)) {
|
// Builds a Jaeger tracer for the given service using the sampler and reporter
// settings read in init(). The tracerName parameter is not used here.
public Tracer getTracer(String tracerName, String serviceName) {
// Guard against use before init() has populated the configuration fields.
// NOTE(review): if init() is always called first, this check may be dead code.
if (Objects.isNull(configRegistry)) {
throw new IllegalStateException("Tracer not initialized with configurations");
}
return new Configuration(
serviceName,
new Configuration.SamplerConfiguration(samplerType, samplerParam),
new Configuration.ReporterConfiguration(
Boolean.FALSE, hostname, port, reporterFlushInterval, reporterBufferSize
)
).getTracerBuilder().withScopeManager(NoOpScopeManager.INSTANCE).build();
}
|
// Jaeger-backed implementation of the OpenTracer SPI. init() reads reporter
// and sampler settings from the global ConfigRegistry.
class OpenTracingExtension implements OpenTracer {
private ConfigRegistry configRegistry;
// Jaeger agent host/port the reporter publishes spans to.
private String hostname;
private int port;
// Sampler settings; samplerType is validated against the known sampler types.
private String samplerType;
private Number samplerParam;
private int reporterFlushInterval;
private int reporterBufferSize;
private static final PrintStream console = System.out;
private static final PrintStream consoleError = System.err;
// Reads all Jaeger settings from the ConfigRegistry, falling back to defaults,
// and rejects non-numeric values via InvalidConfigurationException.
@Override
public void init() throws InvalidConfigurationException {
configRegistry = ConfigRegistry.getInstance();
try {
port = Integer.parseInt(
configRegistry.getConfigOrDefault(REPORTER_PORT_CONFIG, String.valueOf(DEFAULT_REPORTER_PORT)));
hostname = configRegistry.getConfigOrDefault(REPORTER_HOST_NAME_CONFIG, DEFAULT_REPORTER_HOSTNAME);
samplerType = configRegistry.getConfigOrDefault(SAMPLER_TYPE_CONFIG, DEFAULT_SAMPLER_TYPE);
// Fall back to the default sampler when an unknown type is configured.
if (!(samplerType.equals(ConstSampler.TYPE) || samplerType.equals(RateLimitingSampler.TYPE)
|| samplerType.equals(ProbabilisticSampler.TYPE))) {
samplerType = DEFAULT_SAMPLER_TYPE;
consoleError.println(
"ballerina: Jaeger configuration: \"sampler type\" invalid. Defaulted to const sampling");
}
samplerParam = Float.valueOf(
configRegistry.getConfigOrDefault(SAMPLER_PARAM_CONFIG, String.valueOf(DEFAULT_SAMPLER_PARAM)));
reporterFlushInterval = Integer.parseInt(configRegistry.getConfigOrDefault(
REPORTER_FLUSH_INTERVAL_MS_CONFIG, String.valueOf(DEFAULT_REPORTER_FLUSH_INTERVAL)));
reporterBufferSize = Integer.parseInt(configRegistry.getConfigOrDefault
(REPORTER_MAX_BUFFER_SPANS_CONFIG, String.valueOf(DEFAULT_REPORTER_MAX_BUFFER_SPANS)));
} catch (IllegalArgumentException | ArithmeticException e) {
// Malformed numeric config values surface as configuration errors.
throw new InvalidConfigurationException(e.getMessage());
}
console.println("ballerina: started publishing tracers to Jaeger on " + hostname + ":" + port);
}
// NOTE(review): two consecutive @Override annotations — the getTracer method
// between them appears to have been elided from this excerpt; as written this
// would not compile.
@Override
@Override
public String getName() {
return TRACER_NAME;
}
}
|
// Jaeger-backed implementation of the OpenTracer SPI. init() reads reporter
// and sampler settings from the global ConfigRegistry.
class OpenTracingExtension implements OpenTracer {
private ConfigRegistry configRegistry;
// Jaeger agent host/port the reporter publishes spans to.
private String hostname;
private int port;
// Sampler settings; samplerType is validated against the known sampler types.
private String samplerType;
private Number samplerParam;
private int reporterFlushInterval;
private int reporterBufferSize;
private static final PrintStream console = System.out;
private static final PrintStream consoleError = System.err;
// Reads all Jaeger settings from the ConfigRegistry, falling back to defaults,
// and rejects non-numeric values via InvalidConfigurationException.
@Override
public void init() throws InvalidConfigurationException {
configRegistry = ConfigRegistry.getInstance();
try {
port = Integer.parseInt(
configRegistry.getConfigOrDefault(REPORTER_PORT_CONFIG, String.valueOf(DEFAULT_REPORTER_PORT)));
hostname = configRegistry.getConfigOrDefault(REPORTER_HOST_NAME_CONFIG, DEFAULT_REPORTER_HOSTNAME);
samplerType = configRegistry.getConfigOrDefault(SAMPLER_TYPE_CONFIG, DEFAULT_SAMPLER_TYPE);
// Fall back to the default sampler when an unknown type is configured.
if (!(samplerType.equals(ConstSampler.TYPE) || samplerType.equals(RateLimitingSampler.TYPE)
|| samplerType.equals(ProbabilisticSampler.TYPE))) {
samplerType = DEFAULT_SAMPLER_TYPE;
consoleError.println(
"ballerina: Jaeger configuration: \"sampler type\" invalid. Defaulted to const sampling");
}
samplerParam = Float.valueOf(
configRegistry.getConfigOrDefault(SAMPLER_PARAM_CONFIG, String.valueOf(DEFAULT_SAMPLER_PARAM)));
reporterFlushInterval = Integer.parseInt(configRegistry.getConfigOrDefault(
REPORTER_FLUSH_INTERVAL_MS_CONFIG, String.valueOf(DEFAULT_REPORTER_FLUSH_INTERVAL)));
reporterBufferSize = Integer.parseInt(configRegistry.getConfigOrDefault
(REPORTER_MAX_BUFFER_SPANS_CONFIG, String.valueOf(DEFAULT_REPORTER_MAX_BUFFER_SPANS)));
} catch (IllegalArgumentException | ArithmeticException e) {
// Malformed numeric config values surface as configuration errors.
throw new InvalidConfigurationException(e.getMessage());
}
console.println("ballerina: started publishing tracers to Jaeger on " + hostname + ":" + port);
}
// NOTE(review): two consecutive @Override annotations — the getTracer method
// between them appears to have been elided from this excerpt; as written this
// would not compile.
@Override
@Override
public String getName() {
return TRACER_NAME;
}
}
|
Is this call-name format (a type-name prefix followed by ".", or a parenthesized prefix followed by ").") consistent for all cases, e.g. anonymous object types as well as named ones?
|
// Strips the object type qualifier from a virtual call name so only the plain
// method name remains.
static String cleanupObjectTypeName(String callName, BType objectType) {
    // Drop a leading "<typeName>." qualifier when the type has a name.
    String typeName = objectType.tsymbol.name.value;
    if (!typeName.isEmpty() && callName.startsWith(typeName)) {
        callName = callName.replace(typeName + ".", "").trim();
    }
    // Drop a leading parenthesized qualifier of the form "(...).".
    if (callName.startsWith("(") && callName.contains(").")) {
        callName = callName.substring(callName.indexOf(").") + 2);
    }
    return callName;
}
|
// Strips the object type qualifier from a virtual call name so only the plain
// method name remains.
// - When the call name starts with the (non-empty) type name, every
//   "<typeName>." occurrence is removed (replace() is not limited to the
//   leading occurrence) and the result is trimmed.
// - When the call name starts with a parenthesized qualifier "(...).",
//   everything up to and including the first ")." is removed.
static String cleanupObjectTypeName(String callName, BType objectType) {
if (!objectType.tsymbol.name.value.isEmpty() && callName.startsWith(objectType.tsymbol.name.value)) {
callName = callName.replace(objectType.tsymbol.name.value + ".", "").trim();
}
if (callName.startsWith("(") && callName.contains(").")) {
callName = callName.substring(callName.indexOf(").") + 2);
}
return callName;
}
|
class JvmCodeGenUtil {
public static final Unifier UNIFIER = new Unifier();
private static final Pattern JVM_RESERVED_CHAR_SET = Pattern.compile("[\\.:/<>]");
public static final String SCOPE_PREFIX = "_SCOPE_";
public static final NameHashComparator NAME_HASH_COMPARATOR = new NameHashComparator();
static void visitInvokeDynamic(MethodVisitor mv, String currentClass, String lambdaName, int size) {
String mapDesc = getMapsDesc(size);
Handle handle = new Handle(Opcodes.H_INVOKESTATIC, "java/lang/invoke/LambdaMetafactory",
"metafactory", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;" +
"Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;" +
"Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;", false);
mv.visitInvokeDynamicInsn("apply", "(" + mapDesc + ")Ljava/util/function/Function;", handle,
Type.getType("(Ljava/lang/Object;)Ljava/lang/Object;"),
new Handle(Opcodes.H_INVOKESTATIC, currentClass, lambdaName, "(" + mapDesc + "[" +
"Ljava/lang/Object;)Ljava/lang/Object;", false),
Type.getType("([Ljava/lang/Object;" + ")Ljava/lang/Object;"));
}
private static String getMapsDesc(long count) {
StringBuilder builder = new StringBuilder();
for (long i = count; i > 0; i--) {
builder.append("Lio/ballerina/runtime/internal/values/MapValue;");
}
return builder.toString();
}
public static void createFunctionPointer(MethodVisitor mv, String className, String lambdaName) {
mv.visitTypeInsn(Opcodes.NEW, FUNCTION_POINTER);
mv.visitInsn(Opcodes.DUP);
visitInvokeDynamic(mv, className, lambdaName, 0);
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitInsn(Opcodes.ICONST_0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, FUNCTION_POINTER, JVM_INIT_METHOD, FP_INIT, false);
}
public static String cleanupPathSeparators(String name) {
name = cleanupBalExt(name);
return name.replace(WINDOWS_PATH_SEPERATOR, JAVA_PACKAGE_SEPERATOR);
}
public static String rewriteVirtualCallTypeName(String value, BType objectType) {
return Utils.encodeFunctionIdentifier(cleanupObjectTypeName(value, getImpliedType(objectType)));
}
private static String cleanupBalExt(String name) {
if (name.endsWith(BAL_EXTENSION)) {
return name.substring(0, name.length() - 4);
}
return name;
}
public static String getFieldTypeSignature(BType bType) {
bType = getImpliedType(bType);
if (TypeTags.isIntegerTypeTag(bType.tag)) {
return "J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return GET_BSTRING;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return GET_XML;
} else {
switch (bType.tag) {
case TypeTags.BYTE:
return "I";
case TypeTags.FLOAT:
return "D";
case TypeTags.DECIMAL:
return GET_BDECIMAL;
case TypeTags.BOOLEAN:
return "Z";
case TypeTags.NIL:
case TypeTags.NEVER:
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.READONLY:
return GET_OBJECT;
case TypeTags.MAP:
case TypeTags.RECORD:
return GET_MAP_VALUE;
case TypeTags.STREAM:
return GET_STREAM_VALUE;
case TypeTags.TABLE:
return GET_TABLE_VALUE;
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return GET_ARRAY_VALUE;
case TypeTags.ERROR:
return GET_ERROR_VALUE;
case TypeTags.FUTURE:
return GET_FUTURE_VALUE;
case TypeTags.OBJECT:
return GET_BOBJECT;
case TypeTags.TYPEDESC:
return GET_TYPEDESC;
case TypeTags.INVOKABLE:
return GET_FUNCTION_POINTER;
case TypeTags.HANDLE:
return GET_HANDLE_VALUE;
case JTypeTags.JTYPE:
return InteropMethodGen.getJTypeSignature((JType) bType);
case TypeTags.REGEXP:
return GET_REGEXP;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
}
public static void generateDefaultConstructor(ClassWriter cw, String ownerClass) {
MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, JVM_INIT_METHOD, VOID_METHOD_DESC, null, null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, ownerClass, JVM_INIT_METHOD, VOID_METHOD_DESC, false);
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
}
static void generateStrandMetadata(MethodVisitor mv, String moduleClass,
PackageID packageID, AsyncDataCollector asyncDataCollector) {
asyncDataCollector.getStrandMetadata().forEach(
(varName, metaData) -> genStrandMetadataField(mv, moduleClass, packageID, varName, metaData));
}
public static void genStrandMetadataField(MethodVisitor mv, String moduleClass, PackageID packageID,
String varName, ScheduleFunctionInfo metaData) {
mv.visitTypeInsn(Opcodes.NEW, STRAND_METADATA);
mv.visitInsn(Opcodes.DUP);
mv.visitLdcInsn(Utils.decodeIdentifier(packageID.orgName.value));
mv.visitLdcInsn(Utils.decodeIdentifier(packageID.name.value));
mv.visitLdcInsn(getMajorVersion(packageID.version.value));
if (metaData.typeName == null) {
mv.visitInsn(Opcodes.ACONST_NULL);
} else {
mv.visitLdcInsn(metaData.typeName);
}
mv.visitLdcInsn(metaData.parentFunctionName);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, STRAND_METADATA,
JVM_INIT_METHOD, INIT_STRAND_METADATA, false);
mv.visitFieldInsn(Opcodes.PUTSTATIC, moduleClass, varName, GET_STRAND_METADATA);
}
static void visitStrandMetadataFields(ClassWriter cw, Map<String, ScheduleFunctionInfo> strandMetaDataMap) {
strandMetaDataMap.keySet().forEach(varName -> visitStrandMetadataField(cw, varName));
}
private static void visitStrandMetadataField(ClassWriter cw, String varName) {
FieldVisitor fv = cw.visitField(Opcodes.ACC_STATIC, varName,
GET_STRAND_METADATA, null, null);
fv.visitEnd();
}
public static String getStrandMetadataVarName(String parentFunction) {
return STRAND_METADATA_VAR_PREFIX + parentFunction + "$";
}
public static boolean isExternFunc(BIRNode.BIRFunction func) {
return (func.flags & Flags.NATIVE) == Flags.NATIVE;
}
public static String getPackageName(PackageID packageID) {
return getPackageNameWithSeparator(packageID, "/");
}
private static String getPackageNameWithSeparator(PackageID packageID, String separator) {
return getPackageNameWithSeparator(packageID, separator, false);
}
private static String getPackageNameWithSeparator(PackageID packageID, String separator, boolean isSource) {
String packageName = "";
String orgName = Utils.encodeNonFunctionIdentifier(packageID.orgName.value);
String moduleName;
if (!packageID.isTestPkg || isSource) {
moduleName = Utils.encodeNonFunctionIdentifier(packageID.name.value);
} else {
moduleName = Utils.encodeNonFunctionIdentifier(packageID.name.value) + Names.TEST_PACKAGE.value;
}
if (!moduleName.equals(ENCODED_DOT_CHARACTER)) {
if (!packageID.version.value.equals("")) {
packageName = getMajorVersion(packageID.version.value) + separator;
}
packageName = moduleName + separator + packageName;
}
if (!orgName.equalsIgnoreCase("$anon")) {
packageName = orgName + separator + packageName;
}
return packageName;
}
public static String getModuleLevelClassName(PackageID packageID, String sourceFileName) {
return getModuleLevelClassName(packageID, sourceFileName, "/");
}
public static void generateExitRuntime(MethodVisitor mv) {
mv.visitMethodInsn(INVOKESTATIC , JAVA_RUNTIME, "getRuntime", GET_RUNTIME, false);
mv.visitInsn(ICONST_0);
mv.visitMethodInsn(INVOKEVIRTUAL , JAVA_RUNTIME, "exit", "(I)V", false);
}
static String getModuleLevelClassName(PackageID packageID, String sourceFileName, String separator) {
String className = cleanupSourceFileName(sourceFileName);
if (className.startsWith(JAVA_PACKAGE_SEPERATOR)) {
className = className.substring(1);
}
return getPackageNameWithSeparator(packageID, separator) + className;
}
private static String cleanupSourceFileName(String name) {
return name.replace(".", FILE_NAME_PERIOD_SEPERATOR);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType) {
return INITIAL_METHOD_DESC + getMethodDescParams(paramTypes) + generateReturnType(retType);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType, BType attachedType) {
return INITIAL_METHOD_DESC + getArgTypeSignature(attachedType) + getMethodDescParams(paramTypes) +
generateReturnType(retType);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType, String attachedTypeClassName) {
return INITIAL_METHOD_DESC + "L" + attachedTypeClassName + ";" + getMethodDescParams(paramTypes) +
generateReturnType(retType);
}
public static String getMethodDescParams(List<BType> paramTypes) {
StringBuilder descBuilder = new StringBuilder();
for (BType type : paramTypes) {
descBuilder.append(getArgTypeSignature(type));
}
return descBuilder.toString();
}
public static String getArgTypeSignature(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
if (TypeTags.isIntegerTypeTag(bType.tag)) {
return "J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return GET_BSTRING;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return GET_XML;
}
switch (bType.tag) {
case TypeTags.BYTE:
return "I";
case TypeTags.FLOAT:
return "D";
case TypeTags.DECIMAL:
return GET_BDECIMAL;
case TypeTags.BOOLEAN:
return "Z";
case TypeTags.NIL:
case TypeTags.NEVER:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.ANY:
case TypeTags.READONLY:
return GET_OBJECT;
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return GET_ARRAY_VALUE;
case TypeTags.ERROR:
return GET_ERROR_VALUE;
case TypeTags.MAP:
case TypeTags.RECORD:
return GET_MAP_VALUE;
case TypeTags.FUTURE:
return GET_FUTURE_VALUE;
case TypeTags.STREAM:
return GET_STREAM_VALUE;
case TypeTags.TABLE:
return GET_TABLE_VALUE;
case TypeTags.INVOKABLE:
return GET_FUNCTION_POINTER;
case TypeTags.TYPEDESC:
return GET_TYPEDESC;
case TypeTags.OBJECT:
return GET_BOBJECT;
case TypeTags.HANDLE:
return GET_HANDLE_VALUE;
case TypeTags.REGEXP:
return GET_REGEXP;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
public static String generateReturnType(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
if (bType == null) {
return RETURN_JOBJECT;
}
bType = JvmCodeGenUtil.UNIFIER.build(bType);
if (bType == null || bType.tag == TypeTags.NIL || bType.tag == TypeTags.NEVER) {
return RETURN_JOBJECT;
} else if (TypeTags.isIntegerTypeTag(bType.tag)) {
return ")J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return RETURN_B_STRING_VALUE;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return RETURN_XML_VALUE;
}
switch (bType.tag) {
case TypeTags.BYTE:
return ")I";
case TypeTags.FLOAT:
return ")D";
case TypeTags.DECIMAL:
return RETURN_DECIMAL_VALUE;
case TypeTags.BOOLEAN:
return ")Z";
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return RETURN_ARRAY_VALUE;
case TypeTags.MAP:
case TypeTags.RECORD:
return RETURN_MAP_VALUE;
case TypeTags.ERROR:
return RETURN_ERROR_VALUE;
case TypeTags.STREAM:
return RETURN_STREAM_VALUE;
case TypeTags.TABLE:
return RETURN_TABLE_VALUE;
case TypeTags.FUTURE:
return RETURN_FUTURE_VALUE;
case TypeTags.TYPEDESC:
return RETURN_TYPEDESC_VALUE;
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.INTERSECTION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.READONLY:
return RETURN_JOBJECT;
case TypeTags.OBJECT:
return RETURN_B_OBJECT;
case TypeTags.INVOKABLE:
return RETURN_FUNCTION_POINTER;
case TypeTags.HANDLE:
return RETURN_HANDLE_VALUE;
case TypeTags.REGEXP:
return RETURN_REGEX_VALUE;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
public static void loadChannelDetails(MethodVisitor mv, List<BIRNode.ChannelDetails> channels,
int invocationVarIndex) {
mv.visitIntInsn(BIPUSH, channels.size());
mv.visitTypeInsn(ANEWARRAY, CHANNEL_DETAILS);
int index = 0;
for (BIRNode.ChannelDetails ch : channels) {
mv.visitInsn(DUP);
mv.visitIntInsn(BIPUSH, index);
index += 1;
mv.visitTypeInsn(NEW, CHANNEL_DETAILS);
mv.visitInsn(DUP);
mv.visitVarInsn(ILOAD, invocationVarIndex);
mv.visitInvokeDynamicInsn(MAKE_CONCAT_WITH_CONSTANTS, INT_TO_STRING,
new Handle(H_INVOKESTATIC, STRING_CONCAT_FACTORY, MAKE_CONCAT_WITH_CONSTANTS,
HANDLE_DESCRIPTOR_FOR_STRING_CONCAT, false),
ch.name + START_OF_HEADING_WITH_SEMICOLON);
if (ch.channelInSameStrand) {
mv.visitInsn(ICONST_1);
} else {
mv.visitInsn(ICONST_0);
}
if (ch.send) {
mv.visitInsn(ICONST_1);
} else {
mv.visitInsn(ICONST_0);
}
mv.visitMethodInsn(INVOKESPECIAL, CHANNEL_DETAILS, JVM_INIT_METHOD,
INIT_CHANNEL_DETAILS, false);
mv.visitInsn(AASTORE);
}
}
public static String toNameString(BType t) {
BTypeSymbol typeSymbol = t.tsymbol;
if ((typeSymbol.kind == SymbolKind.RECORD || typeSymbol.kind == SymbolKind.OBJECT) &&
((BStructureTypeSymbol) typeSymbol).typeDefinitionSymbol != null) {
return Utils.encodeNonFunctionIdentifier(((BStructureTypeSymbol) typeSymbol)
.typeDefinitionSymbol.name.value);
}
return Utils.encodeNonFunctionIdentifier(typeSymbol.name.value);
}
public static boolean isBallerinaBuiltinModule(String orgName, String moduleName) {
return orgName.equals("ballerina") && moduleName.equals("builtin");
}
public static BirScope getLastScopeFromBBInsGen(MethodVisitor mv, LabelGenerator labelGen,
JvmInstructionGen instGen, int localVarOffset,
String funcName, BIRNode.BIRBasicBlock bb,
Set<BirScope> visitedScopesSet, BirScope lastScope) {
int insCount = bb.instructions.size();
for (int i = 0; i < insCount; i++) {
Label insLabel = labelGen.getLabel(funcName + bb.id.value + "ins" + i);
mv.visitLabel(insLabel);
BIRAbstractInstruction inst = bb.instructions.get(i);
if (inst != null) {
generateDiagnosticPos(inst.pos, mv);
instGen.generateInstructions(localVarOffset, inst);
lastScope = getLastScope(inst, funcName, labelGen, visitedScopesSet, lastScope, mv);
}
}
return lastScope;
}
public static void generateDiagnosticPos(Location pos, MethodVisitor mv) {
Label label = new Label();
if (pos != null && pos.lineRange().startLine().line() != OVERFLOW_LINE_NUMBER) {
mv.visitLabel(label);
mv.visitLineNumber(pos.lineRange().startLine().line() + 1, label);
}
}
private static BirScope getLastScope(BIRAbstractInstruction instruction, String funcName, LabelGenerator labelGen,
Set<BirScope> visitedScopesSet, BirScope lastScope, MethodVisitor mv) {
BirScope scope = instruction.scope;
if (scope != null && scope != lastScope) {
lastScope = scope;
Label scopeLabel = labelGen.getLabel(funcName + SCOPE_PREFIX + scope.id);
mv.visitLabel(scopeLabel);
storeLabelForParentScopes(scope, scopeLabel, labelGen, funcName, visitedScopesSet);
visitedScopesSet.add(scope);
}
return lastScope;
}
private static void storeLabelForParentScopes(BirScope scope, Label scopeLabel, LabelGenerator labelGen,
String funcName, Set<BirScope> visitedScopesSet) {
BirScope parent = scope.parent;
if (parent != null && !visitedScopesSet.contains(parent)) {
String labelName = funcName + SCOPE_PREFIX + parent.id;
labelGen.putLabel(labelName, scopeLabel);
visitedScopesSet.add(parent);
storeLabelForParentScopes(parent, scopeLabel, labelGen, funcName, visitedScopesSet);
}
}
public static BirScope getLastScopeFromTerminator(MethodVisitor mv, BIRNode.BIRBasicBlock bb, String funcName,
LabelGenerator labelGen, BirScope lastScope,
Set<BirScope> visitedScopesSet) {
BirScope scope = bb.terminator.scope;
if (scope != null && scope != lastScope) {
lastScope = scope;
Label scopeLabel = labelGen.getLabel(funcName + SCOPE_PREFIX + scope.id);
mv.visitLabel(scopeLabel);
visitedScopesSet.add(scope);
}
return lastScope;
}
public static void genYieldCheck(MethodVisitor mv, LabelGenerator labelGen, BIRNode.BIRBasicBlock thenBB,
String funcName, int localVarOffset, int yieldLocationVarIndex,
Location terminatorPos, String fullyQualifiedFuncName, String yieldStatus,
int yieldStatusVarIndex) {
mv.visitVarInsn(ALOAD, localVarOffset);
mv.visitMethodInsn(INVOKEVIRTUAL, STRAND_CLASS, "isYielded", "()Z", false);
generateSetYieldedStatus(mv, labelGen, funcName, yieldLocationVarIndex, terminatorPos,
fullyQualifiedFuncName, yieldStatus, yieldStatusVarIndex);
Label gotoLabel = labelGen.getLabel(funcName + thenBB.id.value);
mv.visitJumpInsn(GOTO, gotoLabel);
}
/**
 * Emits the bytecode that records where and why a strand yielded.
 * Expects the boolean result of {@code Strand.isYielded()} on the operand stack:
 * when false, execution falls through past {@code yieldLocationLabel}; when true,
 * a "function(file:line)" location string and the yield status are stored into
 * the given local slots and control jumps to the function's shared yield label.
 *
 * @param mv                     visitor of the method being generated
 * @param labelGen               label generator scoped to the current function
 * @param funcName               key prefix for the shared "yield" label
 * @param yieldLocationVarIndex  local slot receiving the location string
 * @param terminatorPos          position of the yielding terminator; may be null
 * @param fullyQualifiedFuncName prefix of the recorded location string
 * @param yieldStatus            status string stored alongside the location
 * @param yieldStatusVarIndex    local slot receiving the status string
 */
protected static void generateSetYieldedStatus(MethodVisitor mv, LabelGenerator labelGen, String funcName,
int yieldLocationVarIndex, Location terminatorPos,
String fullyQualifiedFuncName, String yieldStatus,
int yieldStatusVarIndex) {
Label yieldLocationLabel = new Label();
// Consume the isYielded() boolean: 0 (not yielded) skips the bookkeeping below.
mv.visitJumpInsn(IFEQ, yieldLocationLabel);
// The location string is assembled at codegen time and embedded as a constant.
StringBuilder yieldLocationData = new StringBuilder(fullyQualifiedFuncName);
if (terminatorPos != null) {
// "(file:line)" — BIR lines are 0-based, so +1 for user-facing numbering.
yieldLocationData.append("(").append(terminatorPos.lineRange().fileName()).append(":")
.append(terminatorPos.lineRange().startLine().line() + 1).append(")");
}
mv.visitLdcInsn(yieldLocationData.toString());
mv.visitVarInsn(ASTORE, yieldLocationVarIndex);
mv.visitLdcInsn(yieldStatus);
mv.visitVarInsn(ASTORE, yieldStatusVarIndex);
// Hand off to the function's common yield-handling block.
Label yieldLabel = labelGen.getLabel(funcName + "yield");
mv.visitJumpInsn(GOTO, yieldLabel);
mv.visitLabel(yieldLocationLabel);
}
/**
 * Returns a copy of {@code pkgID} whose org and module names are JVM-encoded;
 * the version is carried over unchanged.
 */
public static PackageID cleanupPackageID(PackageID pkgID) {
    return new PackageID(
            new Name(Utils.encodeNonFunctionIdentifier(pkgID.orgName.value)),
            new Name(Utils.encodeNonFunctionIdentifier(pkgID.name.value)),
            pkgID.version);
}
public static boolean isBuiltInPackage(PackageID packageID) {
packageID = cleanupPackageID(packageID);
return BALLERINA.equals(packageID.orgName.value) && BUILT_IN_PACKAGE_NAME.equals(packageID.name.value);
}
/**
 * Checks whether {@code importModule} denotes the same module as {@code moduleId}:
 * same org, same (JVM-encoded) module name, and same major version.
 */
public static boolean isSameModule(PackageID moduleId, PackageID importModule) {
    PackageID cleaned = cleanupPackageID(importModule);
    return moduleId.orgName.value.equals(cleaned.orgName.value)
            && moduleId.name.value.equals(cleaned.name.value)
            && getMajorVersion(moduleId.version.value).equals(getMajorVersion(cleaned.version.value));
}
/**
 * Makes a function name usable as a JVM method name: if it contains any of the
 * characters checked below, the reserved characters are replaced with '_' and the
 * result is prefixed with '$'; otherwise the name is returned unchanged.
 */
public static String cleanupFunctionName(String functionName) {
// NOTE(review): containsAny is given a backslash ('\\') in its character set, but
// JVM_RESERVED_CHAR_SET ("[\\.:/<>]", i.e. the class [.:/<>]) does not match a
// backslash — a name whose only special character is '\' would be '$'-prefixed
// yet have nothing replaced. Confirm whether that asymmetry is intentional.
return StringUtils.containsAny(functionName, "\\.:/<>") ?
"$" + JVM_RESERVED_CHAR_SET.matcher(functionName).replaceAll("_") : functionName;
}
public static boolean isSimpleBasicType(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
switch (bType.tag) {
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.BOOLEAN:
case TypeTags.DECIMAL:
case TypeTags.NIL:
case TypeTags.NEVER:
return true;
default:
return (TypeTags.isIntegerTypeTag(bType.tag)) || (TypeTags.isStringTypeTag(bType.tag));
}
}
public static boolean needNoTypeGeneration(int bTypeTag) {
switch (bTypeTag) {
case TypeTags.RECORD:
case TypeTags.ERROR:
case TypeTags.OBJECT:
case TypeTags.UNION:
case TypeTags.TUPLE:
return false;
default:
return true;
}
}
/**
 * Retrieve the referred type if a given type is a type reference type or
 * retrieve the effective type if the given type is an intersection type.
 *
 * @param type type to retrieve the implied type
 * @return the implied type if provided with a type reference type or an intersection type,
 *         else returns the original type ({@code null} stays {@code null})
 */
public static BType getImpliedType(BType type) {
    if (type == null) {
        return null;
    }
    if (type.tag == TypeTags.TYPEREFDESC) {
        // Type references may chain; follow them down to the concrete type.
        return getImpliedType(((BTypeReferenceType) type).referredType);
    }
    if (type.tag == TypeTags.INTERSECTION) {
        return getImpliedType(((BIntersectionType) type).effectiveType);
    }
    // Removed the redundant local ("constraint") that merely aliased the parameter.
    return type;
}
/**
 * Emits bytecode that pushes the given compile-time constant onto the operand stack.
 * Integers, floats and booleans are loaded via LDC of the primitive value; strings
 * come from the pre-generated BString constant-holder classes; decimals construct a
 * DecimalValue from the literal text; nil/never push ACONST_NULL.
 *
 * @param bType           declared type of the constant (resolved through getImpliedType)
 * @param constVal        the constant's value, either boxed or in string form
 * @param mv              visitor of the method being generated
 * @param jvmConstantsGen registry of the generated constant-holder classes
 * @throws BLangCompilerException for types with no constant-loading strategy
 */
public static void loadConstantValue(BType bType, Object constVal, MethodVisitor mv,
JvmConstantsGen jvmConstantsGen) {
int typeTag = getImpliedType(bType).tag;
if (TypeTags.isIntegerTypeTag(typeTag)) {
// Accept either a boxed Long or any value whose string form parses as long.
long intValue = constVal instanceof Long ? (long) constVal : Long.parseLong(String.valueOf(constVal));
mv.visitLdcInsn(intValue);
return;
} else if (TypeTags.isStringTypeTag(typeTag)) {
// Strings are interned into generated holder classes; read the static field back.
String val = String.valueOf(constVal);
int index = jvmConstantsGen.getBStringConstantVarIndex(val);
String varName = B_STRING_VAR_PREFIX + index;
String stringConstantsClass = getStringConstantsClass(index, jvmConstantsGen);
mv.visitFieldInsn(GETSTATIC, stringConstantsClass, varName, GET_BSTRING);
return;
}
switch (typeTag) {
case TypeTags.BYTE:
int byteValue = ((Number) constVal).intValue();
mv.visitLdcInsn(byteValue);
break;
case TypeTags.FLOAT:
double doubleValue = constVal instanceof Double ? (double) constVal :
Double.parseDouble(String.valueOf(constVal));
mv.visitLdcInsn(doubleValue);
break;
case TypeTags.BOOLEAN:
boolean booleanVal = constVal instanceof Boolean ? (boolean) constVal :
Boolean.parseBoolean(String.valueOf(constVal));
mv.visitLdcInsn(booleanVal);
break;
case TypeTags.DECIMAL:
// new DecimalValue(<literal without trailing 'd'/'D' discriminator>)
mv.visitTypeInsn(NEW, DECIMAL_VALUE);
mv.visitInsn(DUP);
mv.visitLdcInsn(removeDecimalDiscriminator(String.valueOf(constVal)));
mv.visitMethodInsn(INVOKESPECIAL, DECIMAL_VALUE, JVM_INIT_METHOD, INIT_WITH_STRING, false);
break;
case TypeTags.NIL:
case TypeTags.NEVER:
mv.visitInsn(ACONST_NULL);
break;
default:
throw new BLangCompilerException("JVM generation is not supported for type : " + bType);
}
}
/**
 * Resolves the generated holder class that contains the BString constant at
 * {@code varIndex}; constants are bucketed MAX_STRINGS_PER_METHOD per class.
 */
private static String getStringConstantsClass(int varIndex, JvmConstantsGen jvmConstantsGen) {
    return jvmConstantsGen.getStringConstantsClass() + UNDERSCORE + (varIndex / MAX_STRINGS_PER_METHOD);
}
/**
 * Strips a trailing decimal discriminator ('d' or 'D') from a literal, if present.
 * Inputs shorter than two characters are returned unchanged, so a bare "d" is
 * never reduced to the empty string.
 */
private static String removeDecimalDiscriminator(String value) {
    int length = value.length();
    if (length >= 2) {
        char last = value.charAt(length - 1);
        if (last == 'd' || last == 'D') {
            return value.substring(0, length - 1);
        }
    }
    return value;
}
public static void createDefaultCase(MethodVisitor mv, Label defaultCaseLabel, int nameRegIndex,
String errorMessage) {
mv.visitLabel(defaultCaseLabel);
mv.visitTypeInsn(NEW, ERROR_VALUE);
mv.visitInsn(DUP);
mv.visitTypeInsn(NEW, STRING_BUILDER);
mv.visitInsn(DUP);
mv.visitLdcInsn(errorMessage);
mv.visitMethodInsn(INVOKESPECIAL, STRING_BUILDER, JVM_INIT_METHOD, INIT_WITH_STRING,
false);
mv.visitVarInsn(ALOAD, nameRegIndex);
mv.visitMethodInsn(INVOKEVIRTUAL, STRING_BUILDER, "append",
STRING_BUILDER_APPEND, false);
mv.visitMethodInsn(INVOKEVIRTUAL, STRING_BUILDER, JVM_TO_STRING_METHOD, GET_JSTRING
, false);
mv.visitMethodInsn(INVOKESTATIC, STRING_UTILS, "fromString", FROM_STRING, false);
mv.visitMethodInsn(INVOKESPECIAL, ERROR_VALUE, JVM_INIT_METHOD, INIT_ERROR,
false);
mv.visitInsn(ATHROW);
}
public static void castToJavaString(MethodVisitor mv, int fieldNameRegIndex, int strKeyVarIndex) {
mv.visitVarInsn(ALOAD, fieldNameRegIndex);
mv.visitTypeInsn(CHECKCAST, B_STRING_VALUE);
mv.visitMethodInsn(INVOKEINTERFACE, B_STRING_VALUE, GET_VALUE_METHOD,
GET_JSTRING, true);
mv.visitVarInsn(ASTORE, strKeyVarIndex);
}
// Utility class holding only static codegen helpers; prevent instantiation.
private JvmCodeGenUtil() {
}
public static String getRefTypeConstantName(BTypeReferenceType type) {
return JvmConstants.TYPEREF_TYPE_VAR_PREFIX + Utils.encodeNonFunctionIdentifier(type.tsymbol.name.value);
}
/**
 * Triggers ASM's stack-size/frame computation for the method and rewraps any
 * failure with the offending method and class names for diagnosability.
 * Throwable (not just Exception) is caught deliberately: ASM can signal frame
 * computation problems via errors as well.
 */
public static void visitMaxStackForMethod(MethodVisitor mv, String funcName, String className) {
try {
// The (0, 0) arguments are placeholders — presumably the ClassWriter is created
// with COMPUTE_FRAMES/COMPUTE_MAXS so ASM recomputes them; confirm at the
// writer's creation site.
mv.visitMaxs(0, 0);
} catch (Throwable e) {
throw new BLangCompilerException(
"error while generating method '" + Utils.decodeIdentifier(funcName) + "' in class '" +
Utils.decodeIdentifier(className) + "'", e);
}
}
}
|
class JvmCodeGenUtil {
public static final Unifier UNIFIER = new Unifier();
private static final Pattern JVM_RESERVED_CHAR_SET = Pattern.compile("[\\.:/<>]");
public static final String SCOPE_PREFIX = "_SCOPE_";
public static final NameHashComparator NAME_HASH_COMPARATOR = new NameHashComparator();
static void visitInvokeDynamic(MethodVisitor mv, String currentClass, String lambdaName, int size) {
String mapDesc = getMapsDesc(size);
Handle handle = new Handle(Opcodes.H_INVOKESTATIC, "java/lang/invoke/LambdaMetafactory",
"metafactory", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;" +
"Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;" +
"Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;", false);
mv.visitInvokeDynamicInsn("apply", "(" + mapDesc + ")Ljava/util/function/Function;", handle,
Type.getType("(Ljava/lang/Object;)Ljava/lang/Object;"),
new Handle(Opcodes.H_INVOKESTATIC, currentClass, lambdaName, "(" + mapDesc + "[" +
"Ljava/lang/Object;)Ljava/lang/Object;", false),
Type.getType("([Ljava/lang/Object;" + ")Ljava/lang/Object;"));
}
/**
 * Builds the JVM descriptor fragment for {@code count} MapValue parameters,
 * i.e. the MapValue internal-name descriptor repeated {@code count} times.
 */
private static String getMapsDesc(long count) {
    StringBuilder desc = new StringBuilder();
    for (long emitted = 0; emitted < count; emitted++) {
        desc.append("Lio/ballerina/runtime/internal/values/MapValue;");
    }
    return desc.toString();
}
public static void createFunctionPointer(MethodVisitor mv, String className, String lambdaName) {
mv.visitTypeInsn(Opcodes.NEW, FUNCTION_POINTER);
mv.visitInsn(Opcodes.DUP);
visitInvokeDynamic(mv, className, lambdaName, 0);
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitInsn(Opcodes.ICONST_0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, FUNCTION_POINTER, JVM_INIT_METHOD, FP_INIT, false);
}
public static String cleanupPathSeparators(String name) {
name = cleanupBalExt(name);
return name.replace(WINDOWS_PATH_SEPERATOR, JAVA_PACKAGE_SEPERATOR);
}
public static String rewriteVirtualCallTypeName(String value, BType objectType) {
return Utils.encodeFunctionIdentifier(cleanupObjectTypeName(value, getImpliedType(objectType)));
}
/**
 * Drops a trailing {@code .bal} extension from {@code name}, if present;
 * otherwise returns the name unchanged.
 */
private static String cleanupBalExt(String name) {
    if (name.endsWith(BAL_EXTENSION)) {
        // Derive the cut length from the constant instead of the magic number 4,
        // so this stays correct if BAL_EXTENSION ever changes.
        return name.substring(0, name.length() - BAL_EXTENSION.length());
    }
    return name;
}
public static String getFieldTypeSignature(BType bType) {
bType = getImpliedType(bType);
if (TypeTags.isIntegerTypeTag(bType.tag)) {
return "J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return GET_BSTRING;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return GET_XML;
} else {
switch (bType.tag) {
case TypeTags.BYTE:
return "I";
case TypeTags.FLOAT:
return "D";
case TypeTags.DECIMAL:
return GET_BDECIMAL;
case TypeTags.BOOLEAN:
return "Z";
case TypeTags.NIL:
case TypeTags.NEVER:
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.READONLY:
return GET_OBJECT;
case TypeTags.MAP:
case TypeTags.RECORD:
return GET_MAP_VALUE;
case TypeTags.STREAM:
return GET_STREAM_VALUE;
case TypeTags.TABLE:
return GET_TABLE_VALUE;
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return GET_ARRAY_VALUE;
case TypeTags.ERROR:
return GET_ERROR_VALUE;
case TypeTags.FUTURE:
return GET_FUTURE_VALUE;
case TypeTags.OBJECT:
return GET_BOBJECT;
case TypeTags.TYPEDESC:
return GET_TYPEDESC;
case TypeTags.INVOKABLE:
return GET_FUNCTION_POINTER;
case TypeTags.HANDLE:
return GET_HANDLE_VALUE;
case JTypeTags.JTYPE:
return InteropMethodGen.getJTypeSignature((JType) bType);
case TypeTags.REGEXP:
return GET_REGEXP;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
}
public static void generateDefaultConstructor(ClassWriter cw, String ownerClass) {
MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, JVM_INIT_METHOD, VOID_METHOD_DESC, null, null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, ownerClass, JVM_INIT_METHOD, VOID_METHOD_DESC, false);
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
}
static void generateStrandMetadata(MethodVisitor mv, String moduleClass,
PackageID packageID, AsyncDataCollector asyncDataCollector) {
asyncDataCollector.getStrandMetadata().forEach(
(varName, metaData) -> genStrandMetadataField(mv, moduleClass, packageID, varName, metaData));
}
public static void genStrandMetadataField(MethodVisitor mv, String moduleClass, PackageID packageID,
String varName, ScheduleFunctionInfo metaData) {
mv.visitTypeInsn(Opcodes.NEW, STRAND_METADATA);
mv.visitInsn(Opcodes.DUP);
mv.visitLdcInsn(Utils.decodeIdentifier(packageID.orgName.value));
mv.visitLdcInsn(Utils.decodeIdentifier(packageID.name.value));
mv.visitLdcInsn(getMajorVersion(packageID.version.value));
if (metaData.typeName == null) {
mv.visitInsn(Opcodes.ACONST_NULL);
} else {
mv.visitLdcInsn(metaData.typeName);
}
mv.visitLdcInsn(metaData.parentFunctionName);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, STRAND_METADATA,
JVM_INIT_METHOD, INIT_STRAND_METADATA, false);
mv.visitFieldInsn(Opcodes.PUTSTATIC, moduleClass, varName, GET_STRAND_METADATA);
}
static void visitStrandMetadataFields(ClassWriter cw, Map<String, ScheduleFunctionInfo> strandMetaDataMap) {
strandMetaDataMap.keySet().forEach(varName -> visitStrandMetadataField(cw, varName));
}
private static void visitStrandMetadataField(ClassWriter cw, String varName) {
FieldVisitor fv = cw.visitField(Opcodes.ACC_STATIC, varName,
GET_STRAND_METADATA, null, null);
fv.visitEnd();
}
public static String getStrandMetadataVarName(String parentFunction) {
return STRAND_METADATA_VAR_PREFIX + parentFunction + "$";
}
public static boolean isExternFunc(BIRNode.BIRFunction func) {
return (func.flags & Flags.NATIVE) == Flags.NATIVE;
}
public static String getPackageName(PackageID packageID) {
return getPackageNameWithSeparator(packageID, "/");
}
private static String getPackageNameWithSeparator(PackageID packageID, String separator) {
return getPackageNameWithSeparator(packageID, separator, false);
}
/**
 * Builds the package-name prefix ({@code org<sep>module<sep>majorVersion<sep>})
 * used for generated class names.
 * The module segment is omitted for the encoded-dot (default) module name; the
 * org segment is omitted for anonymous ({@code $anon}) orgs; the version segment
 * is omitted when the package declares no version.
 *
 * @param packageID the package to derive the prefix from
 * @param separator segment separator ("/" for class names, "." for source names)
 * @param isSource  when true, test packages are NOT given the test-package suffix
 */
private static String getPackageNameWithSeparator(PackageID packageID, String separator, boolean isSource) {
    String orgName = Utils.encodeNonFunctionIdentifier(packageID.orgName.value);
    String moduleName = Utils.encodeNonFunctionIdentifier(packageID.name.value);
    if (packageID.isTestPkg && !isSource) {
        // Test variants of a module get a dedicated suffix so they cannot clash
        // with the sources of the same module.
        moduleName = moduleName + Names.TEST_PACKAGE.value;
    }
    String packageName = "";
    if (!moduleName.equals(ENCODED_DOT_CHARACTER)) {
        if (!packageID.version.value.isEmpty()) {
            packageName = getMajorVersion(packageID.version.value) + separator;
        }
        packageName = moduleName + separator + packageName;
    }
    if (!orgName.equalsIgnoreCase("$anon")) {
        packageName = orgName + separator + packageName;
    }
    return packageName;
}
public static String getModuleLevelClassName(PackageID packageID, String sourceFileName) {
return getModuleLevelClassName(packageID, sourceFileName, "/");
}
public static void generateExitRuntime(MethodVisitor mv) {
mv.visitMethodInsn(INVOKESTATIC , JAVA_RUNTIME, "getRuntime", GET_RUNTIME, false);
mv.visitInsn(ICONST_0);
mv.visitMethodInsn(INVOKEVIRTUAL , JAVA_RUNTIME, "exit", "(I)V", false);
}
static String getModuleLevelClassName(PackageID packageID, String sourceFileName, String separator) {
String className = cleanupSourceFileName(sourceFileName);
if (className.startsWith(JAVA_PACKAGE_SEPERATOR)) {
className = className.substring(1);
}
return getPackageNameWithSeparator(packageID, separator) + className;
}
private static String cleanupSourceFileName(String name) {
return name.replace(".", FILE_NAME_PERIOD_SEPERATOR);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType) {
return INITIAL_METHOD_DESC + getMethodDescParams(paramTypes) + generateReturnType(retType);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType, BType attachedType) {
return INITIAL_METHOD_DESC + getArgTypeSignature(attachedType) + getMethodDescParams(paramTypes) +
generateReturnType(retType);
}
public static String getMethodDesc(List<BType> paramTypes, BType retType, String attachedTypeClassName) {
return INITIAL_METHOD_DESC + "L" + attachedTypeClassName + ";" + getMethodDescParams(paramTypes) +
generateReturnType(retType);
}
public static String getMethodDescParams(List<BType> paramTypes) {
StringBuilder descBuilder = new StringBuilder();
for (BType type : paramTypes) {
descBuilder.append(getArgTypeSignature(type));
}
return descBuilder.toString();
}
public static String getArgTypeSignature(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
if (TypeTags.isIntegerTypeTag(bType.tag)) {
return "J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return GET_BSTRING;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return GET_XML;
}
switch (bType.tag) {
case TypeTags.BYTE:
return "I";
case TypeTags.FLOAT:
return "D";
case TypeTags.DECIMAL:
return GET_BDECIMAL;
case TypeTags.BOOLEAN:
return "Z";
case TypeTags.NIL:
case TypeTags.NEVER:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.ANY:
case TypeTags.READONLY:
return GET_OBJECT;
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return GET_ARRAY_VALUE;
case TypeTags.ERROR:
return GET_ERROR_VALUE;
case TypeTags.MAP:
case TypeTags.RECORD:
return GET_MAP_VALUE;
case TypeTags.FUTURE:
return GET_FUTURE_VALUE;
case TypeTags.STREAM:
return GET_STREAM_VALUE;
case TypeTags.TABLE:
return GET_TABLE_VALUE;
case TypeTags.INVOKABLE:
return GET_FUNCTION_POINTER;
case TypeTags.TYPEDESC:
return GET_TYPEDESC;
case TypeTags.OBJECT:
return GET_BOBJECT;
case TypeTags.HANDLE:
return GET_HANDLE_VALUE;
case TypeTags.REGEXP:
return GET_REGEXP;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
public static String generateReturnType(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
if (bType == null) {
return RETURN_JOBJECT;
}
bType = JvmCodeGenUtil.UNIFIER.build(bType);
if (bType == null || bType.tag == TypeTags.NIL || bType.tag == TypeTags.NEVER) {
return RETURN_JOBJECT;
} else if (TypeTags.isIntegerTypeTag(bType.tag)) {
return ")J";
} else if (TypeTags.isStringTypeTag(bType.tag)) {
return RETURN_B_STRING_VALUE;
} else if (TypeTags.isXMLTypeTag(bType.tag)) {
return RETURN_XML_VALUE;
}
switch (bType.tag) {
case TypeTags.BYTE:
return ")I";
case TypeTags.FLOAT:
return ")D";
case TypeTags.DECIMAL:
return RETURN_DECIMAL_VALUE;
case TypeTags.BOOLEAN:
return ")Z";
case TypeTags.ARRAY:
case TypeTags.TUPLE:
return RETURN_ARRAY_VALUE;
case TypeTags.MAP:
case TypeTags.RECORD:
return RETURN_MAP_VALUE;
case TypeTags.ERROR:
return RETURN_ERROR_VALUE;
case TypeTags.STREAM:
return RETURN_STREAM_VALUE;
case TypeTags.TABLE:
return RETURN_TABLE_VALUE;
case TypeTags.FUTURE:
return RETURN_FUTURE_VALUE;
case TypeTags.TYPEDESC:
return RETURN_TYPEDESC_VALUE;
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.UNION:
case TypeTags.INTERSECTION:
case TypeTags.JSON:
case TypeTags.FINITE:
case TypeTags.READONLY:
return RETURN_JOBJECT;
case TypeTags.OBJECT:
return RETURN_B_OBJECT;
case TypeTags.INVOKABLE:
return RETURN_FUNCTION_POINTER;
case TypeTags.HANDLE:
return RETURN_HANDLE_VALUE;
case TypeTags.REGEXP:
return RETURN_REGEX_VALUE;
default:
throw new BLangCompilerException(JvmConstants.TYPE_NOT_SUPPORTED_MESSAGE + bType);
}
}
/**
 * Emits bytecode that builds a {@code ChannelDetails[]} describing the given
 * worker channels and leaves it on the operand stack. Each element is constructed
 * with a per-invocation channel name ("name" + invocation counter), whether the
 * channel lives in the same strand, and whether it is a send channel.
 *
 * @param mv                 visitor of the method being generated
 * @param channels           channels to encode into the array
 * @param invocationVarIndex local slot holding the current invocation counter
 */
public static void loadChannelDetails(MethodVisitor mv, List<BIRNode.ChannelDetails> channels,
int invocationVarIndex) {
// NOTE(review): BIPUSH takes a signed byte operand, so both the array size and the
// element index here assume fewer than 128 channels — confirm an upstream bound
// guarantees this, otherwise SIPUSH/LDC would be needed.
mv.visitIntInsn(BIPUSH, channels.size());
mv.visitTypeInsn(ANEWARRAY, CHANNEL_DETAILS);
int index = 0;
for (BIRNode.ChannelDetails ch : channels) {
mv.visitInsn(DUP);
mv.visitIntInsn(BIPUSH, index);
index += 1;
mv.visitTypeInsn(NEW, CHANNEL_DETAILS);
mv.visitInsn(DUP);
// Channel name is "<name>\u0001<invocationCount>" assembled at runtime via
// a string-concat indy call on the invocation counter.
mv.visitVarInsn(ILOAD, invocationVarIndex);
mv.visitInvokeDynamicInsn(MAKE_CONCAT_WITH_CONSTANTS, INT_TO_STRING,
new Handle(H_INVOKESTATIC, STRING_CONCAT_FACTORY, MAKE_CONCAT_WITH_CONSTANTS,
HANDLE_DESCRIPTOR_FOR_STRING_CONCAT, false),
ch.name + START_OF_HEADING_WITH_SEMICOLON);
if (ch.channelInSameStrand) {
mv.visitInsn(ICONST_1);
} else {
mv.visitInsn(ICONST_0);
}
if (ch.send) {
mv.visitInsn(ICONST_1);
} else {
mv.visitInsn(ICONST_0);
}
mv.visitMethodInsn(INVOKESPECIAL, CHANNEL_DETAILS, JVM_INIT_METHOD,
INIT_CHANNEL_DETAILS, false);
mv.visitInsn(AASTORE);
}
}
public static String toNameString(BType t) {
BTypeSymbol typeSymbol = t.tsymbol;
if ((typeSymbol.kind == SymbolKind.RECORD || typeSymbol.kind == SymbolKind.OBJECT) &&
((BStructureTypeSymbol) typeSymbol).typeDefinitionSymbol != null) {
return Utils.encodeNonFunctionIdentifier(((BStructureTypeSymbol) typeSymbol)
.typeDefinitionSymbol.name.value);
}
return Utils.encodeNonFunctionIdentifier(typeSymbol.name.value);
}
/** True only for the {@code ballerina/builtin} module. */
public static boolean isBallerinaBuiltinModule(String orgName, String moduleName) {
    if (!orgName.equals("ballerina")) {
        return false;
    }
    return moduleName.equals("builtin");
}
public static BirScope getLastScopeFromBBInsGen(MethodVisitor mv, LabelGenerator labelGen,
JvmInstructionGen instGen, int localVarOffset,
String funcName, BIRNode.BIRBasicBlock bb,
Set<BirScope> visitedScopesSet, BirScope lastScope) {
int insCount = bb.instructions.size();
for (int i = 0; i < insCount; i++) {
Label insLabel = labelGen.getLabel(funcName + bb.id.value + "ins" + i);
mv.visitLabel(insLabel);
BIRAbstractInstruction inst = bb.instructions.get(i);
if (inst != null) {
generateDiagnosticPos(inst.pos, mv);
instGen.generateInstructions(localVarOffset, inst);
lastScope = getLastScope(inst, funcName, labelGen, visitedScopesSet, lastScope, mv);
}
}
return lastScope;
}
public static void generateDiagnosticPos(Location pos, MethodVisitor mv) {
Label label = new Label();
if (pos != null && pos.lineRange().startLine().line() != OVERFLOW_LINE_NUMBER) {
mv.visitLabel(label);
mv.visitLineNumber(pos.lineRange().startLine().line() + 1, label);
}
}
private static BirScope getLastScope(BIRAbstractInstruction instruction, String funcName, LabelGenerator labelGen,
Set<BirScope> visitedScopesSet, BirScope lastScope, MethodVisitor mv) {
BirScope scope = instruction.scope;
if (scope != null && scope != lastScope) {
lastScope = scope;
Label scopeLabel = labelGen.getLabel(funcName + SCOPE_PREFIX + scope.id);
mv.visitLabel(scopeLabel);
storeLabelForParentScopes(scope, scopeLabel, labelGen, funcName, visitedScopesSet);
visitedScopesSet.add(scope);
}
return lastScope;
}
private static void storeLabelForParentScopes(BirScope scope, Label scopeLabel, LabelGenerator labelGen,
String funcName, Set<BirScope> visitedScopesSet) {
BirScope parent = scope.parent;
if (parent != null && !visitedScopesSet.contains(parent)) {
String labelName = funcName + SCOPE_PREFIX + parent.id;
labelGen.putLabel(labelName, scopeLabel);
visitedScopesSet.add(parent);
storeLabelForParentScopes(parent, scopeLabel, labelGen, funcName, visitedScopesSet);
}
}
public static BirScope getLastScopeFromTerminator(MethodVisitor mv, BIRNode.BIRBasicBlock bb, String funcName,
LabelGenerator labelGen, BirScope lastScope,
Set<BirScope> visitedScopesSet) {
BirScope scope = bb.terminator.scope;
if (scope != null && scope != lastScope) {
lastScope = scope;
Label scopeLabel = labelGen.getLabel(funcName + SCOPE_PREFIX + scope.id);
mv.visitLabel(scopeLabel);
visitedScopesSet.add(scope);
}
return lastScope;
}
public static void genYieldCheck(MethodVisitor mv, LabelGenerator labelGen, BIRNode.BIRBasicBlock thenBB,
String funcName, int localVarOffset, int yieldLocationVarIndex,
Location terminatorPos, String fullyQualifiedFuncName, String yieldStatus,
int yieldStatusVarIndex) {
mv.visitVarInsn(ALOAD, localVarOffset);
mv.visitMethodInsn(INVOKEVIRTUAL, STRAND_CLASS, "isYielded", "()Z", false);
generateSetYieldedStatus(mv, labelGen, funcName, yieldLocationVarIndex, terminatorPos,
fullyQualifiedFuncName, yieldStatus, yieldStatusVarIndex);
Label gotoLabel = labelGen.getLabel(funcName + thenBB.id.value);
mv.visitJumpInsn(GOTO, gotoLabel);
}
protected static void generateSetYieldedStatus(MethodVisitor mv, LabelGenerator labelGen, String funcName,
int yieldLocationVarIndex, Location terminatorPos,
String fullyQualifiedFuncName, String yieldStatus,
int yieldStatusVarIndex) {
Label yieldLocationLabel = new Label();
mv.visitJumpInsn(IFEQ, yieldLocationLabel);
StringBuilder yieldLocationData = new StringBuilder(fullyQualifiedFuncName);
if (terminatorPos != null) {
yieldLocationData.append("(").append(terminatorPos.lineRange().fileName()).append(":")
.append(terminatorPos.lineRange().startLine().line() + 1).append(")");
}
mv.visitLdcInsn(yieldLocationData.toString());
mv.visitVarInsn(ASTORE, yieldLocationVarIndex);
mv.visitLdcInsn(yieldStatus);
mv.visitVarInsn(ASTORE, yieldStatusVarIndex);
Label yieldLabel = labelGen.getLabel(funcName + "yield");
mv.visitJumpInsn(GOTO, yieldLabel);
mv.visitLabel(yieldLocationLabel);
}
public static PackageID cleanupPackageID(PackageID pkgID) {
Name org = new Name(Utils.encodeNonFunctionIdentifier(pkgID.orgName.value));
Name module = new Name(Utils.encodeNonFunctionIdentifier(pkgID.name.value));
return new PackageID(org, module, pkgID.version);
}
public static boolean isBuiltInPackage(PackageID packageID) {
packageID = cleanupPackageID(packageID);
return BALLERINA.equals(packageID.orgName.value) && BUILT_IN_PACKAGE_NAME.equals(packageID.name.value);
}
public static boolean isSameModule(PackageID moduleId, PackageID importModule) {
PackageID cleanedPkg = cleanupPackageID(importModule);
if (!moduleId.orgName.value.equals(cleanedPkg.orgName.value)) {
return false;
} else if (!moduleId.name.value.equals(cleanedPkg.name.value)) {
return false;
} else {
return getMajorVersion(moduleId.version.value).equals(getMajorVersion(cleanedPkg.version.value));
}
}
public static String cleanupFunctionName(String functionName) {
return StringUtils.containsAny(functionName, "\\.:/<>") ?
"$" + JVM_RESERVED_CHAR_SET.matcher(functionName).replaceAll("_") : functionName;
}
public static boolean isSimpleBasicType(BType bType) {
bType = JvmCodeGenUtil.getImpliedType(bType);
switch (bType.tag) {
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.BOOLEAN:
case TypeTags.DECIMAL:
case TypeTags.NIL:
case TypeTags.NEVER:
return true;
default:
return (TypeTags.isIntegerTypeTag(bType.tag)) || (TypeTags.isStringTypeTag(bType.tag));
}
}
public static boolean needNoTypeGeneration(int bTypeTag) {
switch (bTypeTag) {
case TypeTags.RECORD:
case TypeTags.ERROR:
case TypeTags.OBJECT:
case TypeTags.UNION:
case TypeTags.TUPLE:
return false;
default:
return true;
}
}
/**
* Retrieve the referred type if a given type is a type reference type or
* retrieve the effective type if the given type is an intersection type.
*
* @param type type to retrieve the implied type
* @return the implied type if provided with a type reference type or an intersection type,
* else returns the original type
*/
public static BType getImpliedType(BType type) {
BType constraint = type;
if (type == null) {
return null;
}
if (type.tag == TypeTags.TYPEREFDESC) {
return getImpliedType(((BTypeReferenceType) type).referredType);
}
if (type.tag == TypeTags.INTERSECTION) {
return getImpliedType(((BIntersectionType) type).effectiveType);
}
return constraint;
}
public static void loadConstantValue(BType bType, Object constVal, MethodVisitor mv,
JvmConstantsGen jvmConstantsGen) {
int typeTag = getImpliedType(bType).tag;
if (TypeTags.isIntegerTypeTag(typeTag)) {
long intValue = constVal instanceof Long ? (long) constVal : Long.parseLong(String.valueOf(constVal));
mv.visitLdcInsn(intValue);
return;
} else if (TypeTags.isStringTypeTag(typeTag)) {
String val = String.valueOf(constVal);
int index = jvmConstantsGen.getBStringConstantVarIndex(val);
String varName = B_STRING_VAR_PREFIX + index;
String stringConstantsClass = getStringConstantsClass(index, jvmConstantsGen);
mv.visitFieldInsn(GETSTATIC, stringConstantsClass, varName, GET_BSTRING);
return;
}
switch (typeTag) {
case TypeTags.BYTE:
int byteValue = ((Number) constVal).intValue();
mv.visitLdcInsn(byteValue);
break;
case TypeTags.FLOAT:
double doubleValue = constVal instanceof Double ? (double) constVal :
Double.parseDouble(String.valueOf(constVal));
mv.visitLdcInsn(doubleValue);
break;
case TypeTags.BOOLEAN:
boolean booleanVal = constVal instanceof Boolean ? (boolean) constVal :
Boolean.parseBoolean(String.valueOf(constVal));
mv.visitLdcInsn(booleanVal);
break;
case TypeTags.DECIMAL:
mv.visitTypeInsn(NEW, DECIMAL_VALUE);
mv.visitInsn(DUP);
mv.visitLdcInsn(removeDecimalDiscriminator(String.valueOf(constVal)));
mv.visitMethodInsn(INVOKESPECIAL, DECIMAL_VALUE, JVM_INIT_METHOD, INIT_WITH_STRING, false);
break;
case TypeTags.NIL:
case TypeTags.NEVER:
mv.visitInsn(ACONST_NULL);
break;
default:
throw new BLangCompilerException("JVM generation is not supported for type : " + bType);
}
}
private static String getStringConstantsClass(int varIndex, JvmConstantsGen jvmConstantsGen) {
int classIndex = varIndex / MAX_STRINGS_PER_METHOD;
return jvmConstantsGen.getStringConstantsClass() + UNDERSCORE + classIndex;
}
private static String removeDecimalDiscriminator(String value) {
int length = value.length();
if (length < 2) {
return value;
}
char lastChar = value.charAt(length - 1);
if (lastChar == 'd' || lastChar == 'D') {
return value.substring(0, length - 1);
}
return value;
}
public static void createDefaultCase(MethodVisitor mv, Label defaultCaseLabel, int nameRegIndex,
String errorMessage) {
mv.visitLabel(defaultCaseLabel);
mv.visitTypeInsn(NEW, ERROR_VALUE);
mv.visitInsn(DUP);
mv.visitTypeInsn(NEW, STRING_BUILDER);
mv.visitInsn(DUP);
mv.visitLdcInsn(errorMessage);
mv.visitMethodInsn(INVOKESPECIAL, STRING_BUILDER, JVM_INIT_METHOD, INIT_WITH_STRING,
false);
mv.visitVarInsn(ALOAD, nameRegIndex);
mv.visitMethodInsn(INVOKEVIRTUAL, STRING_BUILDER, "append",
STRING_BUILDER_APPEND, false);
mv.visitMethodInsn(INVOKEVIRTUAL, STRING_BUILDER, JVM_TO_STRING_METHOD, GET_JSTRING
, false);
mv.visitMethodInsn(INVOKESTATIC, STRING_UTILS, "fromString", FROM_STRING, false);
mv.visitMethodInsn(INVOKESPECIAL, ERROR_VALUE, JVM_INIT_METHOD, INIT_ERROR,
false);
mv.visitInsn(ATHROW);
}
public static void castToJavaString(MethodVisitor mv, int fieldNameRegIndex, int strKeyVarIndex) {
mv.visitVarInsn(ALOAD, fieldNameRegIndex);
mv.visitTypeInsn(CHECKCAST, B_STRING_VALUE);
mv.visitMethodInsn(INVOKEINTERFACE, B_STRING_VALUE, GET_VALUE_METHOD,
GET_JSTRING, true);
mv.visitVarInsn(ASTORE, strKeyVarIndex);
}
private JvmCodeGenUtil() {
}
public static String getRefTypeConstantName(BTypeReferenceType type) {
return JvmConstants.TYPEREF_TYPE_VAR_PREFIX + Utils.encodeNonFunctionIdentifier(type.tsymbol.name.value);
}
public static void visitMaxStackForMethod(MethodVisitor mv, String funcName, String className) {
try {
mv.visitMaxs(0, 0);
} catch (Throwable e) {
throw new BLangCompilerException(
"error while generating method '" + Utils.decodeIdentifier(funcName) + "' in class '" +
Utils.decodeIdentifier(className) + "'", e);
}
}
}
|
|
What is this method for? Please add a comment or Javadoc explaining its purpose.
|
/**
 * Applies {@code replaceMap} to every tracked slot: each element of {@code slots}
 * and each member of every nested set in {@code slotSets} is substituted with its
 * mapping, falling back to the original slot when no mapping exists. Both
 * collections are rebuilt and reassigned rather than mutated in place.
 */
public void replace(Map<Slot, Slot> replaceMap) {
slots = slots.stream()
.map(s -> replaceMap.getOrDefault(s, s))
.collect(Collectors.toSet());
slotSets = slotSets.stream()
.map(set -> set.stream().map(s -> replaceMap.getOrDefault(s, s))
.collect(ImmutableSet.toImmutableSet()))
.collect(Collectors.toSet());
}
|
.map(set -> set.stream().map(s -> replaceMap.getOrDefault(s, s))
|
/**
 * Rewrites the slots tracked by both the uniform and unique sets through
 * {@code replaceMap}; slots without a mapping are kept unchanged.
 */
public void replace(Map<Slot, Slot> replaceMap) {
uniformSet.replace(replaceMap);
uniqueSet.replace(replaceMap);
}
|
class Builder {
private final NestedSet uniqueSet;
private final NestedSet uniformSet;
public Builder() {
uniqueSet = new NestedSet();
uniformSet = new NestedSet();
}
public Builder(FunctionalDependencies other) {
this.uniformSet = new NestedSet(other.uniformSet);
this.uniqueSet = new NestedSet(other.uniqueSet);
}
public void addUniformSlot(Slot slot) {
uniformSet.add(slot);
}
public void addUniformSlot(FunctionalDependencies functionalDependencies) {
uniformSet.add(functionalDependencies.uniformSet);
}
public void addUniformSlot(ImmutableSet<Slot> slotSet) {
uniformSet.add(slotSet);
}
public void addUniqueSlot(Slot slot) {
uniqueSet.add(slot);
}
public void addUniqueSlot(ImmutableSet<Slot> slotSet) {
uniqueSet.add(slotSet);
}
public void addFunctionalDependencies(FunctionalDependencies fd) {
uniformSet.add(fd.uniformSet);
uniqueSet.add(fd.uniqueSet);
}
public FunctionalDependencies build() {
return new FunctionalDependencies(uniqueSet.toImmutable(), uniformSet.toImmutable());
}
public void pruneSlots(Set<Slot> outputSlots) {
uniformSet.removeNotContain(outputSlots);
uniqueSet.removeNotContain(outputSlots);
}
}
|
/**
 * Builder for {@link FunctionalDependencies}. Collects uniform and unique slot
 * information into two {@code NestedSet}s and freezes them in {@link #build()}.
 */
class Builder {
    // Unique slots / slot sets collected so far.
    private final NestedSet uniqueSet;
    // Uniform slots / slot sets collected so far.
    private final NestedSet uniformSet;

    /** Starts from empty sets. */
    public Builder() {
        uniqueSet = new NestedSet();
        uniformSet = new NestedSet();
    }

    /** Starts from copies of the sets held by {@code other}. */
    public Builder(FunctionalDependencies other) {
        this.uniformSet = new NestedSet(other.uniformSet);
        this.uniqueSet = new NestedSet(other.uniqueSet);
    }

    public void addUniformSlot(Slot slot) {
        uniformSet.add(slot);
    }

    /** Adds only the uniform portion of {@code functionalDependencies}. */
    public void addUniformSlot(FunctionalDependencies functionalDependencies) {
        uniformSet.add(functionalDependencies.uniformSet);
    }

    public void addUniformSlot(ImmutableSet<Slot> slotSet) {
        uniformSet.add(slotSet);
    }

    public void addUniqueSlot(Slot slot) {
        uniqueSet.add(slot);
    }

    public void addUniqueSlot(ImmutableSet<Slot> slotSet) {
        uniqueSet.add(slotSet);
    }

    /** Adds both the uniform and unique portions of {@code fd}. */
    public void addFunctionalDependencies(FunctionalDependencies fd) {
        uniformSet.add(fd.uniformSet);
        uniqueSet.add(fd.uniqueSet);
    }

    /** Returns an immutable snapshot of the accumulated dependencies. */
    public FunctionalDependencies build() {
        return new FunctionalDependencies(uniqueSet.toImmutable(), uniformSet.toImmutable());
    }

    /** Drops entries not contained in {@code outputSlots} (see NestedSet.removeNotContain). */
    public void pruneSlots(Set<Slot> outputSlots) {
        uniformSet.removeNotContain(outputSlots);
        uniqueSet.removeNotContain(outputSlots);
    }
}
|
``` List<Library> deps = platform.libraries.stream().filter(library -> library.getModules() == null || Arrays.asList(library.getModules()).contains(moduleName)).toList(); ```
|
/**
 * Resolves the build target platform for the given module.
 * <p>
 * Template modules are always platform independent. Otherwise, if the manifest
 * declares platform libraries that apply to this module (a library applies when it
 * declares no module filter or explicitly lists the module), the configured
 * platform target is returned; it must be declared and supported by the installed
 * distribution.
 *
 * @param moduleName name of the module being built
 * @return the configured platform target, or {@code ProgramFileConstants.ANY_PLATFORM}
 *         when the module has no platform-specific dependencies
 * @throws BLangCompilerException if libraries are declared but the target is missing
 *         or unsupported
 */
public String getTargetPlatform(String moduleName) {
    // Template modules are platform independent.
    if (isTemplateModule(moduleName)) {
        return ProgramFileConstants.ANY_PLATFORM;
    }
    if (null != platform.libraries) {
        if (null == platform.target) {
            throw new BLangCompilerException("Platform target is not specified in the Ballerina.toml");
        }
        // noneMatch reads more directly than negating an anyMatch.
        if (Arrays.stream(ProgramFileConstants.SUPPORTED_PLATFORMS).noneMatch(platform.getTarget()::equals)) {
            throw new BLangCompilerException("Platform target is not " +
                    "supported by installed Ballerina distribution." +
                    "\nSupported platforms : " + supportedPlatforms());
        }
        // Only existence matters, so avoid materializing the filtered list.
        boolean hasPlatformDependency = platform.libraries.stream().anyMatch(library ->
                library.getModules() == null
                        || Arrays.stream(library.getModules()).anyMatch(moduleName::equals));
        if (hasPlatformDependency) {
            return platform.target;
        }
    }
    return ProgramFileConstants.ANY_PLATFORM;
}
|
).collect(Collectors.toList());
|
/**
 * Resolves the build target platform for the given module.
 * <p>
 * Template modules are platform independent. Otherwise the configured platform
 * target is returned when at least one declared platform library applies to this
 * module (no module filter, or the module is listed); the target must be declared
 * and supported by the installed distribution.
 *
 * @param moduleName name of the module being built
 * @return the configured platform target, or {@code ProgramFileConstants.ANY_PLATFORM}
 *         when the module has no platform-specific dependencies
 * @throws BLangCompilerException if libraries are declared but the target is missing
 *         or unsupported
 */
public String getTargetPlatform(String moduleName) {
    if (isTemplateModule(moduleName)) {
        return ProgramFileConstants.ANY_PLATFORM;
    }
    if (null != platform.libraries) {
        if (null == platform.target) {
            throw new BLangCompilerException("Platform target is not specified in the Ballerina.toml");
        }
        // noneMatch reads more directly than negating an anyMatch.
        if (Arrays.stream(ProgramFileConstants.SUPPORTED_PLATFORMS).noneMatch(platform.getTarget()::equals)) {
            throw new BLangCompilerException("Platform target is not " +
                    "supported by installed Ballerina distribution." +
                    "\nSupported platforms : " + supportedPlatforms());
        }
        // Only existence matters, so avoid materializing the filtered list.
        boolean hasPlatformDependency = platform.libraries.stream().anyMatch(library ->
                library.getModules() == null
                        || Arrays.stream(library.getModules()).anyMatch(moduleName::equals));
        if (hasPlatformDependency) {
            return platform.target;
        }
    }
    return ProgramFileConstants.ANY_PLATFORM;
}
|
/**
 * In-memory model of a Ballerina.toml manifest: project information, dependency
 * declarations, platform libraries, and build options.
 */
class Manifest {
    private Project project = new Project();
    // Raw dependency entries as parsed from TOML; values are either a version string
    // or a table (Map) with optional "version"/"path" keys.
    private Map<String, Object> dependencies = new LinkedHashMap<>();
    public Platform platform = new Platform();
    private BuildOptions buildOptions;

    public Project getProject() {
        return project;
    }

    public void setProject(Project project) {
        this.project = project;
    }

    /** Returns the raw dependency map with surrounding double quotes stripped from keys. */
    public Map<String, Object> getDependenciesAsObjectMap() {
        return this.dependencies.entrySet().stream()
                .collect(Collectors.toMap(d -> d.getKey().replaceAll("^\"|\"$", ""), Map.Entry::getValue));
    }

    /** Converts the raw dependency entries into typed {@link Dependency} objects. */
    public List<Dependency> getDependencies() {
        return this.dependencies.entrySet().stream()
                .map(entry -> {
                    Dependency dependency = new Dependency();
                    dependency.setModuleID(entry.getKey());
                    dependency.setMetadata(convertObjectToDependencyMetadata(entry.getValue()));
                    return dependency;
                })
                .collect(Collectors.toList());
    }

    /**
     * Interprets a raw TOML dependency value: a bare string is the version; a table
     * may carry "version" and/or "path" entries. Non-string values are ignored.
     */
    private DependencyMetadata convertObjectToDependencyMetadata(Object obj) {
        DependencyMetadata metadata = new DependencyMetadata();
        if (obj instanceof String) {
            metadata.setVersion((String) obj);
        } else if (obj instanceof Map) {
            // Wildcard bound instead of a raw type; the map comes from the TOML parser.
            Map<?, ?> metadataMap = (Map<?, ?>) obj;
            Object version = metadataMap.get("version");
            if (version instanceof String) {
                metadata.setVersion((String) version);
            }
            Object path = metadataMap.get("path");
            if (path instanceof String) {
                metadata.setPath((String) path);
            }
        }
        return metadata;
    }

    public Platform getPlatform() {
        return platform;
    }

    public void setPlatform(Platform platform) {
        this.platform = platform;
    }

    /** Comma-separated list of platforms supported by this distribution. */
    private String supportedPlatforms() {
        return String.join(",", ProgramFileConstants.SUPPORTED_PLATFORMS);
    }

    /** Whether the given module is declared as a template module in the project. */
    public boolean isTemplateModule(String moduleName) {
        return this.getProject().getTemplates().contains(moduleName);
    }

    public void setBuildOptions(BuildOptions buildOptions) {
        this.buildOptions = buildOptions;
    }

    public BuildOptions getBuildOptions() {
        return buildOptions;
    }
}
|
class Manifest {
private Project project = new Project();
private Map<String, Object> dependencies = new LinkedHashMap<>();
public Platform platform = new Platform();
private BuildOptions buildOptions;
public Project getProject() {
return project;
}
public void setProject(Project project) {
this.project = project;
}
public Map<String, Object> getDependenciesAsObjectMap() {
return this.dependencies.entrySet().stream()
.collect(Collectors.toMap(d -> d.getKey().replaceAll("^\"|\"$", ""), Map.Entry::getValue));
}
public List<Dependency> getDependencies() {
return this.dependencies.entrySet().stream()
.map(entry -> {
Dependency dependency = new Dependency();
dependency.setModuleID(entry.getKey());
dependency.setMetadata(convertObjectToDependencyMetadata(entry.getValue()));
return dependency;
})
.collect(Collectors.toList());
}
private DependencyMetadata convertObjectToDependencyMetadata(Object obj) {
DependencyMetadata metadata = new DependencyMetadata();
if (obj instanceof String) {
metadata.setVersion((String) obj);
} else if (obj instanceof Map) {
Map metadataMap = (Map) obj;
if (metadataMap.keySet().contains("version") && metadataMap.get("version") instanceof String) {
metadata.setVersion((String) metadataMap.get("version"));
}
if (metadataMap.keySet().contains("path") && metadataMap.get("path") instanceof String) {
metadata.setPath((String) metadataMap.get("path"));
}
}
return metadata;
}
public Platform getPlatform() {
return platform;
}
public void setPlatform(Platform platform) {
this.platform = platform;
}
private String supportedPlatforms() {
String platforms = String.join(",", ProgramFileConstants.SUPPORTED_PLATFORMS);
return platforms;
}
public boolean isTemplateModule(String moduleName) {
return this.getProject().getTemplates().contains(moduleName);
}
public void setBuildOptions(BuildOptions buildOptions) {
this.buildOptions = buildOptions;
}
public BuildOptions getBuildOptions() {
return buildOptions;
}
}
|
@cescoffier maybe we could merge and adjust later if needed? I'm a bit worried that we could have annoying conflicts, considering it touches a lot of places.
|
/**
 * Verifies the /health endpoint reports an overall "UP" outcome with exactly the
 * single "basic" check in the "UP" state.
 */
public void testHealth() {
    try {
        // Parse the response as JSON regardless of content type — presumably the
        // endpoint does not advertise a JSON content type; TODO confirm.
        RestAssured.defaultParser = Parser.JSON;
        RestAssured.when().get("/health").then()
                .body("outcome", is("UP"),
                        "checks.state", contains("UP"),
                        "checks.name", contains("basic"));
    } finally {
        // Restore RestAssured's global static configuration so other tests are unaffected.
        RestAssured.reset();
    }
}
|
RestAssured.defaultParser = Parser.JSON;
|
/**
 * Asserts that GET /health returns outcome "UP" and that the only check present is
 * the "basic" check in state "UP".
 */
public void testHealth() {
    try {
        // Force JSON body parsing; the default parser is mutated globally, hence the
        // finally-block reset below.
        RestAssured.defaultParser = Parser.JSON;
        RestAssured.when().get("/health").then()
                .body("outcome", is("UP"),
                        "checks.state", contains("UP"),
                        "checks.name", contains("basic"));
    } finally {
        // Undo the global RestAssured configuration change.
        RestAssured.reset();
    }
}
|
/**
 * Arquillian-style unit test for the MicroProfile Health endpoint.
 */
class HealthUnitTest {
    /** Deployment archive: the health check bean plus an empty beans.xml to enable CDI. */
    @Deployment
    public static JavaArchive deploy() {
        return ShrinkWrap.create(JavaArchive.class)
                .addClasses(BasicHealthCheck.class)
                .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
    }
    // NOTE(review): the @Test method body appears truncated in this extract.
    @Test
}
|
/**
 * Arquillian-style unit test for the MicroProfile Health endpoint.
 */
class HealthUnitTest {
    /** Builds the test deployment: only the BasicHealthCheck bean and an empty beans.xml. */
    @Deployment
    public static JavaArchive deploy() {
        return ShrinkWrap.create(JavaArchive.class)
                .addClasses(BasicHealthCheck.class)
                .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
    }
    // NOTE(review): the @Test method body appears truncated in this extract.
    @Test
}
|
Does this log entry still need to include the try count? It is already implied by the attempt count exceeding the maximum limit.
|
/**
 * Checks whether another redirect attempt is still allowed.
 *
 * @param tryCount number of attempts already performed for this request.
 * @return {@code true} if {@code tryCount} is below the configured maximum,
 *         {@code false} otherwise (an exhaustion error is logged).
 */
private boolean isValidRedirectCount(int tryCount) {
    if (tryCount >= getMaxAttempts()) {
        // The try count is implied by exceeding the limit, so only the limit is logged.
        LOGGER.atError()
            .addKeyValue("maxAttempts", getMaxAttempts())
            .log("Redirect attempts have been exhausted.");
        return false;
    }
    return true;
}
|
.addKeyValue(LoggingKeys.TRY_COUNT_KEY, tryCount)
|
/**
 * Determines whether another redirect attempt may be made; logs an exhaustion error
 * once the configured maximum has been reached.
 *
 * @param tryCount number of attempts already performed for this request.
 * @return {@code true} while {@code tryCount} is below the maximum, {@code false} otherwise.
 */
private boolean isValidRedirectCount(int tryCount) {
    boolean withinLimit = tryCount < getMaxAttempts();
    if (!withinLimit) {
        LOGGER.atError()
            .addKeyValue("maxAttempts", getMaxAttempts())
            .log("Redirect attempts have been exhausted.");
    }
    return withinLimit;
}
|
/**
 * Default {@link RedirectStrategy}: follows 301/302/307/308 responses for GET and HEAD
 * requests, up to a configurable maximum number of attempts, locating the redirect URL
 * via a configurable response header (default "Location").
 */
class DefaultRedirectStrategy implements RedirectStrategy {
    private static final ClientLogger LOGGER = new ClientLogger(DefaultRedirectStrategy.class);
    private static final int DEFAULT_MAX_REDIRECT_ATTEMPTS = 3;
    private static final String DEFAULT_REDIRECT_LOCATION_HEADER_NAME = "Location";
    // 308/307 have no constants on java.net.HttpURLConnection, so they are declared here.
    private static final int PERMANENT_REDIRECT_STATUS_CODE = 308;
    private static final int TEMPORARY_REDIRECT_STATUS_CODE = 307;
    private static final Set<HttpMethod> DEFAULT_REDIRECT_ALLOWED_METHODS =
        new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD));
    private final int maxAttempts;
    private final String locationHeader;
    private final Set<HttpMethod> allowedRedirectHttpMethods;
    /**
     * Creates an instance of {@link DefaultRedirectStrategy} with a maximum number of redirect attempts 3,
     * header name "Location" to locate the redirect url in the response headers,
     * and allowed methods {@link HttpMethod#GET} and {@link HttpMethod#HEAD}.
     */
    public DefaultRedirectStrategy() {
        this(DEFAULT_MAX_REDIRECT_ATTEMPTS, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS);
    }
    /**
     * Creates an instance of {@link DefaultRedirectStrategy} with the provided number of redirect attempts and
     * default header name "Location" to locate the redirect url in the response headers,
     * and allowed methods {@link HttpMethod#GET} and {@link HttpMethod#HEAD}.
     *
     * @param maxAttempts The max number of redirect attempts that can be made.
     * @throws IllegalArgumentException if {@code maxAttempts} is less than 0.
     */
    public DefaultRedirectStrategy(int maxAttempts) {
        this(maxAttempts, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS);
    }
    /**
     * Creates an instance of {@link DefaultRedirectStrategy}.
     *
     * @param maxAttempts The max number of redirect attempts that can be made.
     * @param locationHeader The header name containing the redirect URL.
     * @param allowedMethods The set of {@link HttpMethod} that are allowed to be redirected.
     * @throws IllegalArgumentException if {@code maxAttempts} is less than 0.
     */
    public DefaultRedirectStrategy(int maxAttempts, String locationHeader, Set<HttpMethod> allowedMethods) {
        if (maxAttempts < 0) {
            throw LOGGER.logExceptionAsError(new IllegalArgumentException("Max attempts cannot be less than 0."));
        }
        this.maxAttempts = maxAttempts;
        // Null/empty header or methods fall back to the defaults (logged, not thrown).
        if (CoreUtils.isNullOrEmpty(locationHeader)) {
            LOGGER.error("'locationHeader' provided as null will be defaulted to {}",
                DEFAULT_REDIRECT_LOCATION_HEADER_NAME);
            this.locationHeader = DEFAULT_REDIRECT_LOCATION_HEADER_NAME;
        } else {
            this.locationHeader = locationHeader;
        }
        if (CoreUtils.isNullOrEmpty(allowedMethods)) {
            LOGGER.error("'allowedMethods' provided as null will be defaulted to {}", DEFAULT_REDIRECT_ALLOWED_METHODS);
            this.allowedRedirectHttpMethods = DEFAULT_REDIRECT_ALLOWED_METHODS;
        } else {
            this.allowedRedirectHttpMethods = allowedMethods;
        }
    }
    @Override
    public boolean shouldAttemptRedirect(HttpPipelineCallContext context,
                                         HttpResponse httpResponse, int tryCount,
                                         Set<String> attemptedRedirectUrls) {
        // Redirect only when the status code, the attempt count, and the HTTP method all allow it,
        // and the target URL exists and has not been visited before (loop protection).
        if (isValidRedirectStatusCode(httpResponse.getStatusCode())
            && isValidRedirectCount(tryCount)
            && isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) {
            String redirectUrl = tryGetRedirectHeader(httpResponse.getHeaders(), getLocationHeader());
            if (redirectUrl != null && !alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) {
                LOGGER.atVerbose()
                    .addKeyValue(LoggingKeys.TRY_COUNT_KEY, tryCount)
                    .addKeyValue(LoggingKeys.REDIRECT_URL_KEY, redirectUrl)
                    .log("Redirecting.");
                attemptedRedirectUrls.add(redirectUrl);
                return true;
            } else {
                return false;
            }
        } else {
            return false;
        }
    }
    @Override
    public HttpRequest createRedirectRequest(HttpResponse httpResponse) {
        // Reuses the original request object, only swapping the URL for the redirect target.
        String responseLocation = tryGetRedirectHeader(httpResponse.getHeaders(), getLocationHeader());
        return httpResponse.getRequest().setUrl(responseLocation);
    }
    @Override
    public int getMaxAttempts() {
        return maxAttempts;
    }
    /*
     * The header name to look up the value for the redirect url in response headers.
     *
     * @return the value of the header, or null if the header doesn't exist in the response.
     */
    String getLocationHeader() {
        return locationHeader;
    }
    /*
     * The {@link HttpMethod http methods} that are allowed to be redirected.
     *
     * @return the set of allowed redirect http methods.
     */
    Set<HttpMethod> getAllowedRedirectHttpMethods() {
        return allowedRedirectHttpMethods;
    }
    /**
     * Check if the redirect url provided in the response headers is already attempted.
     *
     * @param redirectUrl the redirect url provided in the response header.
     * @param attemptedRedirectUrls the set containing a list of attempted redirect locations.
     * @return {@code true} if the redirectUrl provided in the response header is already being attempted for redirect
     * , {@code false} otherwise.
     */
    private boolean alreadyAttemptedRedirectUrl(String redirectUrl,
                                                Set<String> attemptedRedirectUrls) {
        if (attemptedRedirectUrls.contains(redirectUrl)) {
            LOGGER.atError()
                .addKeyValue(LoggingKeys.REDIRECT_URL_KEY, redirectUrl)
                .log("Request was redirected more than once to the same URL.");
            return true;
        }
        return false;
    }
    // NOTE(review): the Javadoc below documents isValidRedirectCount, whose body is not
    // present in this extract.
    /**
     * Check if the attempt count of the redirect is less than the {@code maxAttempts}
     *
     * @param tryCount the try count for the HTTP request associated to the HTTP response.
     * @return {@code true} if the {@code tryCount} is greater than the {@code maxAttempts}, {@code false} otherwise.
     */
    /**
     * Check if the request http method is a valid redirect method.
     *
     * @param httpMethod the http method of the request.
     * @return {@code true} if the request {@code httpMethod} is a valid http redirect method, {@code false} otherwise.
     */
    private boolean isAllowedRedirectMethod(HttpMethod httpMethod) {
        if (getAllowedRedirectHttpMethods().contains(httpMethod)) {
            return true;
        } else {
            LOGGER.atError()
                .addKeyValue(LoggingKeys.HTTP_METHOD_KEY, httpMethod)
                .log("Request was redirected from an invalid redirect allowed method.");
            return false;
        }
    }
    /**
     * Checks if the incoming request status code is a valid redirect status code.
     *
     * @param statusCode the status code of the incoming request.
     * @return {@code true} if the request {@code statusCode} is a valid http redirect method, {@code false} otherwise.
     */
    private boolean isValidRedirectStatusCode(int statusCode) {
        return statusCode == HttpURLConnection.HTTP_MOVED_TEMP
            || statusCode == HttpURLConnection.HTTP_MOVED_PERM
            || statusCode == PERMANENT_REDIRECT_STATUS_CODE
            || statusCode == TEMPORARY_REDIRECT_STATUS_CODE;
    }
    /**
     * Gets the redirect url from the response headers.
     *
     * @param headers the http response headers.
     * @param headerName the header name to look up value for.
     * @return the header value for the provided header name, {@code null} otherwise.
     */
    String tryGetRedirectHeader(HttpHeaders headers, String headerName) {
        String headerValue = headers.getValue(headerName);
        if (CoreUtils.isNullOrEmpty(headerValue)) {
            LOGGER.atError()
                .addKeyValue("headerName", headerName)
                .log("Redirect url was null, request redirect was terminated.");
            return null;
        } else {
            return headerValue;
        }
    }
}
|
/**
 * Default {@link RedirectStrategy}: follows 301/302/307/308 responses for GET and HEAD
 * requests, up to a configurable maximum number of attempts, locating the redirect URL
 * via a configurable response header (default "Location").
 */
class DefaultRedirectStrategy implements RedirectStrategy {
    private static final ClientLogger LOGGER = new ClientLogger(DefaultRedirectStrategy.class);
    private static final int DEFAULT_MAX_REDIRECT_ATTEMPTS = 3;
    private static final String DEFAULT_REDIRECT_LOCATION_HEADER_NAME = "Location";
    // 308/307 have no constants on java.net.HttpURLConnection, so they are declared here.
    private static final int PERMANENT_REDIRECT_STATUS_CODE = 308;
    private static final int TEMPORARY_REDIRECT_STATUS_CODE = 307;
    private static final Set<HttpMethod> DEFAULT_REDIRECT_ALLOWED_METHODS =
        new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD));
    // Log key under which the set of already-attempted redirect URLs is recorded.
    private static final String REDIRECT_URLS_KEY = "redirectUrls";
    private final int maxAttempts;
    private final String locationHeader;
    private final Set<HttpMethod> allowedRedirectHttpMethods;
    /**
     * Creates an instance of {@link DefaultRedirectStrategy} with a maximum number of redirect attempts 3,
     * header name "Location" to locate the redirect url in the response headers,
     * and allowed methods {@link HttpMethod#GET} and {@link HttpMethod#HEAD}.
     */
    public DefaultRedirectStrategy() {
        this(DEFAULT_MAX_REDIRECT_ATTEMPTS, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS);
    }
    /**
     * Creates an instance of {@link DefaultRedirectStrategy} with the provided number of redirect attempts and
     * default header name "Location" to locate the redirect url in the response headers,
     * and allowed methods {@link HttpMethod#GET} and {@link HttpMethod#HEAD}.
     *
     * @param maxAttempts The max number of redirect attempts that can be made.
     * @throws IllegalArgumentException if {@code maxAttempts} is less than 0.
     */
    public DefaultRedirectStrategy(int maxAttempts) {
        this(maxAttempts, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS);
    }
    /**
     * Creates an instance of {@link DefaultRedirectStrategy}.
     *
     * @param maxAttempts The max number of redirect attempts that can be made.
     * @param locationHeader The header name containing the redirect URL.
     * @param allowedMethods The set of {@link HttpMethod} that are allowed to be redirected.
     * @throws IllegalArgumentException if {@code maxAttempts} is less than 0.
     */
    public DefaultRedirectStrategy(int maxAttempts, String locationHeader, Set<HttpMethod> allowedMethods) {
        if (maxAttempts < 0) {
            throw LOGGER.logExceptionAsError(new IllegalArgumentException("Max attempts cannot be less than 0."));
        }
        this.maxAttempts = maxAttempts;
        // Null/empty header or methods fall back to the defaults (logged, not thrown).
        if (CoreUtils.isNullOrEmpty(locationHeader)) {
            LOGGER.error("'locationHeader' provided as null will be defaulted to {}",
                DEFAULT_REDIRECT_LOCATION_HEADER_NAME);
            this.locationHeader = DEFAULT_REDIRECT_LOCATION_HEADER_NAME;
        } else {
            this.locationHeader = locationHeader;
        }
        if (CoreUtils.isNullOrEmpty(allowedMethods)) {
            LOGGER.error("'allowedMethods' provided as null will be defaulted to {}", DEFAULT_REDIRECT_ALLOWED_METHODS);
            this.allowedRedirectHttpMethods = DEFAULT_REDIRECT_ALLOWED_METHODS;
        } else {
            this.allowedRedirectHttpMethods = allowedMethods;
        }
    }
    @Override
    public boolean shouldAttemptRedirect(HttpPipelineCallContext context,
                                         HttpResponse httpResponse, int tryCount,
                                         Set<String> attemptedRedirectUrls) {
        // Redirect only when the status code, the attempt count, and the HTTP method all allow it,
        // and the target URL exists and has not been visited before (loop protection).
        if (isValidRedirectStatusCode(httpResponse.getStatusCode())
            && isValidRedirectCount(tryCount)
            && isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) {
            String redirectUrl = tryGetRedirectHeader(httpResponse.getHeaders(), getLocationHeader());
            if (redirectUrl != null && !alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) {
                // The supplier defers the toString() cost unless verbose logging is enabled.
                LOGGER.atVerbose()
                    .addKeyValue(LoggingKeys.TRY_COUNT_KEY, tryCount)
                    .addKeyValue(REDIRECT_URLS_KEY, () -> attemptedRedirectUrls.toString())
                    .log("Redirecting.")
                attemptedRedirectUrls.add(redirectUrl);
                return true;
            } else {
                return false;
            }
        } else {
            return false;
        }
    }
    @Override
    public HttpRequest createRedirectRequest(HttpResponse httpResponse) {
        // Reuses the original request object, only swapping the URL for the redirect target.
        String responseLocation = tryGetRedirectHeader(httpResponse.getHeaders(), getLocationHeader());
        return httpResponse.getRequest().setUrl(responseLocation);
    }
    @Override
    public int getMaxAttempts() {
        return maxAttempts;
    }
    /*
     * The header name to look up the value for the redirect url in response headers.
     *
     * @return the value of the header, or null if the header doesn't exist in the response.
     */
    String getLocationHeader() {
        return locationHeader;
    }
    /*
     * The {@link HttpMethod http methods} that are allowed to be redirected.
     *
     * @return the set of allowed redirect http methods.
     */
    Set<HttpMethod> getAllowedRedirectHttpMethods() {
        return allowedRedirectHttpMethods;
    }
    /**
     * Check if the redirect url provided in the response headers is already attempted.
     *
     * @param redirectUrl the redirect url provided in the response header.
     * @param attemptedRedirectUrls the set containing a list of attempted redirect locations.
     * @return {@code true} if the redirectUrl provided in the response header is already being attempted for redirect
     * , {@code false} otherwise.
     */
    private boolean alreadyAttemptedRedirectUrl(String redirectUrl,
                                                Set<String> attemptedRedirectUrls) {
        if (attemptedRedirectUrls.contains(redirectUrl)) {
            LOGGER.atError()
                .addKeyValue(LoggingKeys.REDIRECT_URL_KEY, redirectUrl)
                .log("Request was redirected more than once to the same URL.");
            return true;
        }
        return false;
    }
    // NOTE(review): the Javadoc below documents isValidRedirectCount, whose body is not
    // present in this extract.
    /**
     * Check if the attempt count of the redirect is less than the {@code maxAttempts}
     *
     * @param tryCount the try count for the HTTP request associated to the HTTP response.
     * @return {@code true} if the {@code tryCount} is greater than the {@code maxAttempts}, {@code false} otherwise.
     */
    /**
     * Check if the request http method is a valid redirect method.
     *
     * @param httpMethod the http method of the request.
     * @return {@code true} if the request {@code httpMethod} is a valid http redirect method, {@code false} otherwise.
     */
    private boolean isAllowedRedirectMethod(HttpMethod httpMethod) {
        if (getAllowedRedirectHttpMethods().contains(httpMethod)) {
            return true;
        } else {
            LOGGER.atError()
                .addKeyValue(LoggingKeys.HTTP_METHOD_KEY, httpMethod)
                .log("Request was redirected from an invalid redirect allowed method.");
            return false;
        }
    }
    /**
     * Checks if the incoming request status code is a valid redirect status code.
     *
     * @param statusCode the status code of the incoming request.
     * @return {@code true} if the request {@code statusCode} is a valid http redirect method, {@code false} otherwise.
     */
    private boolean isValidRedirectStatusCode(int statusCode) {
        return statusCode == HttpURLConnection.HTTP_MOVED_TEMP
            || statusCode == HttpURLConnection.HTTP_MOVED_PERM
            || statusCode == PERMANENT_REDIRECT_STATUS_CODE
            || statusCode == TEMPORARY_REDIRECT_STATUS_CODE;
    }
    /**
     * Gets the redirect url from the response headers.
     *
     * @param headers the http response headers.
     * @param headerName the header name to look up value for.
     * @return the header value for the provided header name, {@code null} otherwise.
     */
    String tryGetRedirectHeader(HttpHeaders headers, String headerName) {
        String headerValue = headers.getValue(headerName);
        if (CoreUtils.isNullOrEmpty(headerValue)) {
            LOGGER.atError()
                .addKeyValue("headerName", headerName)
                .log("Redirect url header was null, request redirect was terminated.");
            return null;
        } else {
            return headerValue;
        }
    }
}
|
I think we need to call `setProperties` in `gsonPostProcess` as well to make sure all properties are set after metadata replay.
|
/**
 * Parses the raw ES connection properties into the typed fields (nodes, credentials,
 * and feature flags). Boolean flags keep their defaults when absent.
 *
 * @param properties raw catalog property map
 * @throws RuntimeException if the required hosts property is missing/blank, or if a
 *         boolean property value is malformed (wrapping the underlying DdlException)
 */
private void setProperties(Map<String, String> properties) {
    try {
        String hosts = properties.get(PROP_HOSTS);
        if (StringUtils.isBlank(hosts)) {
            // Fail fast with a clear message instead of an opaque NullPointerException.
            throw new RuntimeException("Required property '" + PROP_HOSTS + "' is missing or blank");
        }
        nodes = hosts.trim().split(",");
        if (properties.containsKey(PROP_SSL)) {
            enableSsl = EsUtil.getBoolean(properties, PROP_SSL);
        }
        if (StringUtils.isNotBlank(properties.get(PROP_USERNAME))) {
            username = properties.get(PROP_USERNAME).trim();
        }
        if (StringUtils.isNotBlank(properties.get(PROP_PASSWORD))) {
            password = properties.get(PROP_PASSWORD).trim();
        }
        if (properties.containsKey(PROP_DOC_VALUE_SCAN)) {
            enableDocValueScan = EsUtil.getBoolean(properties, PROP_DOC_VALUE_SCAN);
        }
        if (properties.containsKey(PROP_KEYWORD_SNIFF)) {
            enableKeywordSniff = EsUtil.getBoolean(properties, PROP_KEYWORD_SNIFF);
        }
        if (properties.containsKey(PROP_NODES_DISCOVERY)) {
            enableNodesDiscovery = EsUtil.getBoolean(properties, PROP_NODES_DISCOVERY);
        }
    } catch (DdlException e) {
        // Properties were validated at creation time, so a parse failure here is a bug.
        throw new RuntimeException("should not happen", e);
    }
}
|
try {
|
/**
 * Parses the raw ES connection properties into the typed fields (nodes, credentials,
 * and feature flags). Boolean flags keep their defaults when absent.
 *
 * @param properties raw catalog property map
 * @throws RuntimeException if the required hosts property is missing/blank, or if a
 *         boolean property value is malformed (wrapping the underlying DdlException)
 */
private void setProperties(Map<String, String> properties) {
    try {
        String hosts = properties.get(PROP_HOSTS);
        if (StringUtils.isBlank(hosts)) {
            // Fail fast with a clear message instead of an opaque NullPointerException.
            throw new RuntimeException("Required property '" + PROP_HOSTS + "' is missing or blank");
        }
        nodes = hosts.trim().split(",");
        if (properties.containsKey(PROP_SSL)) {
            enableSsl = EsUtil.getBoolean(properties, PROP_SSL);
        }
        if (StringUtils.isNotBlank(properties.get(PROP_USERNAME))) {
            username = properties.get(PROP_USERNAME).trim();
        }
        if (StringUtils.isNotBlank(properties.get(PROP_PASSWORD))) {
            password = properties.get(PROP_PASSWORD).trim();
        }
        if (properties.containsKey(PROP_DOC_VALUE_SCAN)) {
            enableDocValueScan = EsUtil.getBoolean(properties, PROP_DOC_VALUE_SCAN);
        }
        if (properties.containsKey(PROP_KEYWORD_SNIFF)) {
            enableKeywordSniff = EsUtil.getBoolean(properties, PROP_KEYWORD_SNIFF);
        }
        if (properties.containsKey(PROP_NODES_DISCOVERY)) {
            enableNodesDiscovery = EsUtil.getBoolean(properties, PROP_NODES_DISCOVERY);
        }
    } catch (DdlException e) {
        // Properties were validated at creation time, so a parse failure here is a bug.
        throw new RuntimeException("should not happen", e);
    }
}
|
/**
 * External catalog backed by an Elasticsearch cluster. Exposes a single default
 * database ({@code default_db}) whose tables are the ES indices.
 */
class EsExternalCatalog extends ExternalCatalog {
    private static final Logger LOG = LogManager.getLogger(EsExternalCatalog.class);

    public static final String DEFAULT_DB = "default_db";
    public static final String PROP_HOSTS = "elasticsearch.hosts";
    public static final String PROP_SSL = "elasticsearch.ssl";
    public static final String PROP_USERNAME = "elasticsearch.username";
    public static final String PROP_PASSWORD = "elasticsearch.password";
    public static final String PROP_DOC_VALUE_SCAN = "elasticsearch.doc_value_scan";
    public static final String PROP_KEYWORD_SNIFF = "elasticsearch.keyword_sniff";
    public static final String PROP_NODES_DISCOVERY = "elasticsearch.nodes_discovery";

    private EsRestClient esRestClient;
    // Typed connection settings derived from the property map via setProperties().
    private String[] nodes;
    private String username = null;
    private String password = null;
    private boolean enableDocValueScan = true;
    private boolean enableKeywordSniff = true;
    private boolean enableSsl = false;
    private boolean enableNodesDiscovery = true;

    /**
     * Default constructor for EsExternalCatalog.
     */
    public EsExternalCatalog(long catalogId, String name, Map<String, String> props) {
        this.id = catalogId;
        this.name = name;
        this.type = "es";
        setProperties(props);
        this.catalogProperty = new CatalogProperty();
        this.catalogProperty.setProperties(props);
    }

    /**
     * Datasource can't be init when creating because the external datasource may depend on third system.
     * So you have to make sure the client of third system is initialized before any method was called.
     */
    @Override
    public synchronized void makeSureInitialized() {
        if (!objectCreated) {
            esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
            objectCreated = true;
        }
        if (!initialized) {
            // Only the master performs initialization; followers forward the request.
            if (!Env.getCurrentEnv().isMaster()) {
                MasterCatalogExecutor remoteExecutor = new MasterCatalogExecutor();
                try {
                    remoteExecutor.forward(id, -1, -1);
                } catch (Exception e) {
                    Util.logAndThrowRuntimeException(LOG,
                            String.format("failed to forward init catalog %s operation to master.", name), e);
                }
                return;
            }
            init();
        }
    }

    /** Creates (or refreshes) the single default database and logs an edit-log entry. */
    private void init() {
        InitCatalogLog initCatalogLog = new InitCatalogLog();
        this.esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
        initCatalogLog.setCatalogId(id);
        initCatalogLog.setType(InitCatalogLog.Type.ES);
        if (dbNameToId != null && dbNameToId.containsKey(DEFAULT_DB)) {
            idToDb.get(dbNameToId.get(DEFAULT_DB)).setUnInitialized();
            initCatalogLog.addRefreshDb(dbNameToId.get(DEFAULT_DB));
        } else {
            dbNameToId = Maps.newConcurrentMap();
            idToDb = Maps.newConcurrentMap();
            long defaultDbId = Env.getCurrentEnv().getNextId();
            dbNameToId.put(DEFAULT_DB, defaultDbId);
            EsExternalDatabase db = new EsExternalDatabase(this, defaultDbId, DEFAULT_DB);
            idToDb.put(defaultDbId, db);
            initCatalogLog.addCreateDb(defaultDbId, DEFAULT_DB);
        }
        initialized = true;
        Env.getCurrentEnv().getEditLog().logInitCatalog(initCatalogLog);
    }

    @Override
    public List<String> listDatabaseNames(SessionContext ctx) {
        makeSureInitialized();
        return new ArrayList<>(dbNameToId.keySet());
    }

    @Override
    public List<String> listTableNames(SessionContext ctx, String dbName) {
        // Table names are ES indices; dbName is irrelevant since there is one database.
        return esRestClient.listTable();
    }

    @Nullable
    @Override
    public ExternalDatabase getDbNullable(String dbName) {
        makeSureInitialized();
        String realDbName = ClusterNamespace.getNameFromFullName(dbName);
        if (!dbNameToId.containsKey(realDbName)) {
            return null;
        }
        return idToDb.get(dbNameToId.get(realDbName));
    }

    @Nullable
    @Override
    public ExternalDatabase getDbNullable(long dbId) {
        makeSureInitialized();
        return idToDb.get(dbId);
    }

    @Override
    public boolean tableExist(SessionContext ctx, String dbName, String tblName) {
        return esRestClient.existIndex(this.esRestClient.getClient(), tblName);
    }

    @Override
    public List<Long> getDbIds() {
        return Lists.newArrayList(dbNameToId.values());
    }

    public ExternalDatabase getDbForReplay(long dbId) {
        return idToDb.get(dbId);
    }

    /**
     * Re-applies the connection properties after metadata replay so the transient typed
     * fields (nodes, credentials, flags) are repopulated from the persisted property map.
     */
    @Override
    public void gsonPostProcess() throws java.io.IOException {
        super.gsonPostProcess();
        setProperties(this.catalogProperty.getProperties());
    }
}
|
class EsExternalCatalog extends ExternalCatalog {
private static final Logger LOG = LogManager.getLogger(EsExternalCatalog.class);
public static final String DEFAULT_DB = "default_db";
public static final String PROP_HOSTS = "elasticsearch.hosts";
public static final String PROP_SSL = "elasticsearch.ssl";
public static final String PROP_USERNAME = "elasticsearch.username";
public static final String PROP_PASSWORD = "elasticsearch.password";
public static final String PROP_DOC_VALUE_SCAN = "elasticsearch.doc_value_scan";
public static final String PROP_KEYWORD_SNIFF = "elasticsearch.keyword_sniff";
public static final String PROP_NODES_DISCOVERY = "elasticsearch.nodes_discovery";
private EsRestClient esRestClient;
private String[] nodes;
private String username = null;
private String password = null;
private boolean enableDocValueScan = true;
private boolean enableKeywordSniff = true;
private boolean enableSsl = false;
private boolean enableNodesDiscovery = true;
/**
* Default constructor for EsExternalCatalog.
*/
public EsExternalCatalog(long catalogId, String name, Map<String, String> props) {
this.id = catalogId;
this.name = name;
this.type = "es";
setProperties(props);
this.catalogProperty = new CatalogProperty();
this.catalogProperty.setProperties(props);
}
/**
* Datasource can't be init when creating because the external datasource may depend on third system.
* So you have to make sure the client of third system is initialized before any method was called.
*/
@Override
public synchronized void makeSureInitialized() {
if (!objectCreated) {
esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
objectCreated = true;
}
if (!initialized) {
if (!Env.getCurrentEnv().isMaster()) {
MasterCatalogExecutor remoteExecutor = new MasterCatalogExecutor();
try {
remoteExecutor.forward(id, -1, -1);
} catch (Exception e) {
Util.logAndThrowRuntimeException(LOG,
String.format("failed to forward init catalog %s operation to master.", name), e);
}
return;
}
init();
}
}
private void init() {
InitCatalogLog initCatalogLog = new InitCatalogLog();
this.esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
initCatalogLog.setCatalogId(id);
initCatalogLog.setType(InitCatalogLog.Type.ES);
if (dbNameToId != null && dbNameToId.containsKey(DEFAULT_DB)) {
idToDb.get(dbNameToId.get(DEFAULT_DB)).setUnInitialized();
initCatalogLog.addRefreshDb(dbNameToId.get(DEFAULT_DB));
} else {
dbNameToId = Maps.newConcurrentMap();
idToDb = Maps.newConcurrentMap();
long defaultDbId = Env.getCurrentEnv().getNextId();
dbNameToId.put(DEFAULT_DB, defaultDbId);
EsExternalDatabase db = new EsExternalDatabase(this, defaultDbId, DEFAULT_DB);
idToDb.put(defaultDbId, db);
initCatalogLog.addCreateDb(defaultDbId, DEFAULT_DB);
}
initialized = true;
Env.getCurrentEnv().getEditLog().logInitCatalog(initCatalogLog);
}
@Override
public List<String> listDatabaseNames(SessionContext ctx) {
makeSureInitialized();
return new ArrayList<>(dbNameToId.keySet());
}
@Override
public List<String> listTableNames(SessionContext ctx, String dbName) {
return esRestClient.listTable();
}
@Nullable
@Override
public ExternalDatabase getDbNullable(String dbName) {
makeSureInitialized();
String realDbName = ClusterNamespace.getNameFromFullName(dbName);
if (!dbNameToId.containsKey(realDbName)) {
return null;
}
return idToDb.get(dbNameToId.get(realDbName));
}
@Nullable
@Override
public ExternalDatabase getDbNullable(long dbId) {
makeSureInitialized();
return idToDb.get(dbId);
}
@Override
public boolean tableExist(SessionContext ctx, String dbName, String tblName) {
    // Ensure the catalog is initialized first so esRestClient is guaranteed to
    // exist, matching the pattern of the other accessors in this class.
    makeSureInitialized();
    return esRestClient.existIndex(this.esRestClient.getClient(), tblName);
}
@Override
public List<Long> getDbIds() {
    // Return a defensive copy of the current database ids.
    return new ArrayList<>(dbNameToId.values());
}
// Replay path (edit-log replay): no lazy initialization here — return whatever
// database is currently known for this id, possibly null.
public ExternalDatabase getDbForReplay(long dbId) {
    ExternalDatabase db = idToDb.get(dbId);
    return db;
}
@Override
public void gsonPostProcess() throws IOException {
    // After GSON deserialization, re-derive transient state (nodes, credentials,
    // SSL flag) from the persisted catalog properties.
    super.gsonPostProcess();
    setProperties(this.catalogProperty.getProperties());
}
}
|
In this case, we would like a matching extension (if found) to be added as a `forcedDependency`. For example, in any Quarkus project the following should always work: ```sh mvn quarkus:deploy -Dquarkus.deploy.target=knative mvn quarkus:deploy -Dquarkus.deploy.target=openshift ``` Besides the hard-coding approach, which is what was originally used, I can think of two approaches that could possibly work: 1. Express deployment capabilities in the extension metadata and perform a registry lookup. 2. Use a convention like adding `quarkus-${quarkus.deploy.target}` to forced dependencies. **Option 1**: has the downside that it's slow. **Option 2**: is not the most reliable (e.g. in the existing codebase knative would still not work, as there is no `quarkus-knative` extension).
|
/**
 * Executes the deploy goal: asks installed extensions which deploy targets they
 * declare, then either falls back to the default deployer (no declarations),
 * reports an ambiguous/unknown target, or runs the custom deploy build for the
 * selected target.
 *
 * @throws MojoExecutionException if bootstrapping or the delegated build fails
 */
protected void doExecute() throws MojoExecutionException {
    try (CuratedApplication curatedApplication = bootstrapApplication()) {
        // Collect the deploy targets declared by the installed extensions.
        AtomicReference<List<String>> declaredTargets = new AtomicReference<>();
        AugmentAction action = curatedApplication.createAugmentor();
        action.performCustomBuild(DeployCommandDeclarationHandler.class.getName(), new Consumer<List<String>>() {
            @Override
            public void accept(List<String> strings) {
                declaredTargets.set(strings);
            }
        }, DeployCommandDeclarationResultBuildItem.class.getName());
        String target = System.getProperty("quarkus.deploy.target");
        List<String> targets = declaredTargets.get();
        if (targets.isEmpty() && target == null) {
            // No extension declares deploy support: delegate to the default deployer.
            systemProperties = new HashMap<>(systemProperties);
            boolean shouldBuildImage = imageBuild || imageBuilder != null && !imageBuilder.isEmpty();
            systemProperties.put("quarkus." + getDeployer().name() + ".deploy", "true");
            systemProperties.put("quarkus.container-image.build", String.valueOf(shouldBuildImage));
            super.doExecute();
        } else if (targets.size() > 1 && target == null) {
            // Ambiguous: several extensions could deploy, and the user picked none.
            getLog().error(
                    "Too many installed extensions support quarkus:deploy. You must choose one by setting quarkus.deploy.target.");
            getLog().error("Extensions: " + String.join(" ", targets));
        } else if (target != null && !targets.contains(target)) {
            getLog().error(
                    "Unknown quarkus.deploy.target: " + target);
            getLog().error("Extensions: " + String.join(" ", targets));
        } else {
            forceDependencies = false;
            if (target == null) {
                target = targets.get(0);
            }
            AugmentAction deployAction = curatedApplication.createAugmentor();
            getLog().info("Deploy target: " + target);
            // NOTE(review): mutates global JVM state and is never restored; the
            // DeployCommandHandler presumably reads it during the custom build — confirm.
            System.setProperty("quarkus.deploy.target", target);
            deployAction.performCustomBuild(DeployCommandHandler.class.getName(), new Consumer<Boolean>() {
                @Override
                public void accept(Boolean success) {
                }
            }, DeployCommandActionResultBuildItem.class.getName());
        }
    }
}
|
getLog().error(
|
/**
 * Executes the deploy goal: asks installed extensions which deploy targets they
 * declare, then either falls back to the default deployer (no declarations),
 * reports an ambiguous/unknown target, or runs the custom deploy build for the
 * selected target.
 *
 * @throws MojoExecutionException if bootstrapping or the delegated build fails
 */
protected void doExecute() throws MojoExecutionException {
    try (CuratedApplication curatedApplication = bootstrapApplication()) {
        // Collect the deploy targets declared by the installed extensions.
        AtomicReference<List<String>> declaredTargets = new AtomicReference<>();
        AugmentAction action = curatedApplication.createAugmentor();
        action.performCustomBuild(DeployCommandDeclarationHandler.class.getName(), new Consumer<List<String>>() {
            @Override
            public void accept(List<String> strings) {
                declaredTargets.set(strings);
            }
        }, DeployCommandDeclarationResultBuildItem.class.getName());
        String target = System.getProperty("quarkus.deploy.target");
        List<String> targets = declaredTargets.get();
        if (targets.isEmpty() && target == null) {
            // No extension declares deploy support: delegate to the default deployer.
            systemProperties = new HashMap<>(systemProperties);
            boolean shouldBuildImage = imageBuild || imageBuilder != null && !imageBuilder.isEmpty();
            systemProperties.put("quarkus." + getDeployer().name() + ".deploy", "true");
            systemProperties.put("quarkus.container-image.build", String.valueOf(shouldBuildImage));
            super.doExecute();
        } else if (targets.size() > 1 && target == null) {
            // Ambiguous: several extensions could deploy, and the user picked none.
            getLog().error(
                    "Too many installed extensions support quarkus:deploy. You must choose one by setting quarkus.deploy.target.");
            getLog().error("Extensions: " + String.join(" ", targets));
        } else if (target != null && !targets.contains(target)) {
            getLog().error(
                    "Unknown quarkus.deploy.target: " + target);
            getLog().error("Extensions: " + String.join(" ", targets));
        } else {
            forceDependencies = false;
            if (target == null) {
                target = targets.get(0);
            }
            AugmentAction deployAction = curatedApplication.createAugmentor();
            getLog().info("Deploy target: " + target);
            // NOTE(review): mutates global JVM state and is never restored; the
            // DeployCommandHandler presumably reads it during the custom build — confirm.
            System.setProperty("quarkus.deploy.target", target);
            deployAction.performCustomBuild(DeployCommandHandler.class.getName(), new Consumer<Boolean>() {
                @Override
                public void accept(Boolean success) {
                }
            }, DeployCommandActionResultBuildItem.class.getName());
        }
    }
}
|
class DeployMojo extends AbstractDeploymentMojo {
@Override
protected boolean beforeExecute() throws MojoExecutionException {
return super.beforeExecute();
}
@Override
}
|
class DeployMojo extends AbstractDeploymentMojo {
@Override
protected boolean beforeExecute() throws MojoExecutionException {
return super.beforeExecute();
}
@Override
}
|
I'll have a look at what the Flink runner does as well.
|
/**
 * Translates a Beam {@code GroupByKey} into a Spark {@code Dataset} operation,
 * choosing among several strategies depending on the windowing strategy and the
 * {@code useCollectList} flag. All branches produce the same logical result:
 * windowed {@code KV<K, Iterable<V>>} values.
 */
public void translate(GroupByKey<K, V> transform, Context cxt) {
  WindowingStrategy<?, ?> windowing = cxt.getInput().getWindowingStrategy();
  TimestampCombiner tsCombiner = windowing.getTimestampCombiner();
  Dataset<WindowedValue<KV<K, V>>> input = cxt.getDataset(cxt.getInput());
  KvCoder<K, V> inputCoder = (KvCoder<K, V>) cxt.getInput().getCoder();
  KvCoder<K, Iterable<V>> outputCoder = (KvCoder<K, Iterable<V>>) cxt.getOutput().getCoder();
  Encoder<V> valueEnc = cxt.valueEncoderOf(inputCoder);
  Encoder<K> keyEnc = cxt.keyEncoderOf(inputCoder);
  final Dataset<WindowedValue<KV<K, Iterable<V>>>> result;
  if (useCollectList && eligibleForGlobalGroupBy(windowing, false)) {
    // Global window, SQL path: aggregate values per key with collect_list.
    // Note: collect_list materializes all values of a key in memory.
    result =
        input
            .groupBy(col("value.key").as("key"))
            .agg(collect_list(col("value.value")).as("values"), timestampAggregator(tsCombiner))
            .select(
                inGlobalWindow(
                    keyValue(col("key").as(keyEnc), col("values").as(iterableEnc(valueEnc))),
                    windowTimestamp(tsCombiner)));
  } else if (eligibleForGlobalGroupBy(windowing, true)) {
    // Global window, typed path: Dataset.groupByKey with a lazily-consumed
    // iterable (iterableOnce) to avoid loading all values into memory.
    result =
        cxt.getDataset(cxt.getInput())
            .groupByKey(valueKey(), keyEnc)
            .mapValues(valueValue(), cxt.valueEncoderOf(inputCoder))
            .mapGroups(fun2((k, it) -> KV.of(k, iterableOnce(it))), cxt.kvEncoderOf(outputCoder))
            .map(fun1(WindowedValue::valueInGlobalWindow), cxt.windowedEncoder(outputCoder));
  } else if (useCollectList
      && eligibleForGroupByWindow(windowing, false)
      && (windowing.getWindowFn().assignsToOneWindow() || transform.fewKeys())) {
    // Windowed, SQL path: explode windows, then aggregate per (key, window).
    result =
        input
            .select(explode(col("windows")).as("window"), col("value"), col("timestamp"))
            .groupBy(col("value.key"), col("window"))
            .agg(collect_list(col("value.value")).as("values"), timestampAggregator(tsCombiner))
            .select(
                inSingleWindow(
                    keyValue(col("key").as(keyEnc), col("values").as(iterableEnc(valueEnc))),
                    col("window").as(cxt.windowEncoder()),
                    windowTimestamp(tsCombiner)));
  } else if (eligibleForGroupByWindow(windowing, true)
      && (windowing.getWindowFn().assignsToOneWindow() || transform.fewKeys())) {
    // Windowed, typed path: group on a (window, key) tuple with lazy iterables.
    Encoder<Tuple2<BoundedWindow, K>> windowedKeyEnc = windowedKeyEnc(keyEnc, cxt);
    result =
        cxt.getDataset(cxt.getInput())
            .flatMap(explodeWindowedKey(valueValue()), cxt.tupleEncoder(windowedKeyEnc, valueEnc))
            .groupByKey(fun1(Tuple2::_1), windowedKeyEnc)
            .mapValues(fun1(Tuple2::_2), valueEnc)
            .mapGroups(
                fun2((wKey, it) -> windowedKV(wKey, iterableOnce(it))),
                cxt.windowedEncoder(outputCoder));
  } else {
    // General fallback: buffer values per key and run Beam's
    // GroupAlsoByWindow machinery (handles merging windows, etc.).
    result =
        input
            .groupByKey(valueKey(), keyEnc)
            .flatMapGroups(
                new GroupAlsoByWindowViaOutputBufferFn<>(
                    windowing,
                    (SerStateInternalsFactory) key -> InMemoryStateInternals.forKey(key),
                    SystemReduceFn.buffering(inputCoder.getValueCoder()),
                    cxt.getSerializableOptions()),
                cxt.windowedEncoder(outputCoder));
  }
  cxt.putDataset(cxt.getOutput(), result);
}
|
/**
 * Translates a Beam {@code GroupByKey} into a Spark {@code Dataset} operation,
 * choosing among several strategies depending on the windowing strategy and the
 * {@code useCollectList} flag. All branches produce the same logical result:
 * windowed {@code KV<K, Iterable<V>>} values.
 */
public void translate(GroupByKey<K, V> transform, Context cxt) {
  WindowingStrategy<?, ?> windowing = cxt.getInput().getWindowingStrategy();
  TimestampCombiner tsCombiner = windowing.getTimestampCombiner();
  Dataset<WindowedValue<KV<K, V>>> input = cxt.getDataset(cxt.getInput());
  KvCoder<K, V> inputCoder = (KvCoder<K, V>) cxt.getInput().getCoder();
  KvCoder<K, Iterable<V>> outputCoder = (KvCoder<K, Iterable<V>>) cxt.getOutput().getCoder();
  Encoder<V> valueEnc = cxt.valueEncoderOf(inputCoder);
  Encoder<K> keyEnc = cxt.keyEncoderOf(inputCoder);
  final Dataset<WindowedValue<KV<K, Iterable<V>>>> result;
  if (useCollectList && eligibleForGlobalGroupBy(windowing, false)) {
    // Global window, SQL path: aggregate values per key with collect_list.
    // Note: collect_list materializes all values of a key in memory.
    result =
        input
            .groupBy(col("value.key").as("key"))
            .agg(collect_list(col("value.value")).as("values"), timestampAggregator(tsCombiner))
            .select(
                inGlobalWindow(
                    keyValue(col("key").as(keyEnc), col("values").as(iterableEnc(valueEnc))),
                    windowTimestamp(tsCombiner)));
  } else if (eligibleForGlobalGroupBy(windowing, true)) {
    // Global window, typed path: Dataset.groupByKey with a lazily-consumed
    // iterable (iterableOnce) to avoid loading all values into memory.
    result =
        cxt.getDataset(cxt.getInput())
            .groupByKey(valueKey(), keyEnc)
            .mapValues(valueValue(), cxt.valueEncoderOf(inputCoder))
            .mapGroups(fun2((k, it) -> KV.of(k, iterableOnce(it))), cxt.kvEncoderOf(outputCoder))
            .map(fun1(WindowedValue::valueInGlobalWindow), cxt.windowedEncoder(outputCoder));
  } else if (useCollectList
      && eligibleForGroupByWindow(windowing, false)
      && (windowing.getWindowFn().assignsToOneWindow() || transform.fewKeys())) {
    // Windowed, SQL path: explode windows, then aggregate per (key, window).
    result =
        input
            .select(explode(col("windows")).as("window"), col("value"), col("timestamp"))
            .groupBy(col("value.key"), col("window"))
            .agg(collect_list(col("value.value")).as("values"), timestampAggregator(tsCombiner))
            .select(
                inSingleWindow(
                    keyValue(col("key").as(keyEnc), col("values").as(iterableEnc(valueEnc))),
                    col("window").as(cxt.windowEncoder()),
                    windowTimestamp(tsCombiner)));
  } else if (eligibleForGroupByWindow(windowing, true)
      && (windowing.getWindowFn().assignsToOneWindow() || transform.fewKeys())) {
    // Windowed, typed path: group on a (window, key) tuple with lazy iterables.
    Encoder<Tuple2<BoundedWindow, K>> windowedKeyEnc =
        cxt.tupleEncoder(cxt.windowEncoder(), keyEnc);
    result =
        cxt.getDataset(cxt.getInput())
            .flatMap(explodeWindowedKey(valueValue()), cxt.tupleEncoder(windowedKeyEnc, valueEnc))
            .groupByKey(fun1(Tuple2::_1), windowedKeyEnc)
            .mapValues(fun1(Tuple2::_2), valueEnc)
            .mapGroups(
                fun2((wKey, it) -> windowedKV(wKey, iterableOnce(it))),
                cxt.windowedEncoder(outputCoder));
  } else {
    // General fallback: buffer values per key and run Beam's
    // GroupAlsoByWindow machinery (handles merging windows, etc.).
    result =
        input
            .groupByKey(valueKey(), keyEnc)
            .flatMapGroups(
                new GroupAlsoByWindowViaOutputBufferFn<>(
                    windowing,
                    (SerStateInternalsFactory) key -> InMemoryStateInternals.forKey(key),
                    SystemReduceFn.buffering(inputCoder.getValueCoder()),
                    cxt.getSerializableOptions()),
                cxt.windowedEncoder(outputCoder));
  }
  cxt.putDataset(cxt.getOutput(), result);
}
|
/**
 * Batch translator for Beam's {@code GroupByKey} on the Spark runner.
 * Holds the constants and helper functions used to build windowed
 * {@code KV<K, Iterable<V>>} rows via Spark SQL expressions.
 */
class GroupByKeyTranslatorBatch<K, V>
    extends GroupingTranslator<K, V, Iterable<V>, GroupByKey<K, V>> {
  /** Literal of binary encoded Pane info. */
  private static final Expression PANE_NO_FIRING = lit(toByteArray(NO_FIRING, PaneInfoCoder.of()));
  /** Defaults for value in single global window. */
  private static final List<Expression> GLOBAL_WINDOW_DETAILS =
      windowDetails(lit(new byte[][] {EMPTY_BYTE_ARRAY}));
  // When true, grouping uses Spark SQL collect_list; otherwise Dataset.groupByKey.
  private boolean useCollectList = true;
  public GroupByKeyTranslatorBatch() {}
  public GroupByKeyTranslatorBatch(boolean useCollectList) {
    this.useCollectList = useCollectList;
  }
  @Override
  // NOTE(review): the @Override-annotated translate(...) method is elided from this excerpt.
  /** Serializable In-memory state internals factory. */
  private interface SerStateInternalsFactory<K> extends StateInternalsFactory<K>, Serializable {}
  // Widens a collection encoder of V into an encoder of Iterable<V> (unchecked cast).
  private Encoder<Iterable<V>> iterableEnc(Encoder<V> enc) {
    return (Encoder) collectionEncoder(enc);
  }
  // Aggregation columns for the output timestamp: none for END_OF_WINDOW,
  // otherwise min/max of the per-element timestamps.
  private static Column[] timestampAggregator(TimestampCombiner tsCombiner) {
    if (tsCombiner.equals(TimestampCombiner.END_OF_WINDOW)) {
      return new Column[0];
    }
    Column agg =
        tsCombiner.equals(TimestampCombiner.EARLIEST)
            ? min(col("timestamp"))
            : max(col("timestamp"));
    return new Column[] {agg.as("timestamp")};
  }
  // Timestamp expression for the output row; null literal means END_OF_WINDOW.
  private static Expression windowTimestamp(TimestampCombiner tsCombiner) {
    if (tsCombiner.equals(TimestampCombiner.END_OF_WINDOW)) {
      return litNull(DataTypes.LongType);
    }
    return col("timestamp").expr();
  }
  /**
   * Java {@link Iterable} from Scala {@link Iterator} that can be iterated just once so that we
   * don't have to load all data into memory.
   */
  private static <T extends @NonNull Object> Iterable<T> iterableOnce(Iterator<T> it) {
    return () -> {
      checkState(!it.isEmpty(), "Iterator on values can only be consumed once!");
      return javaIterator(it);
    };
  }
  // Builds a struct column {key, value} encoded as KV<K, T>.
  private <T> TypedColumn<?, KV<K, T>> keyValue(TypedColumn<?, K> key, TypedColumn<?, T> value) {
    return struct(key.as("key"), value.as("value")).as(kvEncoder(key.encoder(), value.encoder()));
  }
  // Wraps a value column as a WindowedValue in the global window with NO_FIRING pane.
  private static <InT, T> TypedColumn<InT, WindowedValue<T>> inGlobalWindow(
      TypedColumn<?, T> value, Expression ts) {
    List<Expression> fields = concat(timestampedValue(value, ts), GLOBAL_WINDOW_DETAILS);
    Encoder<WindowedValue<T>> enc =
        windowedValueEncoder(value.encoder(), encoderOf(GlobalWindow.class));
    return (TypedColumn<InT, WindowedValue<T>>) new Column(new CreateNamedStruct(fields)).as(enc);
  }
  // Wraps a value column as a WindowedValue in exactly one given window.
  public static <InT, T> TypedColumn<InT, WindowedValue<T>> inSingleWindow(
      TypedColumn<?, T> value, TypedColumn<?, ? extends BoundedWindow> window, Expression ts) {
    Expression windows = new CreateArray(listOf(window.expr()));
    Seq<Expression> fields = concat(timestampedValue(value, ts), windowDetails(windows));
    Encoder<WindowedValue<T>> enc = windowedValueEncoder(value.encoder(), window.encoder());
    return (TypedColumn<InT, WindowedValue<T>>) new Column(new CreateNamedStruct(fields)).as(enc);
  }
  // Named-struct fields for {value, timestamp}.
  private static List<Expression> timestampedValue(Column value, Expression ts) {
    return seqOf(lit("value"), value.expr(), lit("timestamp"), ts).toList();
  }
  // Named-struct fields for {windows, pane} with a fixed NO_FIRING pane.
  private static List<Expression> windowDetails(Expression windows) {
    return seqOf(lit("windows"), windows, lit("pane"), PANE_NO_FIRING).toList();
  }
  // Catalyst literal from a (non-null) Java value.
  private static <T extends @NonNull Object> Expression lit(T t) {
    return Literal$.MODULE$.apply(t);
  }
  // Typed null literal.
  @SuppressWarnings("nullness")
  private static Expression litNull(DataType dataType) {
    return new Literal(null, dataType);
  }
}
|
/**
 * Batch translator for Beam's {@code GroupByKey} on the Spark runner.
 * Holds the constants and helper functions used to build windowed
 * {@code KV<K, Iterable<V>>} rows via Spark SQL expressions.
 */
class GroupByKeyTranslatorBatch<K, V>
    extends TransformTranslator<
        PCollection<KV<K, V>>, PCollection<KV<K, Iterable<V>>>, GroupByKey<K, V>> {
  /** Literal of binary encoded Pane info. */
  private static final Expression PANE_NO_FIRING = lit(toByteArray(NO_FIRING, PaneInfoCoder.of()));
  /** Defaults for value in single global window. */
  private static final List<Expression> GLOBAL_WINDOW_DETAILS =
      windowDetails(lit(new byte[][] {EMPTY_BYTE_ARRAY}));
  // When true, grouping uses Spark SQL collect_list; otherwise Dataset.groupByKey.
  private boolean useCollectList = true;
  public GroupByKeyTranslatorBatch() {}
  public GroupByKeyTranslatorBatch(boolean useCollectList) {
    this.useCollectList = useCollectList;
  }
  @Override
  // NOTE(review): the @Override-annotated translate(...) method is elided from this excerpt.
  /** Serializable In-memory state internals factory. */
  private interface SerStateInternalsFactory<K> extends StateInternalsFactory<K>, Serializable {}
  // Widens a collection encoder of V into an encoder of Iterable<V> (unchecked cast).
  private Encoder<Iterable<V>> iterableEnc(Encoder<V> enc) {
    return (Encoder) collectionEncoder(enc);
  }
  // Aggregation columns for the output timestamp: none for END_OF_WINDOW,
  // otherwise min/max of the per-element timestamps.
  private static Column[] timestampAggregator(TimestampCombiner tsCombiner) {
    if (tsCombiner.equals(TimestampCombiner.END_OF_WINDOW)) {
      return new Column[0];
    }
    Column agg =
        tsCombiner.equals(TimestampCombiner.EARLIEST)
            ? min(col("timestamp"))
            : max(col("timestamp"));
    return new Column[] {agg.as("timestamp")};
  }
  // Timestamp expression for the output row; null literal means END_OF_WINDOW.
  private static Expression windowTimestamp(TimestampCombiner tsCombiner) {
    if (tsCombiner.equals(TimestampCombiner.END_OF_WINDOW)) {
      return litNull(DataTypes.LongType);
    }
    return col("timestamp").expr();
  }
  /**
   * Java {@link Iterable} from Scala {@link Iterator} that can be iterated just once so that we
   * don't have to load all data into memory.
   */
  private static <T extends @NonNull Object> Iterable<T> iterableOnce(Iterator<T> it) {
    return () -> {
      checkState(!it.isEmpty(), "Iterator on values can only be consumed once!");
      return javaIterator(it);
    };
  }
  // Builds a struct column {key, value} encoded as KV<K, T>.
  private <T> TypedColumn<?, KV<K, T>> keyValue(TypedColumn<?, K> key, TypedColumn<?, T> value) {
    return struct(key.as("key"), value.as("value")).as(kvEncoder(key.encoder(), value.encoder()));
  }
  // Wraps a value column as a WindowedValue in the global window with NO_FIRING pane.
  private static <InT, T> TypedColumn<InT, WindowedValue<T>> inGlobalWindow(
      TypedColumn<?, T> value, Expression ts) {
    List<Expression> fields = concat(timestampedValue(value, ts), GLOBAL_WINDOW_DETAILS);
    Encoder<WindowedValue<T>> enc =
        windowedValueEncoder(value.encoder(), encoderOf(GlobalWindow.class));
    return (TypedColumn<InT, WindowedValue<T>>) new Column(new CreateNamedStruct(fields)).as(enc);
  }
  // Wraps a value column as a WindowedValue in exactly one given window.
  public static <InT, T> TypedColumn<InT, WindowedValue<T>> inSingleWindow(
      TypedColumn<?, T> value, TypedColumn<?, ? extends BoundedWindow> window, Expression ts) {
    Expression windows = new CreateArray(listOf(window.expr()));
    Seq<Expression> fields = concat(timestampedValue(value, ts), windowDetails(windows));
    Encoder<WindowedValue<T>> enc = windowedValueEncoder(value.encoder(), window.encoder());
    return (TypedColumn<InT, WindowedValue<T>>) new Column(new CreateNamedStruct(fields)).as(enc);
  }
  // Named-struct fields for {value, timestamp}.
  private static List<Expression> timestampedValue(Column value, Expression ts) {
    return seqOf(lit("value"), value.expr(), lit("timestamp"), ts).toList();
  }
  // Named-struct fields for {windows, pane} with a fixed NO_FIRING pane.
  private static List<Expression> windowDetails(Expression windows) {
    return seqOf(lit("windows"), windows, lit("pane"), PANE_NO_FIRING).toList();
  }
  // Catalyst literal from a (non-null) Java value.
  private static <T extends @NonNull Object> Expression lit(T t) {
    return Literal$.MODULE$.apply(t);
  }
  // Typed null literal.
  @SuppressWarnings("nullness")
  private static Expression litNull(DataType dataType) {
    return new Literal(null, dataType);
  }
}
|
|
I think a mapping constructor expression is more suitable here, as we don't know the error type completely yet.
|
/**
 * Type-checks an error constructor expression: resolves the optional user-provided
 * error type reference, infers the detail type from the provided arguments against
 * all candidate error types, selects the matching candidate, and validates the
 * named detail arguments against the selected detail type (map or record).
 * Sets {@code resultType} to the resolved error type or {@code semanticError}.
 */
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr);
    // Collect the detail type of every candidate error type.
    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);
    List<BType> errorDetailTypes = new ArrayList<>(expandedCandidates.size());
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) expandedCandidate).detailType;
        errorDetailTypes.add(detailType);
    }
    // Infer the detail type by silently checking a synthetic record literal built
    // from the named args against the union (or single) candidate detail type.
    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);
    int index = errorDetailTypes.indexOf(inferredDetailType);
    if (index < 0) {
        // No candidate detail type matched the provided arguments.
        resultType = symTable.semanticError;
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorType, expType);
        return;
    }
    BType selectedCandidate = expandedCandidates.get(index);
    // Fast path: the inferred candidate agrees with the user-provided type ref (if any).
    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) {
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
        resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }
    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
    }
    // Fall back to the user-provided error type, a single candidate, or plain `error`.
    BErrorType errorType;
    if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) {
        errorType = (BErrorType) userProvidedTypeRef.getBType();
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) expandedCandidates.get(0);
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);
    BType detailType = errorType.detailType;
    if (detailType.tag == TypeTags.MAP) {
        // Map detail: every named arg must be assignable to the map constraint.
        BType errorDetailTypeConstraint = ((BMapType) detailType).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (detailType.tag == TypeTags.RECORD) {
        // Record detail: check field-by-field, track missing required fields.
        BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType;
        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));
        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }
        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }
    // Prefer the user-provided type ref (even if erroneous) for the expression's type.
    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(userProvidedTypeRef.getBType());
    } else {
        errorConstructorExpr.setBType(errorType);
    }
    resultType = errorConstructorExpr.getBType();
}
|
dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorType, expType);
|
/**
 * Type-checks an error constructor expression: resolves the optional user-provided
 * error type reference, infers the detail type from the provided arguments against
 * all candidate error types, selects the matching candidate, and validates the
 * named detail arguments against the selected detail type (map or record).
 * Finally verifies the resolved type is assignable to the expected type and sets
 * {@code resultType} accordingly (or {@code semanticError}).
 */
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr);
    // Collect the detail type of every candidate error type.
    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);
    List<BType> errorDetailTypes = new ArrayList<>(expandedCandidates.size());
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) expandedCandidate).detailType;
        errorDetailTypes.add(detailType);
    }
    // Infer the detail type by silently checking a synthetic record literal built
    // from the named args against the union (or single) candidate detail type.
    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);
    int index = errorDetailTypes.indexOf(inferredDetailType);
    BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);
    // Fast path: the inferred candidate agrees with the user-provided type ref (if any).
    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) {
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
        resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }
    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
    }
    // Fall back to the user-provided error type, a single candidate, or plain `error`.
    // validTypeRefFound tracks whether a usable user-provided type ref was chosen,
    // to pick the more specific diagnostic in the final assignability check.
    boolean validTypeRefFound = false;
    BErrorType errorType;
    if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) {
        errorType = (BErrorType) userProvidedTypeRef.getBType();
        validTypeRefFound = true;
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) expandedCandidates.get(0);
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);
    BType detailType = errorType.detailType;
    if (detailType.tag == TypeTags.MAP) {
        // Map detail: every named arg must be assignable to the map constraint.
        BType errorDetailTypeConstraint = ((BMapType) detailType).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (detailType.tag == TypeTags.RECORD) {
        // Record detail: check field-by-field, track missing required fields.
        BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType;
        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));
        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }
        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }
    // Prefer the user-provided type ref (even if erroneous) for the expression's type.
    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(userProvidedTypeRef.getBType());
    } else {
        errorConstructorExpr.setBType(errorType);
    }
    // Final assignability check against the expected type, with a diagnostic that
    // distinguishes a wrong user-provided type ref from "no compatible type found".
    BType resolvedType = errorConstructorExpr.getBType();
    if (resolvedType != symTable.semanticError && expType != symTable.noType &&
            !types.isAssignable(resolvedType, expType)) {
        if (validTypeRefFound) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, userProvidedTypeRef);
        } else {
            dlog.error(errorConstructorExpr.pos,
                    DiagnosticErrorCode.ERROR_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        }
        resultType = symTable.semanticError;
        return;
    }
    resultType = resolvedType;
}
|
class TypeChecker extends BLangNodeVisitor {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
private static Set<String> listLengthModifierFunctions = new HashSet<>();
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;
private boolean nonErrorLoggingCheck = false;
private int letCount = 0;
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;
/**
* Expected types or inherited types.
*/
private BType expType;
private BType resultType;
private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
public static TypeChecker getInstance(CompilerContext context) {
TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY);
if (typeChecker == null) {
typeChecker = new TypeChecker(context);
}
return typeChecker;
}
public TypeChecker(CompilerContext context) {
context.put(TYPE_CHECKER_KEY, this);
this.names = Names.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.nodeCloner = NodeCloner.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeNarrower = TypeNarrower.getInstance(context);
this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
this.queryFinalClauses = new Stack<>();
this.queryEnvs = new Stack<>();
this.prevEnvs = new Stack<>();
this.unifier = new Unifier();
}
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
return checkExpr(expr, env, symTable.noType);
}
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
if (expr.typeChecked) {
return expr.getBType();
}
if (expType.tag == TypeTags.INTERSECTION) {
expType = ((BIntersectionType) expType).effectiveType;
}
SymbolEnv prevEnv = this.env;
BType preExpType = this.expType;
DiagnosticCode preDiagCode = this.diagCode;
this.env = env;
this.diagCode = diagCode;
this.expType = expType;
this.isTypeChecked = true;
expr.expectedType = expType;
expr.accept(this);
if (resultType.tag == TypeTags.INTERSECTION) {
resultType = ((BIntersectionType) resultType).effectiveType;
}
expr.setTypeCheckedType(resultType);
expr.typeChecked = isTypeChecked;
this.env = prevEnv;
this.expType = preExpType;
this.diagCode = preDiagCode;
validateAndSetExprExpectedType(expr);
return resultType;
}
private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
if (!nonErrorLoggingCheck) {
semanticAnalyzer.analyzeNode(node, env);
}
}
/**
 * Publishes the checked result type as the expression's expected type, except when
 * checking failed, or for a record literal that was checked against a map expected
 * type (its expected type must stay as the map).
 */
private void validateAndSetExprExpectedType(BLangExpression expr) {
    if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }
    // A mapping constructor checked against `map<T>` keeps the map expectation.
    boolean recordLiteralAgainstMap = expr.getKind() == NodeKind.RECORD_LITERAL_EXPR
            && expr.expectedType != null
            && expr.expectedType.tag == TypeTags.MAP
            && expr.getBType().tag == TypeTags.RECORD;
    if (!recordLiteralAgainstMap) {
        expr.expectedType = resultType;
    }
}
/**
 * Type checks a literal: first resolves (and possibly rewrites) its value against the
 * expected type, then runs the ordinary type check unless the literal already matched
 * a finite-type member or errored out.
 */
public void visit(BLangLiteral literalExpr) {
    BType inferred = setLiteralValueAndGetType(literalExpr, expType);
    boolean alreadyHandled = inferred == symTable.semanticError || literalExpr.isFiniteContext;
    if (!alreadyHandled) {
        resultType = types.checkType(literalExpr, inferred, expType);
    }
}
/**
 * Type checks an XML element access (`x.<elem>`): resolves the filter namespace
 * prefixes, checks the receiver as xml, and produces an xml element sequence.
 */
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    checkXMLNamespacePrefixes(xmlElementAccess.filters);
    checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
    // Element access always yields a sequence of xml elements.
    BType elementSeqType = symTable.xmlElementSeqType;
    resultType = types.checkType(xmlElementAccess, elementSeqType, expType);
}
/**
 * Type checks an XML navigation access (`x/...`): resolves filter namespace prefixes,
 * checks the optional child index and the navigated expression, and derives the
 * result type from the navigation kind.
 *
 * <p>Improvement: the navigation-kind condition was previously evaluated twice (once
 * to compute {@code actualType}, once again to set {@code resultType} to the same
 * value); the duplicate branch is removed.
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    checkXMLNamespacePrefixes(xmlNavigation.filters);
    if (xmlNavigation.childIndex != null) {
        checkExpr(xmlNavigation.childIndex, env, symTable.intType);
    }
    BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);
    if (exprType.tag == TypeTags.UNION) {
        dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
                xmlNavigation.expr.getBType());
    }
    // Children navigation yields `xml`; filtered navigation yields an element sequence.
    BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN
            ? symTable.xmlType : symTable.xmlElementSeqType;
    types.checkType(xmlNavigation, actualType, expType);
    resultType = actualType;
}
/**
 * Resolves the namespace prefix of each XML element filter, recording the resolved
 * symbol on the filter and reporting filters whose prefix is not in scope.
 */
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
    for (BLangXMLElementFilter filter : filters) {
        if (filter.namespace.isEmpty()) {
            continue;
        }
        Name nsName = names.fromString(filter.namespace);
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
        filter.namespaceSymbol = nsSymbol;
        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
        }
    }
}
/**
 * Resolves the type of a literal against the expected type, possibly rewriting the
 * literal's stored value on the way (e.g. an int literal checked against float is
 * stored as a double; against decimal it is stored as its string form). For finite
 * and union expected types the literal is matched against member types/value spaces,
 * and on a finite match {@code setLiteralValueForFiniteType} short-circuits the
 * ordinary check via {@code literalExpr.isFiniteContext}.
 *
 * @param literalExpr the literal being checked; its {@code value} may be mutated
 * @param expType     the expected type to resolve against
 * @return the resolved literal type, or {@code symTable.semanticError}
 */
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
    BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
    Object literalValue = literalExpr.value;
    if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) {
        // --- integer/byte literal ---
        if (expType.tag == TypeTags.FLOAT) {
            // Widen: int literal in float context becomes a float literal.
            literalType = symTable.floatType;
            literalExpr.value = ((Long) literalValue).doubleValue();
        } else if (expType.tag == TypeTags.DECIMAL &&
                !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) {
            // Hex int literals are not valid decimal values, hence the indicator check.
            literalType = symTable.decimalType;
            literalExpr.value = String.valueOf(literalValue);
        } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) {
            // Validate range for int subtypes (byte, SignedN/UnsignedN).
            literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue);
            if (literalType == symTable.semanticError) {
                return symTable.semanticError;
            }
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            // Try the literal against each numeric basic type present in the value space,
            // in the language's preference order.
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            // Scan the members once: prefer int (or an int-compatible broad type),
            // then an int subtype, then finite members, then byte/float/decimal.
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            BType intSubType = null;
            boolean intOrIntCompatibleTypeFound = false;
            for (BType memType : memberTypes) {
                if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) ||
                        memType.tag == TypeTags.BYTE) {
                    intSubType = memType;
                } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
                        memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) {
                    intOrIntCompatibleTypeFound = true;
                }
            }
            if (intOrIntCompatibleTypeFound) {
                return setLiteralValueAndGetType(literalExpr, symTable.intType);
            }
            if (intSubType != null) {
                return setLiteralValueAndGetType(literalExpr, intSubType);
            }
            BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
                return setLiteralValueAndGetType(literalExpr, symTable.byteType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
                return setLiteralValueAndGetType(literalExpr, symTable.floatType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
                return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
        }
    } else if (literalType.tag == TypeTags.FLOAT) {
        // --- float literal: may narrow to decimal unless explicitly float-discriminated/hex ---
        String literal = String.valueOf(literalValue);
        String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
        boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);
        if (expType.tag == TypeTags.DECIMAL) {
            if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
                // `1.0f` or a hex float can never be a decimal value.
                dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                        symTable.floatType);
                resultType = symTable.semanticError;
                return resultType;
            }
            literalType = symTable.decimalType;
            literalExpr.value = numericLiteral;
        } else if (expType.tag == TypeTags.FLOAT) {
            literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (!isDiscriminatedFloat
                    && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
    } else if (literalType.tag == TypeTags.DECIMAL) {
        return decimalLiteral(literalValue, literalExpr, expType);
    } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
        // --- single-character string literal: may be string:Char ---
        if (expType.tag == TypeTags.CHAR_STRING) {
            return symTable.charStringType;
        }
        if (expType.tag == TypeTags.UNION) {
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            for (BType memType : memberTypes) {
                if (TypeTags.isStringTypeTag(memType.tag)) {
                    return setLiteralValueAndGetType(literalExpr, memType);
                } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
                        memType.tag == TypeTags.ANY) {
                    return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
                } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
                        literalExpr)) {
                    setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
                    return literalType;
                }
            }
        }
        boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    } else {
        // --- all other literals (boolean, multi-char string, nil, ...) ---
        if (this.expType.tag == TypeTags.FINITE) {
            boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else if (this.expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) this.expType;
            boolean foundMember = unionType.getMemberTypes()
                    .stream()
                    .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        }
    }
    // base16/base64 literals are typed as byte[].
    if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
        literalType = new BArrayType(symTable.byteType);
    }
    return literalType;
}
/**
 * Tries to resolve a numeric literal against a union expected type: first against a
 * member matching {@code desiredType} (or a broad json/anydata/any member), then
 * against finite members whose value spaces contain float values, then decimal
 * members, then finite members with decimal values.
 *
 * @return the resolved type, or {@code symTable.noType} when no member applied
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = expType.getMemberTypes();
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag
                    || memType.tag == TypeTags.JSON
                    || memType.tag == TypeTags.ANYDATA
                    || memType.tag == TypeTags.ANY)) {
        return setLiteralValueAndGetType(literalExpr, desiredType);
    }
    // NOTE(review): this always filters finite members by FLOAT here even though
    // callers also pass desiredType == decimal — presumably intentional preference
    // order (float before decimal), but confirm; symTable.floatType is not desiredType.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
        return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
    }
    finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    return symTable.noType;
}
/**
 * Returns true when the finite type's value space contains a member of the given
 * basic type tag to which this literal is assignable.
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        boolean tagMatches = valueExpr.getBType().tag == targetMemberTypeTag;
        if (tagMatches
                && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves a decimal literal against the expected type. A decimal-discriminated
 * literal (`1.0d`) in a float context is an error; finite and union expected types
 * are matched against their members; otherwise the literal resolves to decimal with
 * its discriminator stripped.
 */
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
    String literal = String.valueOf(literalValue);
    boolean decimalDiscriminated = NumericLiteralSupport.isDecimalDiscriminated(literal);
    if (expType.tag == TypeTags.FLOAT && decimalDiscriminated) {
        // `1.0d` can never be a float value.
        dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                symTable.decimalType);
        resultType = symTable.semanticError;
        return resultType;
    }
    if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
        BFiniteType finiteType = (BFiniteType) expType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            setLiteralValueForFiniteType(literalExpr, valueType);
            return valueType;
        }
    } else if (expType.tag == TypeTags.UNION) {
        BType unionMember =
                getAndSetAssignableUnionMember(literalExpr, (BUnionType) expType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    // Default: plain decimal; drop any `d` discriminator from the stored value.
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
    resultType = symTable.decimalType;
    return symTable.decimalType;
}
/**
 * Records that {@code literalExpr} matched a finite-type context: inserts an implicit
 * cast from {@code type} to the overall expected type, publishes {@code type} as the
 * result, and flags the literal so callers skip the ordinary type-checking path.
 */
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}
/**
 * Builds a synthetic finite type collecting every value, across all finite members of
 * the union, whose basic type matches {@code matchType}; returns semanticError when
 * there are no finite members or no matching values.
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteMembers = new ArrayList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FINITE) {
            finiteMembers.add((BFiniteType) memberType);
        }
    }
    if (finiteMembers.isEmpty()) {
        return symTable.semanticError;
    }
    int targetTag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
    for (BFiniteType finiteMember : finiteMembers) {
        // Collect per-member into an intermediate HashSet (as before) so the overall
        // iteration order stays the same as the original implementation.
        Set<BLangExpression> matchesInMember = new HashSet<>();
        for (BLangExpression valueExpr : finiteMember.getValueSpace()) {
            if (valueExpr.getBType().tag == targetTag) {
                matchesInMember.add(valueExpr);
            }
        }
        matchedValueSpace.addAll(matchesInMember);
    }
    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    return new BFiniteType(null, matchedValueSpace);
}
/**
 * Validates an integer literal against an integer subtype expectation (int, byte,
 * SignedN/UnsignedN). Returns the matching subtype when the value fits its range;
 * otherwise logs an incompatible-types error and returns semanticError.
 */
private BType getIntLiteralType(Location location, BType expType, BType literalType,
                                Object literalValue) {
    int expTag = expType.tag;
    if (expTag == TypeTags.INT) {
        return symTable.intType;
    }
    if (expTag == TypeTags.BYTE && types.isByteLiteralValue((Long) literalValue)) {
        return symTable.byteType;
    }
    if (expTag == TypeTags.SIGNED32_INT && types.isSigned32LiteralValue((Long) literalValue)) {
        return symTable.signed32IntType;
    }
    if (expTag == TypeTags.SIGNED16_INT && types.isSigned16LiteralValue((Long) literalValue)) {
        return symTable.signed16IntType;
    }
    if (expTag == TypeTags.SIGNED8_INT && types.isSigned8LiteralValue((Long) literalValue)) {
        return symTable.signed8IntType;
    }
    if (expTag == TypeTags.UNSIGNED32_INT && types.isUnsigned32LiteralValue((Long) literalValue)) {
        return symTable.unsigned32IntType;
    }
    if (expTag == TypeTags.UNSIGNED16_INT && types.isUnsigned16LiteralValue((Long) literalValue)) {
        return symTable.unsigned16IntType;
    }
    if (expTag == TypeTags.UNSIGNED8_INT && types.isUnsigned8LiteralValue((Long) literalValue)) {
        return symTable.unsigned8IntType;
    }
    // Unknown subtype tag or out-of-range value: report and fail.
    dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType);
    resultType = symTable.semanticError;
    return resultType;
}
/**
 * Type checks a list constructor. With no concrete expectation (none/readonly) the
 * tuple type is inferred from the members; otherwise compatibility with the expected
 * type is checked directly.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    int expTag = expType.tag;
    if (expTag != TypeTags.NONE && expTag != TypeTags.READONLY) {
        resultType = checkListConstructorCompatibility(expType, listConstructor);
        return;
    }
    BType inferredType = getInferredTupleType(listConstructor, expType);
    resultType = inferredType == symTable.semanticError
            ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, expType);
}
/**
 * Type checks a table constructor expression.
 *
 * <p>Three expected-type shapes are handled:
 * <ul>
 *   <li>none/any/anydata — the row type is inferred from the member record literals;</li>
 *   <li>table — each row is checked against the table's constraint, and the key
 *       specifier is validated against the key constraint;</li>
 *   <li>union — each member is tried silently (dlog muted, expression cloned per
 *       attempt); exactly one match is required.</li>
 * </ul>
 * Writes the outcome into {@code resultType}.
 */
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
        // --- infer the row type from the member records ---
        List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
        }
        // An empty constructor gives nothing to infer the member type from.
        if (tableConstructorExpr.recordLiteralList.size() == 0) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
            resultType = symTable.semanticError;
            return;
        }
        BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
        BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            recordLiteral.setBType(inherentMemberType);
        }
        if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
            resultType = symTable.semanticError;
            return;
        }
        // checkKeySpecifier returns true when it has already set an error result.
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        resultType = tableType;
        return;
    }
    BType applicableExpType = expType.tag == TypeTags.INTERSECTION ?
            ((BIntersectionType) expType).effectiveType : expType;
    if (applicableExpType.tag == TypeTags.TABLE) {
        // --- check each row against the expected table's constraint ---
        List<BType> memTypes = new ArrayList<>();
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            BLangRecordLiteral clonedExpr = recordLiteral;
            if (this.nonErrorLoggingCheck) {
                // Clone on silent passes so mutations from failed checks don't stick.
                clonedExpr.cloneAttempt++;
                clonedExpr = nodeCloner.cloneNode(recordLiteral);
            }
            BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
            if (recordType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
            memTypes.add(recordType);
        }
        BTableType expectedTableType = (BTableType) applicableExpType;
        if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) {
            validateMapConstraintTable(applicableExpType);
            return;
        }
        if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType,
                tableConstructorExpr.recordLiteralList) &&
                validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
            resultType = symTable.semanticError;
            return;
        }
        BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                null);
        if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
            tableType.flags |= Flags.READONLY;
        }
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        // Inherit the key field list from the expected type when the constructor
        // itself did not declare one.
        if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
            tableType.fieldNameList = expectedTableType.fieldNameList;
        }
        resultType = tableType;
    } else if (applicableExpType.tag == TypeTags.UNION) {
        // --- try each union member silently; exactly one must match ---
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> matchingTypes = new ArrayList<>();
        BUnionType expectedType = (BUnionType) applicableExpType;
        for (BType memType : expectedType.getMemberTypes()) {
            dlog.resetErrorCount();
            BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
            }
            BType resultType = checkExpr(clonedTableExpr, env, memType);
            if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(matchingTypes, resultType)) {
                matchingTypes.add(resultType);
            }
        }
        // Restore diagnostic state before reporting the outcome.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (matchingTypes.isEmpty()) {
            BLangTableConstructorExpr exprToLog = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
            }
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getInferredTableType(exprToLog));
        } else if (matchingTypes.size() != 1) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
        } else {
            // Re-check for real (with logging) against the single matching member.
            resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
            return;
        }
        resultType = symTable.semanticError;
    } else {
        resultType = symTable.semanticError;
    }
}
/**
 * Infers a table type for the given constructor (used when building an error message):
 * checks every member record and derives the row type, or returns semanticError if
 * any member failed to check.
 */
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
    List<BType> memberTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    BType rowType = inferTableMemberType(memberTypes, exprToLog);
    return new BTableType(TypeTags.TABLE, rowType, null);
}
/**
 * Validates the constructor's key specifier (when present): every key field of every
 * row must be a constant expression. On success the key field names are recorded on
 * {@code tableType}.
 *
 * <p>Improvement: {@code getTableKeyNameList} was computed twice (once for validation,
 * once to store on the table type); it is now computed once.
 *
 * @return {@code true} when validation failed and {@code resultType} was already set
 *         to semanticError; {@code false} when the caller should continue
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    if (keySpecifier == null) {
        return false;
    }
    List<String> fieldNameList = getTableKeyNameList(keySpecifier);
    if (!validateTableKeyValue(fieldNameList, tableConstructorExpr.recordLiteralList)) {
        resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = fieldNameList;
    return false;
}
/**
 * Derives the table member (row) type from the checked row types: rows assignable to
 * the accumulated union are absorbed, others extend it. A single distinct type is
 * returned as-is; otherwise the union is returned. Empty input falls back to the
 * expected table's constraint.
 */
private BType inferTableMemberType(List<BType> memTypes, BType expType) {
    if (memTypes.isEmpty()) {
        return ((BTableType) expType).constraint;
    }
    LinkedHashSet<BType> distinctTypes = new LinkedHashSet<>();
    distinctTypes.add(memTypes.get(0));
    BUnionType accumulated = BUnionType.create(null, distinctTypes);
    for (int idx = 1; idx < memTypes.size(); idx++) {
        BType candidate = memTypes.get(idx);
        if (!types.isAssignable(candidate, accumulated)) {
            // Not covered yet: widen the union with this row type.
            distinctTypes.add(candidate);
            accumulated = BUnionType.create(null, distinctTypes);
        }
    }
    return accumulated.getMemberTypes().size() == 1 ? memTypes.get(0) : accumulated;
}
/**
 * Infers the row (member) record type for a table constructor with no expected type:
 * unions the fields of all row records, marking fields present in every row as
 * required (and read-only when they are key fields) and all others as optional.
 * Duplicate field names with conflicting definitions are an error.
 *
 * @return the synthesized record type, or {@code symTable.semanticError} on ambiguity
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    // Union of all fields across rows (insertion-ordered).
    Set<BField> allFieldSet = new LinkedHashSet<>();
    for (BType memType : memTypes) {
        allFieldSet.addAll(((BRecordType) memType).fields.values());
    }
    // Intersection: fields present in every row.
    Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet);
    for (BType memType : memTypes) {
        commonFieldSet.retainAll(((BRecordType) memType).fields.values());
    }
    List<String> requiredFieldNames = new ArrayList<>();
    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            requiredFieldNames.add(((BLangIdentifier) identifierNode).value);
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }
    List<String> fieldNames = new ArrayList<>();
    for (BField field : allFieldSet) {
        String fieldName = field.name.value;
        // Same name appearing twice in allFieldSet means the rows define it
        // incompatibly — the member type cannot be inferred.
        if (fieldNames.contains(fieldName)) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY,
                    fieldName);
            return symTable.semanticError;
        }
        fieldNames.add(fieldName);
        boolean isOptional = true;
        for (BField commonField : commonFieldSet) {
            if (commonField.name.value.equals(fieldName)) {
                isOptional = false;
                requiredFieldNames.add(commonField.name.value);
            }
        }
        if (isOptional) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
        } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
            // Key fields are both required and read-only. Adding the two masks is
            // equivalent to OR-ing them since the flag bits are distinct.
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY));
        } else if (requiredFieldNames.contains(fieldName)) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
        }
    }
    return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
}
/**
 * Materializes a sealed, anonymous record type from the given fields to serve as the
 * inferred table row type: creates the type symbol, defines each field in its scope,
 * builds the matching AST type node with an init function, and registers the type
 * definition in the enclosing package.
 */
private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
    for (BField field : allFieldSet) {
        recordSymbol.scope.define(field.name, field.symbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = allFieldSet.stream().collect(getFieldCollector());
    // Link the symbol and type in both directions.
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    // Inferred row types are closed: no extra fields allowed.
    recordType.sealed = true;
    recordType.restFieldType = symTable.noType;
    return recordType;
}
/**
 * Returns a collector mapping fields by name into an insertion-ordered map; duplicate
 * names are a programming error and throw {@link IllegalStateException}.
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (existing, duplicate) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", existing));
            },
            LinkedHashMap::new);
}
/**
 * Validates that an inline-defined table type's constraint is a subtype of map;
 * logs an error and sets semanticError on failure.
 *
 * @return {@code true} when the table type is valid
 */
private boolean validateTableType(BTableType tableType) {
    BType constraint = tableType.constraint;
    boolean invalidInlineConstraint =
            tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType);
    if (invalidInlineConstraint) {
        dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * When the expected table type declares key fields, verifies each row supplies a
 * constant value for every key field; trivially valid otherwise.
 */
private boolean validateKeySpecifierInTableConstructor(BTableType tableType,
                                                       List<BLangRecordLiteral> recordLiterals) {
    List<String> keyFieldNames = tableType.fieldNameList;
    return keyFieldNames == null || validateTableKeyValue(keyFieldNames, recordLiterals);
}
/**
 * Verifies that every row record supplies each key field with a constant expression;
 * logs an error and sets semanticError on the first violation.
 *
 * @return {@code true} when all key values are constant
 */
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                      List<BLangRecordLiteral> recordLiterals) {
    for (String fieldName : keySpecifierFieldNames) {
        for (BLangRecordLiteral recordLiteral : recordLiterals) {
            BLangRecordKeyValueField keyField = getRecordKeyValueField(recordLiteral, fieldName);
            boolean constantValued = keyField != null && isConstExpression(keyField.getValue());
            if (!constantValued) {
                dlog.error(recordLiteral.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
                resultType = symTable.semanticError;
                return false;
            }
        }
    }
    return true;
}
/**
 * Returns true when the expression is a constant expression as required for table key
 * values: literals and constructor/operator expressions built from them, references
 * to constants, and parenthesized constant expressions.
 */
private boolean isConstExpression(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    // References count only when the symbol is a declared constant.
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    // Grouping is transparent: recurse into the wrapped expression.
    if (kind == NodeKind.GROUP_EXPR) {
        return isConstExpression(((BLangGroupExpr) expression).expression);
    }
    switch (kind) {
        case LITERAL:
        case NUMERIC_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case TYPE_CONVERSION_EXPR:
        case UNARY_EXPR:
        case BINARY_EXPR:
        case TYPE_TEST_EXPR:
        case TERNARY_EXPR:
            return true;
        default:
            return false;
    }
}
/**
 * Finds the key-value field with the given name in a record literal, or {@code null}
 * when no such field exists.
 *
 * <p>Fix: the previous implementation cast every record field unconditionally to
 * {@code BLangRecordKeyValueField}, throwing {@code ClassCastException} when the
 * literal contained a spread (`...x`) or computed-name field. Such fields cannot
 * match a plain key name, so they are now skipped instead.
 */
private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                                        String fieldName) {
    for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
        if (!(recordField instanceof BLangRecordKeyValueField)) {
            continue;
        }
        BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField;
        if (fieldName.equals(recordKeyValueField.key.toString())) {
            return recordKeyValueField;
        }
    }
    return null;
}
/**
 * Validates the key-specifier field names against the table's row constraint: each
 * named field must exist and be read-only, required, and anydata. The first violation
 * is logged, {@code resultType} is set to semanticError, and {@code false} is
 * returned.
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String fieldName : fieldNameList) {
        BField field = types.getTableConstraintField(constraint, fieldName);
        if (field == null) {
            dlog.error(pos,
                    DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
        } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
        } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
        } else if (!types.isAssignable(field.type, symTable.anydataType)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
        } else {
            continue; // this field passed every check
        }
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * Validates a table constructor's key specifier against the table type and, when the
 * table carries a key type constraint (`table<R> key<K>`), checks that the specifier's
 * size and field types match the constraint's member types.
 *
 * @return {@code true} when the constructor is valid; on failure an error is logged
 *         and {@code resultType} is set to semanticError
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
    BType constraintType = tableType.constraint;
    List<String> fieldNameList = new ArrayList<>();
    boolean isKeySpecifierEmpty = tableConstructorExpr.tableKeySpecifier == null;
    if (!isKeySpecifierEmpty) {
        fieldNameList.addAll(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier));
        // No declared field name list yet: validate the specifier fields against
        // the (effective) constraint type.
        if (tableType.fieldNameList == null &&
                !validateKeySpecifier(fieldNameList,
                        constraintType.tag != TypeTags.INTERSECTION ? constraintType :
                                ((BIntersectionType) constraintType).effectiveType,
                        tableConstructorExpr.tableKeySpecifier.pos)) {
            return false;
        }
        // A declared field name list must match the constructor's specifier exactly.
        if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
                    tableType.fieldNameList.toString(), fieldNameList.toString());
            resultType = symTable.semanticError;
            return false;
        }
    }
    BType keyTypeConstraint = tableType.keyTypeConstraint;
    if (keyTypeConstraint != null) {
        // Flatten the key constraint into its member types: a tuple contributes each
        // element; a record contributes the types of the specifier-named fields.
        List<BType> memberTypes = new ArrayList<>();
        switch (keyTypeConstraint.tag) {
            case TypeTags.TUPLE:
                for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
                    memberTypes.add((BType) type);
                }
                break;
            case TypeTags.RECORD:
                Map<String, BField> fieldList = ((BRecordType) keyTypeConstraint).getFields();
                memberTypes = fieldList.entrySet().stream()
                        .filter(e -> fieldNameList.contains(e.getKey())).map(entry -> entry.getValue().type)
                        .collect(Collectors.toList());
                if (memberTypes.isEmpty()) {
                    memberTypes.add(keyTypeConstraint);
                }
                break;
            default:
                memberTypes.add(keyTypeConstraint);
        }
        // `key<never>` with no specifier is explicitly allowed.
        if (isKeySpecifierEmpty && keyTypeConstraint.tag == TypeTags.NEVER) {
            return true;
        }
        if (isKeySpecifierEmpty ||
                tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
            if (isKeySpecifierEmpty) {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_EMPTY_FOR_PROVIDED_KEY_CONSTRAINT, memberTypes);
            } else {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
                        memberTypes, tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList);
            }
            resultType = symTable.semanticError;
            return false;
        }
        // Pairwise: each specifier field's type must be assignable to the
        // corresponding key-constraint member type.
        List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
                fieldNameIdentifierList;
        int index = 0;
        for (IdentifierNode identifier : fieldNameIdentifierList) {
            BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
            if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                        fieldNameIdentifierList.toString(), memberTypes.toString());
                resultType = symTable.semanticError;
                return false;
            }
            index++;
        }
    }
    return true;
}
/**
 * Validates a table whose constraint is a map type: such tables may not declare key
 * fields or a key type constraint (except inside lang-lib modules). On violation an
 * error is logged and {@code resultType} becomes semanticError; otherwise the
 * expected type is published as the result.
 */
public void validateMapConstraintTable(BType expType) {
    if (expType != null) {
        BTableType tableType = (BTableType) expType;
        boolean declaresKey = tableType.fieldNameList != null || tableType.keyTypeConstraint != null;
        if (declaresKey && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
            dlog.error(tableType.keyPos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
/**
 * Extracts the key field names declared in a table key specifier, in source order.
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> keyNames = new ArrayList<>(tableKeySpecifier.fieldNameIdentifierList.size());
    for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
        keyNames.add(((BLangIdentifier) identifier).value);
    }
    return keyNames;
}
/**
 * Derives the key type constraint implied by a list of key field names against the table's
 * constraint type: a single key field constrains by that field's type, multiple fields form
 * a tuple. Returns the semantic-error type when no key fields are given or a named field is
 * not part of the constraint type.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    // No key specifier means no constraint can be derived.
    if (fieldNames == null) {
        return symTable.semanticError;
    }

    List<BType> keyMemberTypes = new ArrayList<>();
    for (String fieldName : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, fieldName);
        if (constraintField == null) {
            // The named key field does not exist on the table's constraint type.
            return symTable.semanticError;
        }
        keyMemberTypes.add(constraintField.type);
    }

    return keyMemberTypes.size() == 1 ? keyMemberTypes.get(0) : new BTupleType(keyMemberTypes);
}
/**
 * Checks whether a list constructor expression is compatible with the given expected type and
 * returns the concrete type to check against, or {@code symTable.semanticError} on failure.
 *
 * For union expected types, each member is probed silently (diagnostics muted) and exactly one
 * compatible member type must remain; zero or several compatible members are reported as errors.
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Probe each member type without emitting diagnostics; remember the previous
        // logging state so nested probes restore correctly.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        this.nonErrorLoggingCheck = true;
        this.dlog.mute();
        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }
            BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }
            // Reset the count so errors raised while probing this member can be detected below.
            dlog.resetErrorCount();
            BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
            // A member is compatible only if the check passed, produced no (muted) errors,
            // and is not a duplicate of a previously found compatible type.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }
        // Restore the pre-probe logging state and error count.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (compatibleTypes.isEmpty()) {
            // No member matched: infer a tuple type (on a clone when in silent mode, so the
            // original AST is not mutated) purely for the error message.
            BLangListConstructorExpr exprToLog = listConstructor;
            if (this.nonErrorLoggingCheck) {
                listConstructor.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(listConstructor);
            }
            BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);
            if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
            }
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            // Several members matched: the constructor is ambiguous against the union.
            dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
            return symTable.semanticError;
        }
        // Re-check (with logging enabled) against the single compatible member.
        return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
    }
    if (tag == TypeTags.INTERSECTION) {
        return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
    }
    BType possibleType = getListConstructorCompatibleNonUnionType(bType);
    switch (possibleType.tag) {
        case TypeTags.ARRAY:
            return checkArrayType(listConstructor, (BArrayType) possibleType);
        case TypeTags.TUPLE:
            return checkTupleType(listConstructor, (BTupleType) possibleType);
        case TypeTags.READONLY:
            return checkReadOnlyListType(listConstructor);
        case TypeTags.TYPEDESC:
            // The constructor denotes a typedesc value; collect the member types.
            List<BType> results = new ArrayList<>();
            listConstructor.isTypedescExpr = true;
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
            }
            List<BType> actualTypes = new ArrayList<>();
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                final BLangExpression expr = listConstructor.exprs.get(i);
                if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                    actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                } else {
                    actualTypes.add(results.get(i));
                }
            }
            if (actualTypes.size() == 1) {
                listConstructor.typedescType = actualTypes.get(0);
            } else {
                listConstructor.typedescType = new BTupleType(actualTypes);
            }
            return new BTypedescType(listConstructor.typedescType, null);
    }
    // Incompatible expected type: infer a tuple type purely for error reporting.
    BLangListConstructorExpr exprToLog = listConstructor;
    if (this.nonErrorLoggingCheck) {
        listConstructor.cloneAttempt++;
        exprToLog = nodeCloner.cloneNode(listConstructor);
    }
    if (bType == symTable.semanticError) {
        // Still type-check the members so nested errors surface.
        getInferredTupleType(exprToLog, symTable.semanticError);
    } else {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                getInferredTupleType(exprToLog, symTable.noType));
    }
    return symTable.semanticError;
}
/**
 * Maps a non-union expected type to the concrete list-like type a list constructor should be
 * checked against, or the semantic-error type when none applies. Broad types (json/anydata/any)
 * map to the corresponding array type — or its immutable counterpart when the expected type
 * is readonly.
 */
private BType getListConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.READONLY:
        case TypeTags.TYPEDESC:
            return type;
        case TypeTags.JSON:
        case TypeTags.ANYDATA:
        case TypeTags.ANY:
            BType arrayVariant;
            if (type.tag == TypeTags.JSON) {
                arrayVariant = symTable.arrayJsonType;
            } else if (type.tag == TypeTags.ANYDATA) {
                arrayVariant = symTable.arrayAnydataType;
            } else {
                arrayVariant = symTable.arrayType;
            }
            if (!Symbols.isFlagOn(type.flags, Flags.READONLY)) {
                return arrayVariant;
            }
            return ImmutableTypeCloner.getEffectiveImmutableType(null, types, arrayVariant, env,
                    symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
        default:
            return symTable.semanticError;
    }
}
/**
 * Checks a list constructor against an array type: fixes an inferred-size array to the member
 * count, validates the member count against a closed array's size (allowing fewer members only
 * when the element type has a filler value), and checks every member against the element type.
 *
 * @return the array type on success, or the semantic-error type on any failure
 */
private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
    BType eType = arrayType.eType;

    if (arrayType.state == BArrayState.INFERRED) {
        // Infer the array size from the constructor and close the array.
        arrayType.size = listConstructor.exprs.size();
        arrayType.state = BArrayState.CLOSED;
    } else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) {
        if (arrayType.size < listConstructor.exprs.size()) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
                    listConstructor.exprs.size());
            return symTable.semanticError;
        }

        // Fewer members than the fixed size is allowed only if the element type has a filler value.
        if (!types.hasFillerValue(eType)) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
            return symTable.semanticError;
        }
    }

    boolean errored = false;
    // Check every member (even after a failure) so that all incompatibilities are reported.
    for (BLangExpression expr : listConstructor.exprs) {
        errored = exprIncompatible(eType, expr) || errored;
    }

    return errored ? symTable.semanticError : arrayType;
}
/**
 * Checks a list constructor against a tuple type: validates member counts (missing trailing
 * members require filler values; surplus members require a rest type), then checks the fixed
 * members against their tuple member types and any surplus members against the rest type.
 *
 * @return the tuple type on success, or the semantic-error type on any failure
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
    List<BLangExpression> exprs = listConstructor.exprs;
    List<BType> memberTypes = tupleType.tupleTypes;
    BType restType = tupleType.restType;

    int listExprSize = exprs.size();
    int memberTypeSize = memberTypes.size();

    if (listExprSize < memberTypeSize) {
        // Missing trailing members are allowed only when each omitted type has a filler value.
        for (int i = listExprSize; i < memberTypeSize; i++) {
            if (!types.hasFillerValue(memberTypes.get(i))) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                        "tuple and expression size does not match");
                return symTable.semanticError;
            }
        }
    } else if (listExprSize > memberTypeSize && restType == null) {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                "tuple and expression size does not match");
        return symTable.semanticError;
    }

    boolean errored = false;
    // Check fixed members first, then any surplus members against the rest type; keep
    // checking after a failure so all incompatibilities are reported.
    int nonRestCountToCheck = Math.min(listExprSize, memberTypeSize);
    for (int i = 0; i < nonRestCountToCheck; i++) {
        errored = exprIncompatible(memberTypes.get(i), exprs.get(i)) || errored;
    }
    for (int i = nonRestCountToCheck; i < listExprSize; i++) {
        errored = exprIncompatible(restType, exprs.get(i)) || errored;
    }
    return errored ? symTable.semanticError : tupleType;
}
/**
 * Checks a list constructor against the {@code readonly} type. In silent-probe mode, each
 * member merely needs to be readonly-compatible; otherwise an immutable tuple type is
 * inferred and checked against {@code readonly}.
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
    if (this.nonErrorLoggingCheck) {
        // Silent probe: verify each member individually, no type inference needed.
        for (BLangExpression expr : listConstructor.exprs) {
            if (exprIncompatible(symTable.readonlyType, expr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }

    BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
    if (inferredType == symTable.semanticError) {
        return symTable.semanticError;
    }
    return types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
/**
 * Returns true when the expression is NOT compatible with the given type. An
 * already-checked expression is judged by its resolved type; in silent-probe mode a clone
 * of the expression is checked so the original node stays untouched.
 */
private boolean exprIncompatible(BType eType, BLangExpression expr) {
    // Already-checked expressions carry their verdict in the resolved type.
    if (expr.typeChecked) {
        return expr.getBType() == symTable.semanticError;
    }

    final BLangExpression target;
    if (this.nonErrorLoggingCheck) {
        // Check a clone so silent probing does not mutate the original AST node.
        expr.cloneAttempt++;
        target = nodeCloner.cloneNode(expr);
    } else {
        target = expr;
    }
    return checkExpr(target, this.env, eType) == symTable.semanticError;
}
// Convenience overload: checks each expression with no particular expected type.
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
    return checkExprList(exprs, env, symTable.noType);
}
/**
 * Type-checks each expression against the given expected type inside the given environment
 * and returns the resolved types in order.
 *
 * Fix: the result list was previously named {@code types}, shadowing the {@code Types}
 * helper field of the enclosing class; renamed to avoid accidental misuse.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> exprTypes = new ArrayList<>();
    // Temporarily switch the checker's environment and expected type; restore afterwards.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    this.env = env;
    this.expType = expType;
    for (BLangExpression e : exprs) {
        checkExpr(e, this.env, expType);
        exprTypes.add(resultType);
    }
    this.env = prevEnv;
    this.expType = preExpType;
    return exprTypes;
}
/**
 * Infers a tuple type from the members of a list constructor. Returns the semantic-error
 * type if any member fails to type-check; the resulting tuple is marked readonly when the
 * expected type is {@code readonly}.
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
    List<BType> memberTypes = checkExprList(listConstructor.exprs, env, expType);

    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }

    BTupleType tupleType = new BTupleType(memberTypes);
    if (expType.tag == TypeTags.READONLY) {
        // Inferring against readonly makes the resulting tuple immutable.
        tupleType.flags |= Flags.READONLY;
    }
    return tupleType;
}
// Type-checks a mapping (record) constructor: infers an anonymous record type when no
// concrete expected type exists, rejects object expected types, then resolves the
// effective mapping type.
public void visit(BLangRecordLiteral recordLiteral) {
    switch (expType.tag) {
        case TypeTags.NONE:
        case TypeTags.READONLY:
            // No concrete expected type: infer an anonymous record type from the literal.
            expType = defineInferredRecordType(recordLiteral, expType);
            break;
        case TypeTags.OBJECT:
            dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
            resultType = symTable.semanticError;
            return;
    }

    resultType = getEffectiveMappingType(recordLiteral,
            checkMappingConstructorCompatibility(expType, recordLiteral));
}
/**
 * Computes the effective type of a mapping constructor containing {@code readonly} fields:
 * when at least one specified field is marked readonly (and the applicable type's
 * corresponding field is not already readonly), a fresh anonymous record type is defined in
 * which those fields are readonly, and returned. Otherwise the applicable mapping type is
 * returned unchanged.
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
    if (applicableMappingType == symTable.semanticError ||
            (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                    Flags.READONLY))) {
        // Nothing to do: errored type, or the whole record is already readonly.
        return applicableMappingType;
    }
    // Collect fields declared `readonly` in the constructor whose counterpart in the
    // applicable type is not already readonly.
    Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
    LinkedHashMap<String, BField> applicableTypeFields =
            applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields :
                    new LinkedHashMap<>();
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            continue;
        }
        String name;
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;
            if (!keyValueField.readonly) {
                continue;
            }
            BLangExpression keyExpr = keyValueField.key.expr;
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                name = ((BLangSimpleVarRef) keyExpr).variableName.value;
            } else {
                // Non-computed key: a string-literal field name.
                name = (String) ((BLangLiteral) keyExpr).value;
            }
        } else {
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            if (!varNameField.readonly) {
                continue;
            }
            name = varNameField.variableName.value;
        }
        if (applicableTypeFields.containsKey(name) &&
                Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
            // Already readonly on the applicable type; no override needed.
            continue;
        }
        readOnlyFields.put(name, field);
    }
    if (readOnlyFields.isEmpty()) {
        return applicableMappingType;
    }
    // Define a fresh anonymous record type carrying the readonly-ified fields.
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
    LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();
    for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
        RecordLiteralNode.RecordField field = readOnlyEntry.getValue();
        String key = readOnlyEntry.getKey();
        Name fieldName = names.fromString(key);
        // The field's type is the (already checked) type of its value expression.
        BType readOnlyFieldType;
        if (field.isKeyValueField()) {
            readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
        } else {
            readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
        }
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
            add(Flag.REQUIRED);
            add(Flag.READONLY);
        }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                ((BLangNode) field).pos, VIRTUAL);
        newFields.put(key, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
    if (applicableMappingType.tag == TypeTags.MAP) {
        // For a map, the new record is open with the map's constraint as the rest type.
        recordType.sealed = false;
        recordType.restFieldType = ((BMapType) applicableMappingType).constraint;
    } else {
        // For a record, copy over the remaining (non-overridden) fields unchanged.
        BRecordType applicableRecordType = (BRecordType) applicableMappingType;
        boolean allReadOnlyFields = true;
        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
            String fieldName = origEntry.getKey();
            BField field = origEntry.getValue();
            if (readOnlyFields.containsKey(fieldName)) {
                // Already defined above as a readonly field.
                continue;
            }
            BVarSymbol origFieldSymbol = field.symbol;
            long origFieldFlags = origFieldSymbol.flags;
            if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                allReadOnlyFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                    origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
            newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
            recordSymbol.scope.define(field.name, fieldSymbol);
        }
        recordType.sealed = applicableRecordType.sealed;
        recordType.restFieldType = applicableRecordType.restFieldType;
        // A sealed record whose fields are all readonly is itself readonly.
        if (recordType.sealed && allReadOnlyFields) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }
    }
    recordType.fields = newFields;
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    // Register the new record type (with its init function) as a type definition.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    if (applicableMappingType.tag == TypeTags.MAP) {
        recordLiteral.expectedType = applicableMappingType;
    }
    return recordType;
}
/**
 * Checks whether a mapping constructor is compatible with the given expected type and returns
 * the concrete mapping type to use, or {@code symTable.semanticError} on failure.
 *
 * Union expected types are probed member-by-member with diagnostics muted; exactly one
 * compatible member must remain, else an error is reported.
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Silent probing: mute the log and remember its state for restoration.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }
            BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }
            // Reset the count so errors raised while probing this member can be detected.
            dlog.resetErrorCount();
            BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                    mappingConstructor);
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }
        // Restore the pre-probe logging state and error count.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (compatibleTypes.isEmpty()) {
            if (!erroredExpType) {
                reportIncompatibleMappingConstructorError(mappingConstructor, bType);
            }
            // Still check the fields so nested errors surface.
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        }
        // Re-check (with logging enabled) against the single compatible member.
        return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
    }
    if (tag == TypeTags.INTERSECTION) {
        return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
    }
    BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);
    switch (possibleType.tag) {
        case TypeTags.MAP:
            return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                    symTable.semanticError;
        case TypeTags.RECORD:
            // Records additionally require all required fields to be present.
            boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);
            boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                    mappingConstructor.fields,
                    mappingConstructor.pos);
            return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
        case TypeTags.READONLY:
            return checkReadOnlyMappingType(mappingConstructor);
    }
    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
    validateSpecifiedFields(mappingConstructor, symTable.semanticError);
    return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the {@code readonly} type. In silent-probe mode each
 * field value merely needs to be readonly-compatible; otherwise an immutable record type is
 * inferred and its compatibility re-checked.
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
    if (this.nonErrorLoggingCheck) {
        // Silent probe: check each field's value expression against readonly.
        for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
            BLangExpression valueExpr;
            if (field.isKeyValueField()) {
                valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
            } else {
                // A bare variable-name field is itself the value expression.
                valueExpr = (BLangRecordVarNameField) field;
            }

            if (exprIncompatible(symTable.readonlyType, valueExpr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }

    BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
    if (inferredType == symTable.semanticError) {
        return symTable.semanticError;
    }
    return checkMappingConstructorCompatibility(inferredType, mappingConstructor);
}
/**
 * Maps a non-union expected type to the concrete mapping type a mapping constructor should be
 * checked against, or the semantic-error type when none applies. Broad types
 * (json/anydata/any) map to the corresponding map type — or its immutable counterpart when
 * the expected type is readonly.
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.READONLY:
            return type;
        case TypeTags.JSON:
        case TypeTags.ANYDATA:
        case TypeTags.ANY:
            BType mapVariant;
            if (type.tag == TypeTags.JSON) {
                mapVariant = symTable.mapJsonType;
            } else if (type.tag == TypeTags.ANYDATA) {
                mapVariant = symTable.mapAnydataType;
            } else {
                mapVariant = symTable.mapType;
            }
            if (!Symbols.isFlagOn(type.flags, Flags.READONLY)) {
                return mapVariant;
            }
            return ImmutableTypeCloner.getEffectiveImmutableType(null, types, mapVariant, env,
                    symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
        default:
            return symTable.semanticError;
    }
}
// True when the type is one a mapping constructor could directly construct (record or map).
private boolean isMappingConstructorCompatibleType(BType type) {
    int tag = type.tag;
    return tag == TypeTags.RECORD || tag == TypeTags.MAP;
}
/**
 * Reports the most specific error possible for a mapping constructor that does not match the
 * expected type. For a two-member union of record and nil (i.e. {@code R?}), field-level
 * errors are reported against the record member instead of a generic incompatibility.
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
    if (expType == symTable.semanticError) {
        return;
    }
    if (expType.tag != TypeTags.UNION) {
        dlog.error(mappingConstructorExpr.pos,
                DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        return;
    }
    BUnionType unionType = (BUnionType) expType;
    BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    // Special case: `R?` (record plus nil) — report field-level errors against `R`.
    if (memberTypes.length == 2) {
        BRecordType recType = null;
        if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[0];
        } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[1];
        }
        if (recType != null) {
            validateSpecifiedFields(mappingConstructorExpr, recType);
            validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
            return;
        }
    }
    // If at least one member is a mapping type, the constructor is merely incompatible;
    // otherwise the union contains no mapping-compatible type at all.
    for (BType bType : memberTypes) {
        if (isMappingConstructorCompatibleType(bType)) {
            dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
                    unionType);
            return;
        }
    }
    dlog.error(mappingConstructorExpr.pos,
            DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every field of a mapping constructor against the given type and returns whether
 * all of them are valid. Every field is checked (even after a failure) so that all field
 * errors are reported.
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
    boolean isFieldsValid = true;
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        // checkMappingField must run for every field regardless of earlier failures.
        isFieldsValid = checkMappingField(field, possibleType) != symTable.semanticError && isFieldsValid;
    }
    return isFieldsValid;
}
/**
 * Verifies that every required field of the record type is present among the specified
 * fields of a mapping constructor, logging an error for each missing one.
 *
 * @return true when no required field is missing
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos) {
    HashSet<String> specFieldNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;

    for (BField field : type.fields.values()) {
        String fieldName = field.name.value;
        if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)
                && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            // Report each missing required field; keep iterating so all are reported.
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            hasAllRequiredFields = false;
        }
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically-known field names specified in a mapping constructor: key-value
 * field names (when not computed), bare variable-name fields, and the required field names
 * contributed by spread-operator fields.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> fieldNames = new HashSet<>();

    for (RecordLiteralNode.RecordField field : specifiedFields) {
        if (field.isKeyValueField()) {
            String keyName = getKeyValueFieldName((BLangRecordKeyValueField) field);
            // Computed keys have no statically-known name.
            if (keyName != null) {
                fieldNames.add(keyName);
            }
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) field));
        } else {
            // Spread operator: pull in the required field names of the spread type.
            fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field));
        }
    }
    return fieldNames;
}
/**
 * Resolves the statically-known field name of a key-value field, or {@code null} for
 * computed keys and any key expression that is neither a variable reference nor a literal.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey key = field.key;
    // Computed keys ([expr]) cannot be resolved to a name statically.
    if (key.computedKey) {
        return null;
    }

    BLangExpression keyExpr = key.expr;
    switch (keyExpr.getKind()) {
        case SIMPLE_VARIABLE_REF:
            return ((BLangSimpleVarRef) keyExpr).variableName.value;
        case LITERAL:
            return (String) ((BLangLiteral) keyExpr).value;
        default:
            return null;
    }
}
// Returns the field name of a record literal field specified via a bare variable name.
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the names of the non-optional fields contributed by a spread-operator field.
 * Only record spread types contribute statically-known names; all other types yield an
 * empty list.
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = checkExpr(field.expr, env);
    if (spreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }

    List<String> requiredFieldNames = new ArrayList<>();
    for (BField spreadField : ((BRecordType) spreadType).getFields().values()) {
        if (!Symbols.isOptional(spreadField.symbol)) {
            requiredFieldNames.add(spreadField.name.value);
        }
    }
    return requiredFieldNames;
}
@Override
// Type-checks a worker flush expression: resolves the (optional) named worker and types the
// expression as `error?`.
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (this.workerExists(this.env, workerName)) {
            BSymbol workerSymbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
            if (workerSymbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = workerSymbol;
            }
        } else {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
        }
    }
    // flush evaluates to error|().
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
@Override
// Type-checks a synchronous worker send: resolves the receiving worker, checks that the sent
// value is clonable, and types the expression from the expected type.
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol workerSymbol =
            symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(workerSymbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = workerSymbol.type;
        syncSendExpr.workerSymbol = workerSymbol;
    }

    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);

    // Only clonable values may be sent between workers.
    if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
                syncSendExpr.expr.getBType());
    }

    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }

    syncSendExpr.expectedType = expType;
    // Without a contextual expected type the send evaluates to nil.
    resultType = expType == symTable.noType ? symTable.nilType : expType;
}
@Override
// Type-checks a worker receive: resolves the sending worker and adopts the contextually
// expected type as the received value's type.
public void visit(BLangWorkerReceive workerReceiveExpr) {
    BSymbol workerSymbol =
            symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
    workerReceiveExpr.env = this.env;

    if (symTable.notFoundSymbol.equals(workerSymbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = workerSymbol.type;
        workerReceiveExpr.workerSymbol = workerSymbol;
    }
    // A receive needs a contextually expected type to type the received value.
    if (symTable.noType == this.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(this.expType);
    resultType = this.expType;
}
/**
 * Returns whether a worker with the given name is visible in the given environment.
 * The default worker always exists; otherwise the name must resolve to a worker-derivative
 * future-typed symbol.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (symbol == this.symTable.notFoundSymbol || symbol.type.tag != TypeTags.FUTURE) {
        return false;
    }
    return ((BFutureType) symbol.type).workerDerivative;
}
@Override
// Type-checks a constant reference: resolves the constant's symbol and applies an implicit
// cast to the expected type when required.
public void visit(BLangConstRef constRef) {
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env,
            names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));
    types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
    resultType = constRef.getBType();
}
// Type-checks a simple variable reference: resolves the symbol (variable, type, constant, or
// XML namespace prefix), records it on the node, and sets `resultType` accordingly.
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // `_` is valid only as an assignment target.
        if (varRefExpr.isLValue) {
            varRefExpr.setBType(this.symTable.anyType);
        } else {
            varRefExpr.setBType(this.symTable.semanticError);
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED);
        }
        // Synthesize a symbol for `_` so later phases have something to refer to.
        varRefExpr.symbol = new BVarSymbol(0, true, varName,
                names.originalNameFromIdNode(varRefExpr.variableName),
                env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner,
                varRefExpr.pos, VIRTUAL);
        resultType = varRefExpr.getBType();
        return;
    }
    Name compUnitName = getCurrentCompUnit(varRefExpr);
    varRefExpr.pkgSymbol =
            symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
    if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
        varRefExpr.symbol = symTable.notFoundSymbol;
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
    }
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An XML namespace prefix reference evaluates to a string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName);
        // Fall back to attached functions of the enclosing type (e.g. object methods).
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.getBType().tsymbol);
        }
        if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSelfReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
        } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
            // A type reference evaluates to a typedesc value.
            actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) symbol;
            varRefExpr.symbol = constSymbol;
            BType symbolType = symbol.type;
            // Against a finite expected type (or a union containing an assignable finite
            // member), keep the constant's own type; otherwise use its broader literal type.
            if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                    (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = constSymbol.literalType;
            }
            // Constants cannot be assignment targets.
            if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            varRefExpr.symbol = symbol;
            logUndefinedSymbolError(varRefExpr.pos, varName.value);
        }
    }
    // NOTE(review): this early return leaves `resultType` at its previous value rather than
    // setting it to semanticError — presumably callers tolerate that; confirm before relying on it.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
@Override
// Type-checks a record binding-pattern reference (destructuring target): builds an anonymous
// record type whose fields mirror the referenced variables, derives a rest-field type from
// the rest parameter, and sets that record type as `resultType`.
public void visit(BLangRecordVarRef varRefExpr) {
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    // Define an anonymous record symbol to own the synthesized fields.
    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
                                                               env.enclPkg.symbol.pkgID, null, env.scope.owner,
                                                               varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, env);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        // Each bound variable becomes a record field typed by that variable's type.
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                                  new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                                                 names.originalNameFromIdNode(recordRefField.variableName),
                                                 env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                                                 varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, env);
        // NOTE(review): this assignment overwrites any earlier `true` from the field loop —
        // presumably intentional (field errors were already flagged), but verify.
        unresolvedReference = !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(),
                                       env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
                                       SOURCE);
    // Derive the rest-field type from the rest parameter's shape.
    if (restParam == null) {
        // No rest pattern: the record is closed.
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }
    resultType = bRecordType;
}
    /**
     * Type-checks an error binding-pattern variable reference, i.e. a reference of the form
     * {@code error(message, cause, detail..., ...rest)} appearing on the LHS of a destructuring
     * assignment. Sets {@code resultType} to the inferred error type, or to
     * {@code semanticError} when any constituent reference cannot be resolved.
     */
    @Override
    public void visit(BLangErrorVarRef varRefExpr) {
        // An explicit error-type reference switches to the indirect-error checking path.
        if (varRefExpr.typeNode != null) {
            BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
            varRefExpr.setBType(bType);
            checkIndirectErrorVarRef(varRefExpr);
            resultType = bType;
            return;
        }
        // The message reference, if present, must accept a string.
        if (varRefExpr.message != null) {
            varRefExpr.message.isLValue = true;
            checkExpr(varRefExpr.message, env);
            if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
                dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                           varRefExpr.message.getBType());
            }
        }
        // The cause reference, if present, must accept error?.
        if (varRefExpr.cause != null) {
            varRefExpr.cause.isLValue = true;
            checkExpr(varRefExpr.cause, env);
            if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
                dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                           varRefExpr.cause.getBType());
            }
        }
        boolean unresolvedReference = false;
        // Each named detail item must be a plain, resolvable variable reference;
        // field/index access is not allowed in a binding pattern.
        for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
            BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
            refItem.isLValue = true;
            checkExpr(refItem, env);
            if (!isValidVariableReference(refItem)) {
                unresolvedReference = true;
                continue;
            }
            if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                    || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
                dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                           refItem);
                unresolvedReference = true;
                continue;
            }
            if (refItem.symbol == null) {
                unresolvedReference = true;
            }
        }
        // The rest binding, if present as a simple var ref, must also resolve.
        if (varRefExpr.restVar != null) {
            varRefExpr.restVar.isLValue = true;
            if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                checkExpr(varRefExpr.restVar, env);
                unresolvedReference = unresolvedReference
                        || varRefExpr.restVar.symbol == null
                        || !isValidVariableReference(varRefExpr.restVar);
            }
        }
        if (unresolvedReference) {
            resultType = symTable.semanticError;
            return;
        }
        // Derive the rest-field type for the resulting error's detail mapping.
        BType errorRefRestFieldType;
        if (varRefExpr.restVar == null) {
            errorRefRestFieldType = symTable.anydataOrReadonly;
        } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
            // `..._` ignores the rest, so any anydata|readonly detail is fine.
            errorRefRestFieldType = symTable.anydataOrReadonly;
        } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
                || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
            errorRefRestFieldType = varRefExpr.restVar.getBType();
        } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
            errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
        } else {
            dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                       varRefExpr.restVar.getBType(), symTable.detailType);
            resultType = symTable.semanticError;
            return;
        }
        BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
                ? symTable.errorType.detailType
                : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
        resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
    }
    /**
     * Checks the constituents of an error var ref that carries an explicit error-type
     * reference: detail items, the rest binding, and the message/cause references.
     * Only performs type checking; no result type is produced here.
     */
    private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
        for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
            // Check the inner expression first, then the named arg against its type.
            checkExpr(detailItem.expr, env);
            checkExpr(detailItem, env, detailItem.expr.getBType());
        }
        if (varRefExpr.restVar != null) {
            checkExpr(varRefExpr.restVar, env);
        }
        if (varRefExpr.message != null) {
            varRefExpr.message.isLValue = true;
            checkExpr(varRefExpr.message, env);
        }
        if (varRefExpr.cause != null) {
            varRefExpr.cause.isLValue = true;
            checkExpr(varRefExpr.cause, env);
        }
    }
    /**
     * Type-checks a tuple binding-pattern variable reference, e.g. {@code [a, b, ...rest]}
     * on the LHS of a destructuring assignment. Builds a tuple type from the member
     * references (and rest reference, if any) and checks it against the expected type.
     */
    @Override
    public void visit(BLangTupleVarRef varRefExpr) {
        List<BType> results = new ArrayList<>();
        for (int i = 0; i < varRefExpr.expressions.size(); i++) {
            ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true;
            results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType));
        }
        BTupleType actualType = new BTupleType(results);
        if (varRefExpr.restParam != null) {
            BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
            ((BLangVariableReference) restExpr).isLValue = true;
            BType checkedType = checkExpr(restExpr, env, symTable.noType);
            // The rest binding must itself be a list (array or tuple).
            if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) {
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType);
                resultType = symTable.semanticError;
                return;
            }
            if (checkedType.tag == TypeTags.ARRAY) {
                // For an array rest binding, the rest type is the element type.
                actualType.restType = ((BArrayType) checkedType).eType;
            } else {
                actualType.restType = checkedType;
            }
        }
        resultType = types.checkType(varRefExpr, actualType, expType);
    }
/**
* This method will recursively check if a multidimensional array has at least one open sealed dimension.
*
* @param arrayType array to check if open sealed
* @return true if at least one dimension is open sealed
*/
public boolean isArrayOpenSealedType(BArrayType arrayType) {
if (arrayType.state == BArrayState.INFERRED) {
return true;
}
if (arrayType.eType.tag == TypeTags.ARRAY) {
return isArrayOpenSealedType((BArrayType) arrayType.eType);
}
return false;
}
/**
* This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
* enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
* enclosing invokable node's environment, which are outside of the scope of a lambda function.
*/
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
return env.enclEnv;
}
if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {
return env.enclEnv;
}
if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
}
return env;
}
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
return env.enclEnv;
}
if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {
return env.enclEnv;
}
if (env.enclType != null && env.enclType == recordTypeNode) {
return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
}
return env;
}
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) &&
param.getBType().tag == symbol.type.tag));
}
    /**
     * Type-checks a namespace-prefixed field access (only valid on XML values);
     * delegates to the shared field-access checker with the NS-prefixed flag set.
     */
    @Override
    public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
        checkFieldBasedAccess(nsPrefixedFieldBasedAccess, true);
    }
    /**
     * Type-checks a plain field access expression; delegates to the shared
     * field-access checker with the NS-prefixed flag cleared.
     */
    public void visit(BLangFieldBasedAccess fieldAccessExpr) {
        checkFieldBasedAccess(fieldAccessExpr, false);
    }
    /**
     * Shared implementation for field access checking ({@code a.b} and {@code a?.b}).
     * Propagates lvalue flags into the container expression, validates optional access
     * and readonly-update constraints, and sets {@code resultType}.
     *
     * @param fieldAccessExpr the field access expression
     * @param isNsPrefixed    true when the field name carries an XML namespace prefix
     */
    private void checkFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr, boolean isNsPrefixed) {
        markLeafNode(fieldAccessExpr);
        BLangExpression containerExpression = fieldAccessExpr.expr;
        // Propagate lvalue / compound-assignment flags down to the container.
        if (containerExpression instanceof BLangValueExpression) {
            ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
            ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                    fieldAccessExpr.isCompoundAssignmentLValue;
        }
        BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));
        // NS-prefixed field access is only meaningful on XML values.
        if (isNsPrefixed && !isXmlAccess(fieldAccessExpr)) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
            resultType = symTable.semanticError;
            return;
        }
        BType actualType;
        if (fieldAccessExpr.optionalFieldAccess) {
            // `a?.b` cannot appear on the LHS of an assignment.
            if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
                resultType = symTable.semanticError;
                return;
            }
            actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                                                     names.fromIdNode(fieldAccessExpr.field));
        } else {
            actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
            // On the LHS, reject updates to readonly values / readonly record fields.
            if (actualType != symTable.semanticError &&
                    (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
                if (isAllReadonlyTypes(varRefType)) {
                    // Objects may still assign readonly fields inside their own init method.
                    if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
                        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                                   varRefType);
                        resultType = symTable.semanticError;
                        return;
                    }
                } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                        isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                               fieldAccessExpr.field.value, varRefType);
                    resultType = symTable.semanticError;
                    return;
                }
            }
        }
        resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
    }
private boolean isAllReadonlyTypes(BType type) {
if (type.tag != TypeTags.UNION) {
return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isAllReadonlyTypes(memberType)) {
return false;
}
}
return true;
}
    /**
     * Checks whether the currently-analyzed invokable is the init method of the
     * given object type — readonly field assignment is permitted only there.
     * Assumes {@code type} is an object type (caller checks the tag first).
     */
    private boolean isInitializationInInit(BType type) {
        BObjectType objectType = (BObjectType) type;
        BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol;
        BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
        return env.enclInvokable != null && initializerFunc != null &&
                env.enclInvokable.symbol == initializerFunc.symbol;
    }
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
if (type.tag == TypeTags.RECORD) {
if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return true;
}
BRecordType recordType = (BRecordType) type;
for (BField field : recordType.fields.values()) {
if (!field.name.value.equals(fieldName)) {
continue;
}
return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
}
return recordType.sealed;
}
boolean allInvalidUpdates = true;
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
allInvalidUpdates = false;
}
}
return allInvalidUpdates;
}
    /**
     * Checks whether a field access targets an XML value: either the container is
     * directly xml/xml:Element, or it is itself a lax field access whose union type
     * contains an XML member.
     */
    private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
        BLangExpression expr = fieldAccessExpr.expr;
        BType exprType = expr.getBType();
        if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) {
            return true;
        }
        // Chained lax access may yield a union that still contains xml members.
        if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr)
                && exprType.tag == TypeTags.UNION) {
            Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes();
            return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
        }
        return false;
    }
    /**
     * Type-checks a member access expression {@code a[i]}. Validates the container
     * (rejecting typedesc containers and multi-key access on non-tables), propagates
     * lvalue flags, enforces readonly-update constraints, and sets {@code resultType}.
     */
    public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
        markLeafNode(indexBasedAccessExpr);
        BLangExpression containerExpression = indexBasedAccessExpr.expr;
        // Member access on a type reference is not supported.
        if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                       ((BLangTypedescExpr) containerExpression).typeNode);
            resultType = symTable.semanticError;
            return;
        }
        // Propagate lvalue / compound-assignment flags down to the container.
        if (containerExpression instanceof BLangValueExpression) {
            ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
            ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                    indexBasedAccessExpr.isCompoundAssignmentLValue;
        }
        // A container already typed as string need not be re-checked.
        boolean isStringValue = containerExpression.getBType() != null
                && containerExpression.getBType().tag == TypeTags.STRING;
        if (!isStringValue) {
            checkExpr(containerExpression, this.env, symTable.noType);
        }
        // Multi-key member access (`t[k1, k2]`) is only valid on tables.
        if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
                containerExpression.getBType().tag != TypeTags.TABLE) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                       containerExpression.getBType());
            resultType = symTable.semanticError;
            return;
        }
        BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
        BType exprType = containerExpression.getBType();
        BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
        // On the LHS, reject updates to readonly values / readonly record fields
        // (field name must be a literal or constant to be checked here).
        if (actualType != symTable.semanticError &&
                (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
            if (isAllReadonlyTypes(exprType)) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                           exprType);
                resultType = symTable.semanticError;
                return;
            } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) &&
                    (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) &&
                    isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                           getConstFieldName(indexExpr), exprType);
                resultType = symTable.semanticError;
                return;
            }
        }
        // For lvalues, skip the expected-type check: assignment compatibility is
        // validated by the assignment statement itself.
        if (indexBasedAccessExpr.isLValue) {
            indexBasedAccessExpr.originalType = actualType;
            indexBasedAccessExpr.setBType(actualType);
            resultType = actualType;
            return;
        }
        this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
    }
    /**
     * Type-checks a function/method invocation. Dispatches on the receiver type:
     * no receiver → plain function call; object → method call; record → function-pointer
     * field call; anything else → lang-lib method lookup.
     */
    public void visit(BLangInvocation iExpr) {
        // No receiver expression: a plain (possibly module-qualified) function call.
        if (iExpr.expr == null) {
            checkFunctionInvocationExpr(iExpr);
            return;
        }
        // A package alias is not allowed on a method-call expression.
        if (invalidModuleAliasUsage(iExpr)) {
            return;
        }
        checkExpr(iExpr.expr, this.env, symTable.noType);
        BType varRefType = iExpr.expr.getBType();
        switch (varRefType.tag) {
            case TypeTags.OBJECT:
                checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
                break;
            case TypeTags.RECORD:
                checkFieldFunctionPointer(iExpr, this.env);
                break;
            case TypeTags.NONE:
                dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
                break;
            case TypeTags.SEMANTIC_ERROR:
                break;
            default:
                // Built-in types fall through to lang-lib method resolution.
                checkInLangLib(iExpr, varRefType);
        }
    }
    /**
     * Checks the positional arguments of an error constructor: the first argument
     * (message) against string and the second (cause), if present, against error?.
     */
    private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
        if (errorConstructorExpr.positionalArgs.isEmpty()) {
            return;
        }
        checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType);
        int positionalArgCount = errorConstructorExpr.positionalArgs.size();
        if (positionalArgCount > 1) {
            checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType);
        }
    }
    /**
     * Type-checks an expression without emitting diagnostics: mutes the error log,
     * runs the check, then restores the previous error count and mute state.
     * Used for speculative checks where the result type alone is needed.
     *
     * @return the type produced by the silent check
     */
    private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) {
        // Save the enclosing silent-check state so nested silent checks compose.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        BType type = checkExpr(expr, env, expType);
        // Restore: discard any errors raised during the speculative check.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        // Only unmute if we were not already inside an outer silent check.
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        return type;
    }
    /**
     * Builds a synthetic record literal from the named arguments of an error
     * constructor, so the detail mapping can be checked as an ordinary record value.
     * Each named arg becomes a key-value field keyed by a string literal.
     */
    private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
        BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
        for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
            BLangRecordKeyValueField field =
                    (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
            field.valueExpr = (BLangExpression) namedArg.getExpression();
            BLangLiteral expr = new BLangLiteral();
            expr.value = namedArg.getName().value;
            expr.setBType(symTable.stringType);
            field.key = new BLangRecordKey(expr);
            recordLiteral.fields.add(field);
        }
        return recordLiteral;
    }
    /**
     * Determines the candidate error types an error constructor could produce.
     * With an explicit type reference, that (validated) type is the only candidate;
     * otherwise candidates are derived from the expected type, falling back to the
     * generic error type.
     */
    private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
        BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
        if (errorTypeRef == null) {
            // No explicit type: infer candidates from the expected type.
            if (expType.tag == TypeTags.ERROR) {
                return List.of(expType);
            } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
                return expandExpectedErrorTypes(expType);
            }
        } else {
            // Explicit type reference must actually denote an error type.
            if (errorTypeRef.getBType().tag != TypeTags.ERROR) {
                // Skip the extra diagnostic if type resolution already failed.
                if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) {
                    dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
                }
            } else {
                return List.of(errorTypeRef.getBType());
            }
        }
        // Fallback: the generic error type.
        return List.of(symTable.errorType);
    }
private List<BType> expandExpectedErrorTypes(BType candidateType) {
List<BType> expandedCandidates = new ArrayList<>();
if (candidateType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) {
if (types.isAssignable(memberType, symTable.errorType)) {
if (memberType.tag == TypeTags.INTERSECTION) {
expandedCandidates.add(((BIntersectionType) memberType).effectiveType);
} else {
expandedCandidates.add(memberType);
}
}
}
} else if (types.isAssignable(candidateType, symTable.errorType)) {
if (candidateType.tag == TypeTags.INTERSECTION) {
expandedCandidates.add(((BIntersectionType) candidateType).effectiveType);
} else {
expandedCandidates.add(candidateType);
}
}
return expandedCandidates;
}
    /**
     * Type-checks an action invocation (e.g. {@code client->method(...)}).
     * Dispatches on the receiver type; only object receivers support actions,
     * record receivers fall back to function-pointer fields.
     */
    public void visit(BLangInvocation.BLangActionInvocation aInv) {
        // No receiver: treat as a plain function invocation.
        if (aInv.expr == null) {
            checkFunctionInvocationExpr(aInv);
            return;
        }
        // A package alias is not allowed on an action invocation.
        if (invalidModuleAliasUsage(aInv)) {
            return;
        }
        checkExpr(aInv.expr, this.env, symTable.noType);
        BLangExpression varRef = aInv.expr;
        switch (varRef.getBType().tag) {
            case TypeTags.OBJECT:
                checkActionInvocation(aInv, (BObjectType) varRef.getBType());
                break;
            case TypeTags.RECORD:
                checkFieldFunctionPointer(aInv, this.env);
                break;
            case TypeTags.NONE:
                dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
                resultType = symTable.semanticError;
                break;
            case TypeTags.SEMANTIC_ERROR:
            default:
                // Actions are not supported on any other receiver type.
                dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
                resultType = symTable.semanticError;
                break;
        }
    }
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
Name pkgAlias = names.fromIdNode(invocation.pkgAlias);
if (pkgAlias != Names.EMPTY) {
dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
return true;
}
return false;
}
    /**
     * Type-checks a let expression: creates a dedicated environment scoped by a
     * synthetic let symbol, analyzes each declaration in it, then checks the body
     * expression against the expected type.
     */
    public void visit(BLangLetExpression letExpression) {
        // Synthetic scope owner for the let-variable declarations.
        BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
                                              new Name(String.format("$let_symbol_%d$", letCount++)),
                                              env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner,
                                              letExpression.pos);
        letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
        for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
            semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
        }
        BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType);
        types.checkType(letExpression, exprType, this.expType);
    }
    /**
     * Resolves the invocation as a lang-lib method on the receiver type, then
     * validates immutability and fixed-length-list constraints for the call.
     */
    private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
        BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
        if (langLibMethodSymbol == symTable.notFoundSymbol) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                       iExpr.expr.getBType());
            resultType = symTable.semanticError;
            return;
        }
        // Mutating lang-lib functions may not be called on readonly values.
        if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
            return;
        }
        checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
    }
    /**
     * Reports an error when a known mutating lang-lib function is invoked on a
     * readonly value. {@code mergeJson} and {@code strip} are exempted for the
     * type combinations where they do not mutate.
     *
     * @return true when an error was reported (call is invalid)
     */
    private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                     BSymbol langLibMethodSymbol) {
        if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
            return false;
        }
        // Only functions registered as modifiers in their lang-lib module count.
        String packageId = langLibMethodSymbol.pkgID.name.value;
        if (!modifierFunctions.containsKey(packageId)) {
            return false;
        }
        String funcName = langLibMethodSymbol.name.value;
        if (!modifierFunctions.get(packageId).contains(funcName)) {
            return false;
        }
        // mergeJson only mutates map receivers.
        if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) {
            return false;
        }
        // strip does not mutate xml receivers.
        if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) {
            return false;
        }
        dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
        resultType = symTable.semanticError;
        return true;
    }
private boolean isFixedLengthList(BType type) {
switch(type.tag) {
case TypeTags.ARRAY:
return (((BArrayType) type).state != BArrayState.OPEN);
case TypeTags.TUPLE:
return (((BTupleType) type).restType == null);
case TypeTags.UNION:
BUnionType unionType = (BUnionType) type;
for (BType member : unionType.getMemberTypes()) {
if (!isFixedLengthList(member)) {
return false;
}
}
return true;
default:
return false;
}
}
    /**
     * Rejects lang-lib calls that would change the length of a fixed-length list
     * (e.g. push/pop on a sealed array) or break a tuple's shape via shift().
     */
    private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
        String invocationName = iExpr.name.getValue();
        // Only length-modifying lang-lib functions are relevant here.
        if (!listLengthModifierFunctions.contains(invocationName)) {
            return;
        }
        if (isFixedLengthList(varRefType)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                       varRefType);
            resultType = symTable.semanticError;
            return;
        }
        if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                       varRefType);
            resultType = symTable.semanticError;
            return;
        }
    }
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) &&
hasDifferentTypeThanRest((BTupleType) varRefType)) {
return true;
}
if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) {
BUnionType unionVarRef = (BUnionType) varRefType;
boolean allMemberAreFixedShapeTuples = true;
for (BType member : unionVarRef.getMemberTypes()) {
if (member.tag != TypeTags.TUPLE) {
allMemberAreFixedShapeTuples = false;
break;
}
if (!hasDifferentTypeThanRest((BTupleType) member)) {
allMemberAreFixedShapeTuples = false;
break;
}
}
return allMemberAreFixedShapeTuples;
}
return false;
}
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
if (tupleType.restType == null) {
return false;
}
for (BType member : tupleType.getTupleTypes()) {
if (!types.isSameType(tupleType.restType, member)) {
return true;
}
}
return false;
}
    /**
     * Checks an invocation of a function-pointer field on a record, e.g. {@code rec.f()}.
     * Falls back to lang-lib methods when the field is missing or not a function.
     *
     * @return true when the call was resolved as a function-pointer invocation
     */
    private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
        BType type = checkExpr(iExpr.expr, env);
        BLangIdentifier invocationIdentifier = iExpr.name;
        if (type == symTable.semanticError) {
            return false;
        }
        BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
                                                             type.tsymbol);
        // No such field: maybe it is a lang-lib method on the record type.
        if (fieldSymbol == symTable.notFoundSymbol) {
            checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
                                       invocationIdentifier, type);
            return false;
        }
        // Field exists but is not callable: again try lang-lib before reporting.
        if (fieldSymbol.kind != SymbolKind.FUNCTION) {
            checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                                       fieldSymbol.type);
            return false;
        }
        iExpr.symbol = fieldSymbol;
        iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
        checkInvocationParamAndReturnType(iExpr);
        iExpr.functionPointerInvocation = true;
        return true;
    }
    /**
     * Reports the given error unless the invocation resolves to a lang-lib method
     * on the receiver type; when it does, only the immutable-update check runs.
     */
    private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                            DiagnosticErrorCode errCode, Object... diagMsgArgs) {
        BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
        if (langLibMethodSymbol == symTable.notFoundSymbol) {
            dlog.error(pos, errCode, diagMsgArgs);
            resultType = symTable.semanticError;
        } else {
            checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
        }
    }
    /**
     * Type-checks an {@code object { ... }} constructor expression. When there is no
     * explicit type reference, the type-id set is inherited from the expected type
     * (directly for an object, via resolution for other definite types), then the
     * wrapped type-init expression is checked.
     */
    @Override
    public void visit(BLangObjectConstructorExpression objectCtorExpression) {
        if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
            BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType();
            if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) {
                // Inherit the type-id set straight from the expected object type.
                BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType;
                objectType.typeIdSet = expObjType.typeIdSet;
            } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
                // Non-object expected type must still resolve to a definite object type.
                if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                    dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                               objectCtorExpression.expectedType);
                    resultType = symTable.semanticError;
                    return;
                }
            }
        }
        visit(objectCtorExpression.typeInit);
    }
    /**
     * Checks whether the type is a "definite" object type: an object type or a union
     * of object types whose members collectively carry at most one type-id set.
     * Collected sets are returned via {@code typeIdSets}.
     */
    private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) {
        if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) {
            return false;
        }
        Set<BType> visitedTypes = new HashSet<>();
        if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) {
            return false;
        }
        // More than one distinct type-id set makes the object type ambiguous.
        return typeIdSets.size() <= 1;
    }
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
if (type.tag == TypeTags.OBJECT) {
var objectType = (BObjectType) type;
typeIdSets.add(objectType.typeIdSet);
return true;
}
if (type.tag == TypeTags.UNION) {
if (!visitedTypes.add(type)) {
return true;
}
for (BType member : ((BUnionType) type).getMemberTypes()) {
if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
return false;
}
}
return true;
}
return false;
}
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
Set<BTypeIdSet> typeIdSets = new HashSet<>();
if (!isDefiniteObjectType(type, typeIdSets)) {
return false;
}
if (typeIdSets.isEmpty()) {
objectType.typeIdSet = BTypeIdSet.emptySet();
return true;
}
var typeIdIterator = typeIdSets.iterator();
if (typeIdIterator.hasNext()) {
BTypeIdSet typeIdSet = typeIdIterator.next();
objectType.typeIdSet = typeIdSet;
return true;
}
return true;
}
    /**
     * Type-checks a {@code new ...} expression. Resolves the constructed type from the
     * explicit type reference or the expected type, then branches on its kind:
     * objects (including object-constructor expressions), streams, and unions of the
     * above. Sets {@code resultType} to the constructor's effective return type.
     */
    public void visit(BLangTypeInit cIExpr) {
        // `new` without a type reference cannot target `any`, and records are never constructible.
        if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
            resultType = symTable.semanticError;
            return;
        }
        BType actualType;
        if (cIExpr.userDefinedType != null) {
            actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
        } else {
            // No explicit type: infer the constructed type from the expected type.
            actualType = expType;
        }
        if (actualType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        if (actualType.tag == TypeTags.INTERSECTION) {
            actualType = ((BIntersectionType) actualType).effectiveType;
        }
        switch (actualType.tag) {
            case TypeTags.OBJECT:
                BObjectType actualObjectType = (BObjectType) actualType;
                // An object-constructor expression wraps an anonymous class definition
                // that must be analyzed (with readonly handling) before use.
                if (isObjectConstructorExpr(cIExpr, actualObjectType)) {
                    BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr,
                                                                                                             env);
                    List<BLangType> typeRefs = classDefForConstructor.typeRefs;
                    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
                    if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
                        handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                                          false);
                    } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
                                                                      Flags.READONLY)) {
                        handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                                          true);
                    } else {
                        analyzeObjectConstructor(classDefForConstructor, pkgEnv);
                    }
                    markConstructedObjectIsolatedness(actualObjectType);
                }
                // Only classes (not abstract object types) can be instantiated.
                if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                               actualType.tsymbol);
                    cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                    resultType = symTable.semanticError;
                    return;
                }
                if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                } else {
                    // No init method: arguments to `new` are not allowed.
                    if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                        return;
                    }
                }
                break;
            case TypeTags.STREAM:
                // A stream constructor takes at most one (iterator) argument.
                if (cIExpr.initInvocation.argExprs.size() > 1) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                    resultType = symTable.semanticError;
                    return;
                }
                BStreamType actualStreamType = (BStreamType) actualType;
                // The completion type, when present, must be () or contain an error.
                if (actualStreamType.completionType != null) {
                    BType completionType = actualStreamType.completionType;
                    if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                        dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                    BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                    BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                    BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                    // The iterator argument must be an object.
                    if (constructType.tag != TypeTags.OBJECT) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                   expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                    // With a close() method it must satisfy the closeable-iterator
                    // abstract type; otherwise the plain iterator type suffices.
                    BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                                                                                 BLangCompilerConstants.CLOSE_FUNC);
                    if (closeFunc != null) {
                        BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, closeableIteratorType)) {
                            dlog.error(iteratorExpr.pos,
                                       DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                       expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    } else {
                        BType iteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, iteratorType)) {
                            dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                       expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    }
                    // The iterator's next() return type must match the stream's shape.
                    BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                    if (nextReturnType != null) {
                        types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                                        DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                    } else {
                        dlog.error(constructType.tsymbol.getPosition(),
                                   DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                    }
                }
                if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                               actualType);
                    resultType = symTable.semanticError;
                    return;
                }
                resultType = actualType;
                return;
            case TypeTags.UNION:
                // Pick the unique union member whose init function matches the arguments.
                List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
                BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
                cIExpr.initInvocation.setBType(symTable.nilType);
                if (matchedType.tag == TypeTags.OBJECT) {
                    if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                        cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                        checkInvocationParam(cIExpr.initInvocation);
                        cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                        actualType = matchedType;
                        break;
                    } else {
                        if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                            return;
                        }
                    }
                }
                types.checkType(cIExpr, matchedType, expType);
                cIExpr.setBType(matchedType);
                resultType = matchedType;
                return;
            default:
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
                resultType = symTable.semanticError;
                return;
        }
        if (cIExpr.initInvocation.getBType() == null) {
            cIExpr.initInvocation.setBType(symTable.nilType);
        }
        // The expression's type is the object type, widened with any error types
        // the init method may return.
        BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
        resultType = types.checkType(cIExpr, actualTypeInitType, expType);
    }
    /**
     * Builds the expected return type of a stream iterator's {@code next()} method:
     * {@code record {| T value; |} | C} where T is the stream's constraint and C its
     * completion type members.
     */
    private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
        // Sealed anonymous record with a single `value` field of the constraint type.
        BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
        recordType.restFieldType = symTable.noType;
        recordType.sealed = true;
        Name fieldName = Names.VALUE;
        BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
                                                                fieldName, env.enclPkg.packageID,
                                                                streamType.constraint, env.scope.owner, pos, VIRTUAL));
        field.type = streamType.constraint;
        recordType.fields.put(field.name.value, field);
        recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
                                                        recordType, env.scope.owner, pos, VIRTUAL);
        recordType.tsymbol.scope = new Scope(env.scope.owner);
        recordType.tsymbol.scope.define(fieldName, field.symbol);
        // Union the record with every member of the completion type.
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(recordType);
        retTypeMembers.addAll(types.getAllTypes(streamType.completionType));
        BUnionType unionType = BUnionType.create(null);
        unionType.addAll(retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
                                                     env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
        return unionType;
    }
/**
 * Validates that arguments are only supplied to a type-init expression when the object
 * actually declares an {@code init} function. Logs an error and sets
 * {@code resultType = semanticError} otherwise.
 *
 * @return true when the invocation is valid, false when an error was reported
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    if (!cIExpr.initInvocation.argExprs.isEmpty()
            && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
                cIExpr.initInvocation.name.value);
        // Still type-check the arguments so nested errors are reported too.
        cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * Derives the type of a `new T(...)` expression from the object type and the return type
 * of its `init` function: `()` yields the object type itself, `E?`-style unions yield
 * `T|E` (nil replaced by the object type), anything else is a semantic error.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.NIL) {
        // init() returns (): the constructor yields just the object type.
        return objType;
    }
    if (initRetType.tag == TypeTags.UNION) {
        // Replace the nil member with the object type: `error?` -> `T|error`.
        LinkedHashSet<BType> members = new LinkedHashSet<>();
        members.add(objType);
        members.addAll(((BUnionType) initRetType).getMemberTypes());
        members.remove(symTable.nilType);

        BUnionType constructorRetType = BUnionType.create(null, members);
        constructorRetType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                Names.EMPTY, env.enclPkg.symbol.pkgID, constructorRetType,
                env.scope.owner, symTable.builtinPos, VIRTUAL);
        return constructorRetType;
    }
    return symTable.semanticError;
}
/**
 * Finds the object members of the LHS union whose {@code init} function can accept the
 * arguments of the given type-init expression. When the union holds exactly one object
 * member, it is returned immediately without argument matching.
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    // First pass: count object members (including objects behind intersections).
    int objectCount = 0;

    for (BType memberType : lhsUnionType.getMemberTypes()) {
        int tag = memberType.tag;

        if (tag == TypeTags.OBJECT) {
            objectCount++;
            continue;
        }

        if (tag != TypeTags.INTERSECTION) {
            continue;
        }

        if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
            objectCount++;
        }
    }

    boolean containsSingleObject = objectCount == 1;

    // Second pass: collect object members whose init signature matches the arguments.
    List<BType> matchingLhsMemberTypes = new ArrayList<>();
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.OBJECT) {
            // member is not an object.
            continue;
        }
        if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
            // Only classes (not abstract object types) can be instantiated.
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                    lhsUnionType.tsymbol);
        }

        if (containsSingleObject) {
            return Collections.singletonList(memberType);
        }

        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
            matchingLhsMemberTypes.add(memberType);
        }
    }
    return matchingLhsMemberTypes;
}
/**
 * Resolves the single constructed type from the matching union members: exactly one
 * match succeeds; zero or multiple matches log an error and yield a semantic error.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    if (matchingLhsMembers.size() == 1) {
        // Unambiguous: infer the constructed type from the single match.
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    DiagnosticErrorCode errorCode = matchingLhsMembers.isEmpty()
            ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticErrorCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, errorCode, lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments can satisfy the signature of the given
 * attached function (used to choose an object {@code init} among union members).
 *
 * @param invocationArguments positional and named arguments of the init invocation
 * @param function            candidate initializer, or null when the object has none
 * @return true when every required parameter is covered and all arguments are compatible
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));

    if (function == null) {
        // No initializer: only an argument-less invocation can match.
        return invocationArguments.isEmpty();
    }

    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }

    // Split the arguments into named and positional groups.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            positionalArgs.add(argument);
        }
    }

    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.isDefaultable)
            .collect(Collectors.toList());
    // Fewer arguments than required parameters can never match.
    if (requiredParams.size() > invocationArguments.size()) {
        return false;
    }

    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.isDefaultable)
            .collect(Collectors.toList());

    int givenRequiredParamCount = 0;
    for (int i = 0; i < positionalArgs.size(); i++) {
        if (function.symbol.params.size() > i) {
            givenRequiredParamCount++;
            BVarSymbol functionParam = function.symbol.params.get(i);
            if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
            continue;
        }

        // Surplus positional arguments may still be absorbed by a rest parameter.
        if (function.symbol.restParam != null) {
            BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
            if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                return false;
            }
            continue;
        }

        return false;
    }

    for (BLangNamedArgsExpression namedArg : namedArgs) {
        boolean foundNamedArg = false;
        // Named args may only target parameters after the positionally-supplied ones.
        List<BVarSymbol> params = function.symbol.params;
        for (int i = givenRequiredParamCount; i < params.size(); i++) {
            BVarSymbol functionParam = params.get(i);
            if (!namedArg.name.value.equals(functionParam.name.value)) {
                continue;
            }
            foundNamedArg = true;
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // NOTE(review): operand order (param type assignable to arg type) is reversed
            // relative to the positional-arg check above — confirm this is intentional.
            if (!types.isAssignable(functionParam.type, namedArgExprType)) {
                // can continue should be fixed with checking var args
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
        }
        if (!foundNamedArg) {
            return false;
        }
    }

    // A match requires that every required parameter has been satisfied.
    return requiredParams.isEmpty();
}
/**
 * Type-checks a wait-for-all expression (`wait {a, b}`) against the expected type:
 * a record type checks field-by-field; a map (or no/any expectation) produces a map
 * whose constraint is the union of the waited futures' eventual types.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr);
            break;
        case TypeTags.MAP:
            checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
            LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypesForMap.size() == 1) {
                // Single member type: no union wrapper needed.
                resultType = new BMapType(TypeTags.MAP,
                        memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
            resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No concrete expectation: infer a map type from the key-value expressions.
            checkTypesForMap(waitForAllExpr, expType);
            LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypes.size() == 1) {
                resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintType = BUnionType.create(null, memberTypes);
            resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
            resultType = symTable.semanticError;
            break;
    }
    waitForAllExpr.setBType(resultType);

    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
    }
}
/**
 * Builds the anonymous sealed record type that a wait-for-all expression would naturally
 * produce (one field per key, typed by the eventual type of the waited future). Used for
 * diagnostics when the expected type is incompatible.
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                Location pos) {
    BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
    List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;

    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangIdentifier fieldName;
        // `wait {a}` shorthand: the value expression's variable name doubles as the key.
        if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            fieldName = keyVal.key;
        } else {
            fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
        }

        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
        // Unwrap future<T> to T; non-future symbols keep their own type.
        BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
        BField field = new BField(names.fromIdNode(keyVal.key), null,
                                  new BVarSymbol(0, names.fromIdNode(keyVal.key),
                                                 names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID,
                                                 fieldType, null, keyVal.pos, VIRTUAL));
        retType.fields.put(field.name.value, field);
    }

    retType.restFieldType = symTable.noType;
    retType.sealed = true;
    retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
                                                 pos, VIRTUAL);
    return retType;
}
/**
 * Collects the distinct eventual types of a wait-for-all expression's key-value pairs,
 * unwrapping `future<T>` to `T`. Insertion order is preserved.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        // Prefer the key expression's type when present, otherwise the value expression's.
        BType pairType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType();
        boolean isFuture = pairType.tag == TypeTags.FUTURE;
        memberTypes.add(isFuture ? ((BFutureType) pairType).constraint : pairType);
    }
    return memberTypes;
}
/**
 * Type-checks every key-value pair of a wait-for-all expression against the same
 * expected type (the map's constraint type).
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType);
    }
}
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
Map<String, BField> lhsFields = ((BRecordType) expType).fields;
if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
resultType = symTable.semanticError;
return;
}
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
String key = keyVal.key.value;
if (!lhsFields.containsKey(key)) {
if (((BRecordType) expType).sealed) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
resultType = symTable.semanticError;
} else {
BType restFieldType = ((BRecordType) expType).restFieldType;
checkWaitKeyValExpr(keyVal, restFieldType);
}
} else {
checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
}
}
checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos);
if (symTable.semanticError != resultType) {
resultType = expType;
}
}
/**
 * Reports an error for every required field of the expected record type that has no
 * corresponding key in the wait-for-all expression.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          Location pos) {
    for (BField field : type.fields.values()) {
        // Check if `field` is explicitly assigned a value in the wait expression.
        boolean hasField = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                hasField = true;
                break;
            }
        }
        if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Type-checks one wait key-value pair against the expected field/constraint type by
 * wrapping the expectation in `future<type>`, then widens the future's constraint with
 * the implicit `error` the wait may produce.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
    BLangExpression expr;
    if (keyVal.keyExpr != null) {
        // `wait {a}` shorthand: resolve the key identifier itself as the waited future.
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
        keyVal.keyExpr.setBType(symbol.type);
        expr = keyVal.keyExpr;
    } else {
        expr = keyVal.valueExpr;
    }
    BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
    checkExpr(expr, env, futureType);
    setEventualTypeForExpression(expr, type);
}
/**
 * For a waited (non-worker) expression, widens the future's constraint to include
 * `error` — waiting may surface a failure — and verifies that the widened "eventual"
 * type is still assignable to the currently expected type.
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                          BType currentExpectedType) {
    if (expression == null) {
        return;
    }
    // Direct worker references already carry the correct eventual type.
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BFutureType futureType = (BFutureType) expression.expectedType;
    BType currentType = futureType.constraint;
    if (types.containsErrorType(currentType)) {
        // Error is already part of the constraint; nothing to widen.
        return;
    }

    BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
    if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
            !types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                currentExpectedType, eventualType, expression);
    }
    futureType.constraint = eventualType;
}
/**
 * Widens the result type of a single-future wait expression with `error` (unless the
 * expression is a direct worker reference or the result already contains error), and
 * validates the widened type against the expected constraint.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression,
                                              Location pos) {
    if ((resultType == symTable.semanticError) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        // No concrete expectation: the wait simply yields `T|error`.
        resultType = eventualType;
        return;
    }

    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    // Widen in place for future results; otherwise replace the result type.
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Like {@link #setEventualTypeForWaitExpression}, but for alternate-wait expressions
 * (`wait a | b`): widens the result type with `error` when any alternative refers to a
 * non-worker, and validates against the expected constraint.
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
    // Note: the original code checked `types.containsErrorType(resultType)` twice in a
    // row; the duplicate has been removed (the first check already short-circuits).
    if ((resultType == symTable.semanticError) ||
            (expression.getKind() != NodeKind.BINARY_EXPR) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
        // All alternatives are direct worker references; no widening required.
        return;
    }

    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        resultType = eventualType;
        return;
    }

    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    // Widen in place for future results; otherwise replace the result type.
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Returns true when the expression is a simple variable reference with a resolved
 * symbol that names a worker visible in the current environment.
 */
private boolean isSimpleWorkerReference(BLangExpression expression) {
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
    if (simpleVarRef.symbol == null) {
        // Unresolved reference cannot be a worker.
        return false;
    }
    // Collapsed the original `if (…) return true; return false;` into a direct return.
    return workerExists(env, simpleVarRef.variableName.value);
}
/**
 * True when either side of an alternate-wait binary expression refers to something
 * other than a worker.
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
    return isReferencingNonWorker(binaryExpr.lhsExpr) || isReferencingNonWorker(binaryExpr.rhsExpr);
}
/**
 * True when the expression (recursing through nested alternate-wait binaries) refers to
 * anything other than a declared worker. Non-variable-reference expressions always
 * count as non-worker.
 */
private boolean isReferencingNonWorker(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.BINARY_EXPR) {
        return isReferencingNonWorker((BLangBinaryExpr) expression);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol referencedSymbol = ((BLangSimpleVarRef) expression).symbol;
        if (workerExists(env, referencedSymbol.getName().value)) {
            // Direct worker reference.
            return false;
        }
    }
    return true;
}
/**
 * Type-checks a ternary conditional. The condition narrows types in the then/else
 * branches; with no expected type, the result is whichever branch type subsumes the
 * other, otherwise the branches' incompatibility is an error.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);

    // Apply type-narrowing from the condition to each branch's environment.
    SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
    BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);

    SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
    BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);

    if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
            elseType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expectation: the result type is the more general of the two branches.
        if (types.isAssignable(elseType, thenType)) {
            resultType = thenType;
        } else if (types.isAssignable(thenType, elseType)) {
            resultType = elseType;
        } else {
            dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
/**
 * Type-checks a single/alternate wait expression: the operand is checked against
 * `future<expType>`, the future constraint(s) are unwrapped into the result type, and
 * the eventual `error` possibility is folded in for non-worker operands.
 */
public void visit(BLangWaitExpr waitExpr) {
    expType = new BFutureType(TypeTags.FUTURE, expType, null);
    checkExpr(waitExpr.getExpression(), env, expType);
    // Handle union types in lhs
    if (resultType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            // Single member: take it directly instead of allocating a throwaway array
            // (original used `memberTypes.toArray(new BType[0])[0]`).
            resultType = memberTypes.iterator().next();
        } else {
            resultType = BUnionType.create(null, memberTypes);
        }
    } else if (resultType != symTable.semanticError) {
        // Handle other types except for semantic errors
        resultType = ((BFutureType) resultType).constraint;
    }

    BLangExpression waitFutureExpression = waitExpr.getExpression();
    if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
        setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
    } else {
        setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
    }
    waitExpr.setBType(resultType);

    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
    }
}
/**
 * Accumulates the union's member types into the given set, unwrapping `future<T>`
 * members to their constraint `T`. Returns the same set for chaining.
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : unionType.getMemberTypes()) {
        boolean isFuture = memberType.tag == TypeTags.FUTURE;
        memberTypes.add(isFuture ? ((BFutureType) memberType).constraint : memberType);
    }
    return memberTypes;
}
/**
 * Type-checks a `trap` expression: the result is the trapped expression's type
 * union-ed with `error`. Worker-receive operands need a second visit (the first visit
 * defers until the receive is typed).
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Worker-receive nodes are typed lazily; detect whether this is the first visit.
    boolean firstVisit = trapExpr.expr.getBType() == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, env, expType);
    boolean definedWithVar = expType == symTable.noType;

    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer typing; this visitor will be re-entered once the receive is typed.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = trapExpr.getBType();
            exprType = trapExpr.expr.getBType();
        }
    }

    if (expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        // trap yields `T|error`; flatten if T is already a union.
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }

    resultType = types.checkType(trapExpr, actualType, expType);
    if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
    }
}
/**
 * Type-checks a binary expression. Special cases: `f1 | f2` under a future expectation
 * (alternate wait) unions the operand types; `xml + xml` concatenation produces an xml
 * type over the union of the constituents. Otherwise the operator symbol is resolved
 * through a chain of operator-resolution strategies.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    // Alternate-wait form `a | b` when a future type is expected.
    if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
        // Return if both or atleast one of lhs and rhs types are errors
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }

    // Push a decimal expectation into literal operands where applicable.
    checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);

    SymbolEnv rhsExprEnv;
    BType lhsType;
    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(binaryExpr.expectedType)) {
        // Try checking the operand against the expected type first; fall back if it fails.
        lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, env);
    }

    if (binaryExpr.opKind == OperatorKind.AND) {
        // `a && b`: b is checked under the truth-narrowed environment of a.
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        // `a || b`: b is checked under the falsity-narrowed environment of a.
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
    } else {
        rhsExprEnv = env;
    }

    BType rhsType;

    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(binaryExpr.expectedType)) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
    }

    BType actualType = symTable.semanticError;

    switch (binaryExpr.opKind) {
        case ADD:
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);

            if (leftConstituent != null && rightConstituent != null) {
                // xml + xml concatenation.
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // Intentional fall-through: non-xml ADD is resolved like any other operator.
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Look up operator symbol if both rhs and lhs are not error types
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);

                // Try progressively broader resolution strategies until one succeeds.
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr, env);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                            lhsType, rhsType);
                } else {
                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }

    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * True when the expected type is a nullable union containing a float or decimal member
 * (e.g. `float?`, `decimal|()`), so numeric literals can be contextually typed.
 */
private boolean isOptionalFloatOrDecimal(BType expectedType) {
    // The original also tested `expectedType.tag != TypeTags.ANY`, which is redundant:
    // a tag equal to UNION can never simultaneously equal ANY.
    if (expectedType.tag == TypeTags.UNION && expectedType.isNullable()) {
        for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
            if (memberType.tag == TypeTags.FLOAT || memberType.tag == TypeTags.DECIMAL) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Speculatively checks a clone of the expression against the binary expression's
 * expected type with diagnostics muted. If that succeeds, the real expression is
 * checked with the expectation; otherwise it is checked with no expected type.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
    // Mute and snapshot diagnostics so the speculative check leaves no trace.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();

    expr.cloneAttempt++;
    // Check a clone so the real AST node is not contaminated by the trial run.
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        return checkExpr(expr, env, binaryExpr.expectedType);
    } else {
        return checkExpr(expr, env);
    }
}
/**
 * Walks outwards from the given environment to the one owned by {@code node} and
 * returns a clone of its enclosing environment; falls back to a fresh environment
 * for {@code node} when no enclosing environment exists.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Builds the symbol environment visible after a query `join` clause: a clone of the
 * env chain up to the join node, with its enclosing env rewired to the state before
 * the last input (from/join) clause.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        // Splice out the symbols introduced by the preceding input clause.
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        clone = new SymbolEnv(node, null);
    }
    return clone;
}
/**
 * Returns the node of the nearest enclosing FROM or JOIN clause environment,
 * or null when there is none.
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
/**
 * A `transactional` expression is always boolean-typed.
 */
public void visit(BLangTransactionalExpr transactionalExpr) {
    BType actualType = symTable.booleanType;
    resultType = types.checkType(transactionalExpr, actualType, expType);
}
/**
 * A `commit` expression yields `error?` (it may fail or succeed silently).
 */
public void visit(BLangCommitExpr commitExpr) {
    resultType = types.checkType(commitExpr,
            BUnionType.create(null, symTable.errorType, symTable.nilType), expType);
}
/**
 * Returns the XML constituent of a type: the constraint of `xml<T>`, the type itself
 * for a non-sequence XML item type, or null when the type is not XML-like.
 */
private BType getXMLConstituents(BType type) {
    if (type.tag == TypeTags.XML) {
        return ((BXMLType) type).constraint;
    }
    if (TypeTags.isXMLNonSequenceType(type.tag)) {
        return type;
    }
    return null;
}
/**
 * When a decimal result is expected, pushes the decimal expectation down into both
 * operands of the four basic arithmetic operators so numeric literals get typed as
 * decimal. No-op for any other expected type or operator.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind opKind = binaryExpr.opKind;
    boolean isBasicArithmetic = opKind == OperatorKind.ADD || opKind == OperatorKind.SUB
            || opKind == OperatorKind.MUL || opKind == OperatorKind.DIV;
    if (isBasicArithmetic) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression (`lhs ?: rhs`): the LHS must be nullable; the result
 * type is the nil-stripped LHS type reconciled with the RHS type (or the expected type
 * when one is given).
 */
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Strip nil from the LHS union; that is what the elvis expression can yield.
            BUnionType unionType = (BUnionType) lhsType;
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));

            if (memberTypes.size() == 1) {
                // Single remaining member: take it directly instead of the original
                // `memberTypes.toArray(new BType[0])[0]` throwaway-array detour.
                actualType = memberTypes.iterator().next();
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expectation: both branches must agree on a single type.
        if (types.isSameType(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else {
            dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType,
                    rhsReturnType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
/**
 * A parenthesized expression has exactly the type of its inner expression.
 */
@Override
public void visit(BLangGroupExpr groupExpr) {
    BType innerType = checkExpr(groupExpr.expression, env, expType);
    resultType = innerType;
}
/**
 * Type-checks a type-descriptor expression (`T` used as a value): the result is
 * `typedesc<T>` unless the resolved type is already a typedesc or unresolved.
 */
public void visit(BLangTypedescExpr accessExpr) {
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    }
    int resolveTypeTag = accessExpr.resolvedType.tag;
    final BType actualType;
    if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) {
        // Wrap the resolved type in typedesc<T>.
        actualType = new BTypedescType(accessExpr.resolvedType, null);
    } else {
        actualType = accessExpr.resolvedType;
    }
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression: `untaint` passes the operand type through, `typeof`
 * yields `typedesc<T>`, and arithmetic/logical operators are resolved via the symbol
 * resolver (with decimal expectation pushed into `+`/`-` operands).
 */
public void visit(BLangUnaryExpr unaryExpr) {
    BType exprType;
    BType actualType = symTable.semanticError;
    if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = exprType;
        }
    } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = new BTypedescType(exprType, null);
        }
    } else {
        // Propagate a decimal expectation into unary plus/minus so literals type as decimal.
        boolean decimalAddNegate = expType.tag == TypeTags.DECIMAL &&
                (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.SUB.equals(unaryExpr.operator));
        exprType = decimalAddNegate ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, exprType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }

    resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a type-cast expression `<T> expr`: analyzes annotation attachments,
 * speculatively checks the operand against the target type (muted diagnostics), then
 * validates castability of the operand type to the target.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.semanticError;
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, this.env);
    }

    // Annotation-only conversion (no type node): just check the operand.
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        if (!conversionExpr.annAttachments.isEmpty()) {
            resultType = checkExpr(expr, env, this.expType);
        }
        return;
    }

    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
                                                symResolver.resolveTypeNode(conversionExpr.typeNode, env));

    conversionExpr.targetType = targetType;

    // Speculative check with diagnostics muted: does the operand already fit the target?
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();

    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);

    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);

    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }

    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        checkExpr(expr, env, targetType);
    } else {
        checkExpr(expr, env, symTable.noType);
    }

    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
        // We reach this block only if the cast is valid, so we set the target type as the actual type.
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    resultType = types.checkType(conversionExpr, actualType, this.expType);
}
/**
 * Type-checks a lambda: the lambda's type is its function's type; the enclosing
 * environment is captured for closure desugaring. The lambda is registered on the
 * package only during real (non-speculative) checking.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType());
    // creating a copy of the env to visit the lambda function later
    bLangLambdaFunction.capturedClosureEnv = env.createClone();

    if (!this.nonErrorLoggingCheck) {
        // Skip registration during muted speculative checks to avoid duplicates.
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }

    resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType);
}
/**
 * Type-checks an arrow function. Its parameter and return types are inferred entirely
 * from the expected (LHS) type, which must be — or contain exactly one — non-`function`
 * invokable type.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BType expectedType = expType;
    if (expectedType.tag == TypeTags.UNION) {
        // Inference works only when the union holds exactly one invokable member.
        BUnionType unionType = (BUnionType) expectedType;
        BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
                .collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
                            if (list.size() != 1) {
                                return null;
                            }
                            return list.get(0);
                        }
                ));

        if (invokableType != null) {
            expectedType = invokableType;
        }
    }
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }

    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
    // if function return type is none, assign the inferred return type
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name: handles `xmlns` declarations, rejects reserved
 * prefixes, and resolves a non-empty prefix to an XML namespace symbol (possibly via a
 * string constant in an imported package).
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // TODO: check isLHS

    // An attribute named `xmlns` (no prefix) declares the default namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }

    // An attribute prefixed `xmlns:` declares a named namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }

    // Outside attribute position, `xmlns` is not a usable prefix.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }

    // XML names without a prefix need no namespace resolution.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }

    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
    // NOTE(review): `prefix.isEmpty()` is always false here because of the early return
    // above, so this branch appears unreachable — confirm before removing.
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }

    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }

    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        // The prefix names an imported module: look up a string constant of that name.
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        resultType = symTable.semanticError;
        return;
    }

    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace from a module-level string constant referenced as
 * {@code prefix:localname}, where the constant's value is expected in Clark notation:
 * {@code {namespaceURI}localName}.
 *
 * @param localname name of the constant holding the namespace string
 * @param prefix    module prefix used in the qualified name (for diagnostics only)
 * @param pkgSymbol module whose scope is searched for the constant
 * @param pos       position used for error reporting
 * @return a synthesized XMLNS symbol, or {@code null} on any failure (a diagnostic is logged)
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        // Suppress the error for synthetic (recovered) nodes to avoid cascading diagnostics.
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    // Only a string constant can describe a namespace.
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    // `e > s + 1` guarantees a non-empty URI between the braces.
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // Fix: skip past the closing '}' itself; `substring(e)` leaked the brace into the local name.
        String local = constVal.substring(e + 1);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
/**
 * Type-checks an XML attribute: both the qualified name and the quoted-string value
 * must check as {@code string}, after which the attribute is defined in the current env.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attrEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    BLangXMLQName attrName = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(attrName, attrEnv, symTable.stringType);
    // An unprefixed attribute name carries no namespace.
    boolean unprefixed = attrName.prefix.value.isEmpty();
    if (unprefixed) {
        attrName.namespaceURI = null;
    }
    checkExpr(bLangXMLAttribute.value, attrEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal: namespace-declaration attributes first (so prefixes are in
 * scope), then ordinary attributes, then the in-scope namespace map, tag matching, and children.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Collect every prefix actually used by the element tag and its attributes,
    // so only the namespaces that matter are recorded on the literal.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // First pass: check only the xmlns declarations, defining their prefixes in the element env.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // A namespace URI must be a literal; interpolation is disallowed.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Second pass: check the remaining (non-namespace) attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    // The default namespace is kept separately from the prefixed ones.
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    // Verify start/end tag names agree, then normalize children (adjacent text nodes merge).
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            this.expType);
    // If the context requires a readonly value, propagate immutability into the children.
    if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
        markChildrenAsImmutable(bLangXMLElementLiteral);
    }
}
/**
 * Returns true when the attribute is a namespace declaration:
 * either a bare {@code xmlns} name or an {@code xmlns:prefix} name.
 */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName qName = (BLangXMLQName) attribute.name;
    String prefix = qName.prefix.value;
    if (prefix.isEmpty()) {
        return qName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
    }
    return prefix.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/**
 * Maps an XML literal node kind to its XML item type: element, text, processing instruction,
 * or (for anything else) comment.
 */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            return symTable.xmlCommentType;
    }
}
/**
 * Suppresses diagnostic logging for a speculative (look-ahead) type check.
 * Pair with {@link #unMuteErrorLog(boolean, int)} to restore the previous state.
 */
public void muteErrorLog() {
    this.nonErrorLoggingCheck = true;
    this.dlog.mute();
}
/**
 * Restores diagnostic logging after a speculative type check.
 *
 * @param prevNonErrorLoggingCheck the flag value captured before muting
 * @param errorCount               the error count captured before muting
 */
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(errorCount);
    // Only unmute if logging was active before this speculative check began
    // (supports nested mute/unmute pairs).
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
}
/**
 * Wraps a single XML item type in the corresponding sequence type
 * ({@code xml<T>}); text items collapse to the plain text type.
 */
public BType getXMLSequenceType(BType xmlSubType) {
    if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    // Text (and any other kind) is already sequence-like.
    return symTable.xmlTextType;
}
/**
 * Type-checks an XML sequence literal. Only {@code xml}, {@code xml:Text}, unions of those,
 * or no expected type are valid contexts; each item is checked against the expected type.
 */
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                "XML Sequence");
        resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct item types in the sequence.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        resultType = checkExpr(expressionItem, env, expType);
        if (!xmlTypesInSequence.contains(resultType)) {
            xmlTypesInSequence.add(resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        // A homogeneous sequence keeps the precise item-sequence type; mixed falls back to xml.
        if (xmlTypesInSequence.size() == 1) {
            resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        resultType = symTable.xmlTextType;
        return;
    }
    // Remaining case is a union (guaranteed by the guard above): every member must be xml-like.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = symTable.xmlType;
}
/**
 * Type-checks an XML text literal. An empty single-fragment literal is the empty
 * XML sequence ({@code xml:Never}); anything else is {@code xml:Text}.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(literalValues);
    BLangExpression xmlExpression = literalValues.get(0);
    // A lone empty string fragment means there is no text at all.
    if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL &&
            ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) {
        resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType);
        return;
    }
    resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType);
}
/**
 * Type-checks an XML comment literal: interpolations must be string-compatible,
 * and the literal must be compatible with the expected XML subtype, if any.
 */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
    if (expType != symTable.noType) {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos,
                symTable.xmlCommentType, this.expType);
        return;
    }
    resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
}
/**
 * Type-checks an XML processing-instruction literal: the target and data fragments
 * must be strings, and the result must fit the expected XML subtype, if any.
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType);
}
/** An XML quoted string (attribute value) checks as {@code string} after validating its interpolations. */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/** The `x@["attr"]` attribute-access syntax is deprecated: always reported as an error. */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    dlog.error(xmlAttributeAccessExpr.pos,
            DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
    resultType = symTable.semanticError;
}
/** A string template checks as {@code string}; each interpolation must be string-compatible. */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
/**
 * Type-checks a raw template literal against an object type that exposes
 * {@code strings} and {@code insertions} fields, validating both parts' arity and types.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }
    // determineRawTemplateLiteralType guarantees an object type with both fields here.
    BObjectType literalType = (BObjectType) type;
    BType stringsType = literalType.fields.get("strings").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    BType insertionsType = literalType.fields.get("insertions").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    resultType = type;
}
/**
 * Determines the object type a raw template literal must conform to. Falls back to the
 * built-in {@code RawTemplate} type when there is no (or an {@code any}) context, and rejects
 * classes, objects with extra fields, or objects with methods.
 *
 * @return the resolved object type, or {@code symTable.semanticError} (with a logged diagnostic)
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Raw templates may only be assigned to abstract object types, never to classes.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) type;
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // Only the two standard fields (`strings`, `insertions`) are allowed.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks a raw template's strings/insertions against the declared list (array or tuple) type.
 *
 * @param exprs     the template fragments to check
 * @param fieldType declared field type; intersections are unwrapped to their effective type
 * @param code      diagnostic to report on an arity mismatch
 * @param pos       position for arity diagnostics
 * @return true if any fragment failed type checking
 * @throws IllegalStateException if the field type is not a list type
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos) {
    BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType :
            ((BIntersectionType) fieldType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            // NOTE(review): an arity mismatch logs an error yet returns false (not errored),
            // so the caller will not mark the overall type as semanticError — confirm intentional.
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        // Fixed members first, then any surplus against the rest type.
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/** Returns true when the type is {@code any} or a union that has {@code any} as a member. */
private boolean containsAnyType(BType type) {
    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
    }
    return type == symTable.anyType;
}
/**
 * From a (possibly union) expected type, picks the unique member assignable to the built-in
 * {@code RawTemplate} type. Ambiguity is an error; no match returns the expected type unchanged.
 */
private BType getCompatibleRawTemplateType(BType expType, Location pos) {
    if (expType.tag != TypeTags.UNION) {
        return expType;
    }
    List<BType> candidates = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (types.isAssignable(memberType, symTable.rawTemplateType)) {
            candidates.add(memberType);
        }
    }
    switch (candidates.size()) {
        case 0:
            // No member matches; let the later checkType call report the mismatch.
            return expType;
        case 1:
            return candidates.get(0);
        default:
            dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
                    expType);
            return symTable.semanticError;
    }
}
/** An int range `a ... b` requires int bounds and produces an int array. */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}
/** A rest argument (`...expr`) takes the type of its wrapped expression. */
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/**
 * A contextually-inferred typedesc default is only valid where a {@code typedesc}
 * is expected; it simply takes on the expected type.
 */
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    if (expType.tag == TypeTags.TYPEDESC) {
        resultType = expType;
        return;
    }
    dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
    resultType = symTable.semanticError;
}
/** A named argument (`name = expr`) takes the type of its wrapped expression. */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
}
/**
 * Type-checks a match expression: the matched expression and every pattern body are checked
 * in a fresh block scope, and the overall type is the union of all pattern result types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // `_`-named binding variables are wildcards and never define a symbol.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
    });
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        // A single result type needs no union wrapper.
        actualType = matchExprTypes.toArray(new BType[0])[0];
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
/** `check expr`: records whether we are inside a query (affects error typing), then delegates. */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    checkWithinQueryExpr = isWithinQuery();
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/** `checkpanic expr`: shares all the checking logic with `check`. */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a query expression. Maintains the {@code prevEnvs}/{@code queryEnvs}/
 * {@code queryFinalClauses} stacks (push before visiting clauses, pop after — order matters
 * for nested queries), visits each clause in sequence, then resolves the overall query type
 * from the select clause.
 */
@Override
public void visit(BLangQueryExpr queryExpr) {
    boolean cleanPrevEnvs = false;
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
        cleanPrevEnvs = true;
    }
    // For parallel (nested) query checking, start from the outer environment.
    if (breakToParallelQueryEnv) {
        queryEnvs.push(prevEnvs.peek());
    } else {
        queryEnvs.push(env);
    }
    queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    clauses.forEach(clause -> clause.accept(this));
    BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
            collectionNode.getBType(), expType, queryExpr);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    queryFinalClauses.pop();
    queryEnvs.pop();
    if (cleanPrevEnvs) {
        prevEnvs.pop();
    }
    // Table results need additional constraint validation (key specifier etc.).
    if (actualType.tag == TypeTags.TABLE) {
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    checkWithinQueryExpr = false;
    resultType = actualType;
}
/** True while a query's clause stack is active (both query stacks are non-empty). */
private boolean isWithinQuery() {
    return !(queryEnvs.isEmpty() || queryFinalClauses.isEmpty());
}
/**
 * Resolves the overall type of a query expression from its select expression and the
 * expected (target) type. Tries each non-error, non-nil candidate of the target type,
 * checking the select expression against the matching element/constraint type, and
 * combines the unique match with the query's error component.
 *
 * @return the resolved query type, or {@code symTable.semanticError} when no candidate fits
 *         or several candidates are ambiguous
 */
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                               BType targetType, BLangQueryExpr queryExpr) {
    // Error/nil members of the target belong to the query's failure channel, not the result.
    List<BType> resultTypes = types.getAllTypes(targetType).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    if (resultTypes.isEmpty()) {
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        switch (type.tag) {
            case TypeTags.ARRAY:
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true));
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true));
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No contextual type: derive the result shape from the input collection kind.
                selectType = checkExpr(selectExp, env, type);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        BType errorType = getErrorType(collectionType, queryExpr);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        // A non-nil error component widens the result to `T|error`.
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        return actualType;
    }
}
/**
 * Builds the table type for a table-producing query. With a key specifier the key
 * fields are validated and made readonly, and the result becomes {@code table|error}
 * (key conflicts can occur at runtime).
 */
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
    final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    if (!queryExpr.fieldNameIdentifierList.isEmpty()) {
        validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType);
        markReadOnlyForConstraintType(constraintType);
        tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream()
                .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList());
        return BUnionType.create(null, tableType, symTable.errorType);
    }
    return tableType;
}
/**
 * Validates each key-specifier field against the table's row (constraint) type:
 * the field must exist, and is forced readonly so it can serve as a key.
 */
private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) {
    for (IdentifierNode identifier : fieldList) {
        BField field = types.getTableConstraintField(constraintType, identifier.getValue());
        if (field == null) {
            dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER,
                    identifier.getValue(), constraintType);
        } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            // Key fields must be immutable; mark them readonly in place.
            field.symbol.flags |= Flags.READONLY;
        }
    }
}
/**
 * Marks a sealed record constraint type readonly when every one of its fields is
 * already readonly; any mutable field (or a non-record/open record) leaves it untouched.
 */
private void markReadOnlyForConstraintType(BType constraintType) {
    if (constraintType.tag != TypeTags.RECORD) {
        return;
    }
    BRecordType recordType = (BRecordType) constraintType;
    for (BField field : recordType.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            return;
        }
    }
    // Open records can gain mutable rest fields, so only sealed records qualify.
    if (recordType.sealed) {
        recordType.flags |= Flags.READONLY;
        recordType.tsymbol.flags |= Flags.READONLY;
    }
}
/**
 * Computes the error component a query can produce, derived from the iterated
 * collection: a stream's completion type, an iterable object's `next()` result, or the
 * error members of the collection's iterator return type. Queries inside another query
 * that produce streams always include {@code error}.
 *
 * @return the combined error type, or {@code null} when none applies
 */
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
    if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
        return null;
    }
    BType returnType = null, errorType = null;
    switch (collectionType.tag) {
        case TypeTags.STREAM:
            errorType = ((BStreamType) collectionType).completionType;
            break;
        case TypeTags.OBJECT:
            returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
            break;
        default:
            // Built-in collections: inspect the lang-lib iterator's `next()` result type.
            BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                    names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
            if (itrSymbol == this.symTable.notFoundSymbol) {
                return null;
            }
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
            returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType);
    }
    List<BType> errorTypes = new ArrayList<>();
    if (returnType != null) {
        types.getAllTypes(returnType).stream()
                .filter(t -> types.isAssignable(t, symTable.errorType))
                .forEach(errorTypes::add);
    }
    // `check` inside a nested stream query makes the stream completable with nil or error.
    if (checkWithinQueryExpr && queryExpr.isStream) {
        if (errorTypes.isEmpty()) {
            errorTypes.add(symTable.nilType);
        }
        errorTypes.add(symTable.errorType);
    }
    if (!errorTypes.isEmpty()) {
        if (errorTypes.size() == 1) {
            errorType = errorTypes.get(0);
        } else {
            errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
        }
    }
    return errorType;
}
/**
 * Without a contextual type, a query's result shape mirrors the iterated collection:
 * table → table, stream → stream, xml → xml of the select type, string → string,
 * anything else → array of the select type.
 */
private BType getNonContextualQueryType(BType staticType, BType basicType) {
    int collectionTag = basicType.tag;
    if (collectionTag == TypeTags.TABLE) {
        return symTable.tableType;
    }
    if (collectionTag == TypeTags.STREAM) {
        return symTable.streamType;
    }
    if (collectionTag == TypeTags.XML) {
        return new BXMLType(staticType, null);
    }
    if (collectionTag == TypeTags.STRING) {
        return symTable.stringType;
    }
    return new BArrayType(staticType);
}
/**
 * Type-checks a query action (`from ... do { ... }`). Pushes the env stacks, visits the
 * clauses, analyzes the do-block body, and produces `error?` as the action's type.
 */
@Override
public void visit(BLangQueryAction queryAction) {
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
    } else {
        // Nested query: keep working from the same outer environment.
        prevEnvs.push(prevEnvs.peek());
    }
    queryEnvs.push(prevEnvs.peek());
    BLangDoClause doClause = queryAction.getDoClause();
    queryFinalClauses.push(doClause);
    List<BLangNode> clauses = queryAction.getQueryClauses();
    clauses.forEach(clause -> clause.accept(this));
    semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
    // A query action completes with either nil or an error.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    queryFinalClauses.pop();
    queryEnvs.pop();
    prevEnvs.pop();
}
/**
 * `from` clause: opens a type-narrowed env on top of the query env stack, checks the
 * collection, infers the binding-pattern type, and defines the iteration variables.
 */
@Override
public void visit(BLangFromClause fromClause) {
    boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
    // Nested queries inside this clause should branch from the outer env.
    this.breakToParallelQueryEnv = true;
    SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
    fromClause.env = fromEnv;
    queryEnvs.push(fromEnv);
    checkExpr(fromClause.collection, queryEnvs.peek());
    types.setInputClauseTypedBindingPatternType(fromClause);
    handleInputClauseVariables(fromClause, queryEnvs.peek());
    this.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
/**
 * `join` clause: like `from` (collection check, binding variables) plus the
 * equality condition in its `on` clause.
 */
@Override
public void visit(BLangJoinClause joinClause) {
    boolean prevBreakEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
    joinClause.env = joinEnv;
    queryEnvs.push(joinEnv);
    checkExpr(joinClause.collection, queryEnvs.peek());
    types.setInputClauseTypedBindingPatternType(joinClause);
    handleInputClauseVariables(joinClause, queryEnvs.peek());
    if (joinClause.onClause != null) {
        ((BLangOnClause) joinClause.onClause).accept(this);
    }
    this.breakToParallelQueryEnv = prevBreakEnv;
}
/** `let` clause: defines each let variable in a fresh narrowed env pushed for later clauses. */
@Override
public void visit(BLangLetClause letClause) {
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
    letClause.env = letEnv;
    queryEnvs.push(letEnv);
    for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
    }
}
/** `where` clause: a boolean filter whose truth-narrowed env is used by later clauses. */
@Override
public void visit(BLangWhereClause whereClause) {
    whereClause.env = handleFilterClauses(whereClause.expression);
}
/** `select` clause: records its narrowed env; the expression itself is checked by resolveQueryType. */
@Override
public void visit(BLangSelectClause selectClause) {
    SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
    selectClause.env = selectEnv;
    queryEnvs.push(selectEnv);
}
/** `do` clause: records its narrowed env; the body is analyzed by the query-action visitor. */
@Override
public void visit(BLangDoClause doClause) {
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
    doClause.env = letEnv;
    queryEnvs.push(letEnv);
}
/** `on conflict` clause: the conflict expression must be an error value. */
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    if (!types.isAssignable(exprType, symTable.errorType)) {
        dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, exprType);
    }
}
/** `limit` clause: the limit expression must be assignable to int. */
@Override
public void visit(BLangLimitClause limitClause) {
    BType exprType = checkExpr(limitClause.expression, queryEnvs.peek());
    if (!types.isAssignable(exprType, symTable.intType)) {
        dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.intType, exprType);
    }
}
/**
 * `on` clause of a join: the LHS is checked in the env before the join's input
 * (outer variables only), the RHS in the env after it, and both sides must be
 * assignment-compatible.
 */
@Override
public void visit(BLangOnClause onClause) {
    BType lhsType, rhsType;
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    // LHS sees only variables bound before the join input clause.
    onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
    onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
/** `order by` clause: every order key expression must be of an ordered type. */
@Override
public void visit(BLangOrderByClause orderByClause) {
    orderByClause.env = queryEnvs.peek();
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env);
        if (!types.isOrderedType(exprType, false)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
/** `do` statement: only its optional `on fail` clause needs expression checking here. */
@Override
public void visit(BLangDo doNode) {
    if (doNode.onFailClause != null) {
        doNode.onFailClause.accept(this);
    }
}
/** `on fail` clause: visits each statement in the fail-handler body. */
public void visit(BLangOnFailClause onFailClause) {
    onFailClause.body.stmts.forEach(stmt -> stmt.accept(this));
}
/**
 * Checks a filter (`where`/`on`) expression as boolean and replaces the top of the
 * query env stack with an env narrowed by the filter's truth.
 *
 * @return the truth-narrowed environment (also pushed onto the stack)
 */
private SymbolEnv handleFilterClauses (BLangExpression filterExpression) {
    checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
    BType actualType = filterExpression.getBType();
    if (TypeTags.TUPLE == actualType.tag) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, actualType);
    }
    SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
    queryEnvs.push(filterEnv);
    return filterEnv;
}
/**
 * Defines the binding variables of a `from`/`join` clause in the given env.
 * `var` declarations take the inferred element type; explicitly typed declarations
 * are validated against it first.
 */
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    if (bLangInputClause.variableDefinitionNode == null) {
        return;
    }
    BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    if (bLangInputClause.isDeclaredWithVar) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // Avoid a duplicate diagnostic when the type node itself already failed to resolve.
    if (typeNodeType != symTable.semanticError) {
        dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, typeNodeType);
    }
    // Still define the variable (with the declared type) so later clauses can resolve it.
    semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
}
/**
 * Shared checking for `check` and `checkpanic`. The wrapped expression is checked against
 * the expected type widened with `error`; its type must contain at least one error member,
 * which is stripped from the result (the non-error members become the expression's type).
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    BType checkExprCandidateType;
    if (expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        // Speculatively check first: only widen with `error` if the expected type alone fails.
        BType exprType = getCandidateType(checkedExpr, expType);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
        }
    }
    // For lax-typed operands (e.g. json field access), rewrite as `expr.ensureType(T)`.
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        // Worker receives are typed on a second visit, once the send side is known.
        if (firstVisit) {
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(exprType, symTable.errorType);
    if (exprType.tag != TypeTags.UNION && !isErrorType) {
        if (exprType.tag == TypeTags.READONLY) {
            // readonly = (any & readonly) | (error & readonly): split into both components.
            checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                add(symTable.errorType);
            }};
            resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Partition the operand type's members into error and non-error parts.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.READONLY) {
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        // `check` on an expression that can never fail is invalid.
        dlog.error(checkedExpr.expr.pos,
                DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * When a `check` operand is lax-typed (e.g. json member access), rewrites it as
 * `expr.ensureType(typedesc<expType>)` so the runtime performs the conversion.
 * No-op for non-lax operands.
 */
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
    BType rhsType = getCandidateType(checkedExpr, type);
    if (rhsType == symTable.semanticError) {
        // Retry without a contextual type before giving up on the rewrite.
        rhsType = getCandidateType(checkedExpr, rhsType);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        return;
    }
    // Build the typedesc argument carrying the expected type.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type,
            names.fromString(invocation.name.value));
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
/**
 * For field-access expressions the error component is stripped before laxness is
 * judged (lax field access always adds `error` to its type); other kinds pass through.
 */
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    boolean isFieldAccess = expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR;
    return isFieldAccess ? types.getSafeType(rhsType, false, true) : rhsType;
}
/**
 * Speculatively type-checks a clone of the `check` operand against the candidate type,
 * with diagnostics muted, and returns the resulting type without mutating the real AST node.
 */
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Clone so the speculative check leaves no types on the original expression.
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, env);
    } else {
        rhsType = checkExpr(clone, env, checkExprCandidateType);
    }
    // Restore the logging state captured above (supports nesting).
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
/**
 * Ensures the given type has an error component: if none of its member types is assignable
 * to {@code error}, widens it to {@code type|error}; otherwise returns it unchanged.
 */
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    boolean hasErrorComponent = false;
    for (BType memberType : types.getAllTypes(type)) {
        if (types.isAssignable(memberType, symTable.errorType)) {
            hasErrorComponent = true;
            break;
        }
    }
    return hasErrorComponent ? type : BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor's type is the declared service symbol's type.
    BType serviceType = serviceConstructorExpr.serviceNode.symbol.type;
    resultType = serviceType;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Resolve and record the tested type, then type-check the tested expression.
    BType testedType = symResolver.resolveTypeNode(typeTestExpr.typeNode, env);
    typeTestExpr.typeNode.setBType(testedType);
    checkExpr(typeTestExpr.expr, env);
    // A type-test expression is always boolean-typed.
    resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
/**
 * Type-checks an annotation access expression ({@code expr.@Annot}).
 * The accessed expression must be a typedesc; the result type is
 * {@code AnnotType|()} (nil when the annotation is absent at runtime),
 * or {@code true|()} when the annotation has no attached type.
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // The annotated value must be a typedesc.
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BType actualType = symTable.semanticError;
    BSymbol symbol =
            this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
                    names.fromString(annotAccessExpr.pkgAlias.getValue()),
                    names.fromString(annotAccessExpr.annotationName.getValue()));
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol;
        // Annotations with no attached type behave as `true` markers.
        BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? symTable.trueType :
                ((BAnnotationSymbol) symbol).attachedType.type;
        // Nil accounts for the annotation not being present at runtime.
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Returns true when the expression kind is a valid variable reference for a binding
 * pattern; otherwise logs an invalid-record-binding-pattern error and returns false.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            break;
    }
    dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
    return false;
}
/**
 * When the target type is (or contains) {@code readonly} and the expected type is selectively
 * immutable, replaces the {@code readonly} component with the immutable intersection of the
 * expected type; otherwise returns the original target type unchanged.
 */
private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) {
    if (origTargetType == symTable.readonlyType) {
        // Inherently immutable expected types already satisfy readonly; non-selectively
        // immutable types cannot be cloned to an immutable form.
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        return ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                (SelectivelyImmutableReferenceType) expType,
                env, symTable, anonymousModelHelper, names,
                new HashSet<>());
    }
    if (origTargetType.tag != TypeTags.UNION) {
        return origTargetType;
    }
    // Split the union into its readonly component (if any) and the remaining members.
    boolean hasReadOnlyType = false;
    LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
        if (memberType == symTable.readonlyType) {
            hasReadOnlyType = true;
            continue;
        }
        nonReadOnlyTypes.add(memberType);
    }
    if (!hasReadOnlyType) {
        return origTargetType;
    }
    if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
        return origTargetType;
    }
    // Rebuild the union with the readonly member replaced by the immutable intersection
    // of the expected type.
    BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
    nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
            (SelectivelyImmutableReferenceType)
                    expType,
            env, symTable, anonymousModelHelper,
            names, new HashSet<>()));
    return nonReadOnlyUnion;
}
/**
 * Type-checks the body expression of an arrow function against the expected return type,
 * after defining its parameters in a fresh arrow-function scope, and returns the inferred type.
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowEnv);
    }
    return checkExpr(bLangArrowFunction.body.expr, arrowEnv, expectedRetType);
}
/**
 * Assigns the given parameter types (from the expected invokable type) onto the arrow
 * function's parameters. On arity mismatch, logs an error and marks every parameter
 * and the result as semantic-error.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    int expectedCount = paramTypes.size();
    int declaredCount = bLangArrowFunction.params.size();
    if (expectedCount != declaredCount) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                expectedCount, declaredCount);
        resultType = symTable.semanticError;
        for (BLangSimpleVariable param : bLangArrowFunction.params) {
            param.setBType(symTable.semanticError);
        }
        return;
    }
    for (int idx = 0; idx < declaredCount; idx++) {
        BLangSimpleVariable param = bLangArrowFunction.params.get(idx);
        BType assignedType = paramTypes.get(idx);
        // Synthesize a value-type node so the parameter carries an explicit type node.
        BLangValueType syntheticTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        syntheticTypeNode.setTypeKind(assignedType.getKind());
        syntheticTypeNode.pos = symTable.builtinPos;
        param.setTypeNode(syntheticTypeNode);
        param.setBType(assignedType);
    }
}
/**
 * Logs a self-reference error when the variable being defined is referenced
 * within its own initializer.
 */
public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
    boolean referencesItself = env.enclVarSym == varSymbol;
    if (referencesItself) {
        dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
/**
 * Returns a mutable list of the given size where every element is the
 * semantic-error type.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining > 0) {
        errorTypes.add(symTable.semanticError);
        remaining--;
    }
    return errorTypes;
}
/**
 * Type-checks a plain function invocation: resolves the callee symbol (function value,
 * module-level variable holding a function, or constructor), validates that it is callable,
 * flags invalid remote/resource invocation syntax, and checks parameters and return type.
 * Sets {@code resultType} to semantic-error when resolution fails.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
    } else {
        if (funcSymbol == symTable.notFoundSymbol) {
            // First try the main symbol space: a variable may hold a function value.
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                funcSymbol = symbol;
            }
            // Built-in (root package) named symbols are also acceptable callees.
            if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                    (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                funcSymbol = symbol;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
            // Fall back to the constructor space (e.g. error constructors).
            BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        if (!missingNodesHelper.isMissingNode(funcName)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
        }
        // Still check the args so their own errors are reported.
        iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
        resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
        // A captured function-typed variable may need closure registration.
        markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
    if (langLibPackageID) {
        // Lang-lib calls get their own invocation env for type-param handling.
        this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
    if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
        // Lang-lib mutating functions must not be applied to immutable receivers.
        checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
    }
}
/**
 * Marks the given symbol as a closure variable and registers it on the enclosing
 * lambda/arrow-function/record node when it is captured from an outer scope.
 * Already-marked symbols and module-level symbols referenced from non-lambda
 * contexts are skipped.
 */
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
    BLangInvokableNode encInvokable = env.enclInvokable;
    // Skip symbols that are already closures, and package-level symbols referenced
    // outside a lambda/arrow/expression-body context — those need no capture.
    if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
            env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
            encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) {
        return;
    }
    // Case 1: captured inside a lambda (and not one of its own parameters).
    if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
            && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Case 2: captured inside an arrow function (and not one of its parameters).
    if (env.node.getKind() == NodeKind.ARROW_EXPR
            && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol) {
            resolvedSymbol.closure = true;
            ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Case 3: captured from within a record type's default-value expressions.
    if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
}
/**
 * Returns true when the symbol is neither a function/constructor symbol
 * nor a function-typed variable (function pointer).
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    boolean callableSymbol = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
            || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR;
    return !callableSymbol && !isFunctionPointer(funcSymbol);
}
/**
 * Returns true when the symbol is a function pointer: a non-native, function-kind
 * variable symbol rather than a directly declared function.
 */
private boolean isFunctionPointer(BSymbol funcSymbol) {
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        // A real function symbol is not a pointer.
        return false;
    }
    boolean taggedAsVariable = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE;
    boolean nonNativeFunctionKind = funcSymbol.kind == SymbolKind.FUNCTION
            && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE;
    return taggedAsVariable && nonNativeFunctionKind;
}
/**
 * Type-checks each named detail argument of an error constructor against the field type
 * derived from the expected detail type. Each argument is first probed on a clone; if the
 * probe fails the real argument is re-checked without an expected type so the most useful
 * diagnostics are reported. Returns the (checked) named args.
 */
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
                                                                 BType expectedType) {
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
        BType target = checkErrCtrTargetTypeAndSetSymbol(namedArgsExpression, expectedType);
        // Probe on a clone first so a failed check does not pollute the real node.
        BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
        BType type = checkExpr(clone, env, target);
        if (type == symTable.semanticError) {
            checkExpr(namedArgsExpression, env);
        } else {
            checkExpr(namedArgsExpression, env, target);
        }
        namedArgs.add(namedArgsExpression);
    }
    return namedArgs;
}
/**
 * Determines the target type for a named detail argument of an error constructor and,
 * when the detail type is a record with a matching field, records the field symbol on
 * the argument. Returns {@code noType} for unmatched fields of sealed records and the
 * rest-field type for open records; semantic-error for unusable expected types.
 */
private BType checkErrCtrTargetTypeAndSetSymbol(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
    if (expectedType == symTable.semanticError) {
        return symTable.semanticError;
    }
    if (expectedType.tag == TypeTags.MAP) {
        // Map detail type: every named arg checks against the constraint.
        return ((BMapType) expectedType).constraint;
    }
    if (expectedType.tag != TypeTags.RECORD) {
        return symTable.semanticError;
    }
    BRecordType detailRecordType = (BRecordType) expectedType;
    BField matchingField = detailRecordType.fields.get(namedArgsExpression.name.value);
    if (matchingField != null) {
        namedArgsExpression.varSymbol = matchingField.symbol;
        return matchingField.type;
    }
    boolean openRecordWithNamedFields = !detailRecordType.sealed && !detailRecordType.fields.isEmpty();
    if (openRecordWithNamedFields) {
        dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                detailRecordType);
    }
    return detailRecordType.sealed ? symTable.noType : detailRecordType.restFieldType;
}
/**
 * Type-checks a method invocation on an object-typed expression: resolves the attached
 * method (falling back to a function-typed field, then to lang-lib methods), validates
 * init/remote/resource invocation rules, and checks parameters and return type.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
    // Service methods may only be invoked on `self`.
    if (objectType.getKind() == TypeKind.SERVICE &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
        return;
    }
    // Attached methods are mangled as `<TypeName>.<methodName>`.
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol) {
        // Fall back to a function-typed object field invoked as a method.
        BSymbol invocableField = symResolver.resolveInvocableObjectField(
                iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol);
        if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
            funcSymbol = invocableField;
            iExpr.functionPointerInvocation = true;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        // Final fallback: lang-lib methods applicable to the object type.
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                    objectType);
            resultType = symTable.semanticError;
            return;
        }
    } else {
        iExpr.symbol = funcSymbol;
    }
    // `init` may only be invoked on `self`.
    if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        // Remote methods require the action invocation syntax (`->`).
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Type-checks an action invocation ({@code client->method()} or {@code start obj.method()}):
 * validates the receiver is an endpoint (or the call is async), resolves the remote method
 * (falling back to function-typed fields and lang-lib methods), enforces remote-call rules,
 * and checks parameters and return type.
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
    if (checkInvalidActionInvocation(aInv)) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType());
        this.resultType = symTable.semanticError;
        aInv.symbol = symTable.notFoundSymbol;
        return;
    }
    // Attached methods are mangled as `<TypeName>.<methodName>`.
    Name remoteMethodQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
    Name actionName = names.fromIdNode(aInv.name);
    BSymbol remoteFuncSymbol = symResolver
            .resolveObjectMethod(aInv.pos, env, remoteMethodQName, (BObjectTypeSymbol) expType.tsymbol);
    if (remoteFuncSymbol == symTable.notFoundSymbol) {
        // Fall back to a function-typed object field invoked as an action.
        BSymbol invocableField = symResolver.resolveInvocableObjectField(
                aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol);
        if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
            remoteFuncSymbol = invocableField;
            aInv.functionPointerInvocation = true;
        }
    }
    if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
        dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
        resultType = symTable.semanticError;
        return;
    }
    // Non-remote methods may only use `->` syntax when invoked async (start).
    if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
        this.resultType = symTable.semanticError;
        return;
    }
    // A client remote call whose return type can never produce a value is invalid.
    if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
            Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
            types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                    ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
    }
    aInv.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(aInv);
}
/**
 * Returns true when the action invocation is invalid: the receiver is a simple
 * variable reference that is not an endpoint, and the call is not async.
 */
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) {
    if (aInv.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BSymbol receiverSymbol = ((BLangSimpleVarRef) aInv.expr).symbol;
    boolean isEndpoint = (receiverSymbol.tag & SymTag.ENDPOINT) == SymTag.ENDPOINT;
    return !isEndpoint && !aInv.async;
}
/**
 * Attempts to resolve (and type-check) the invocation as a lang-lib method on the given
 * type; returns true on success. Note: resolution has side effects on the invocation node.
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    BSymbol resolved = getLangLibMethod(iExpr, bType);
    return resolved != symTable.notFoundSymbol;
}
/**
 * Resolves a lang-lib method for the given type and, on success, rewrites the invocation
 * into lang-lib form (receiver becomes the first argument) and type-checks it inside a
 * dedicated invocation env. Returns {@code notFoundSymbol} when no matching method exists.
 */
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);
    if (funcSymbol == symTable.notFoundSymbol) {
        return symTable.notFoundSymbol;
    }
    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    // Swap to an invocation env for type-param binding; restored after checking.
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    // Lang-lib style: the receiver expression becomes the first positional argument.
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;
    return funcSymbol;
}
/**
 * Checks the invocation's arguments against its parameters and records the checked
 * return type in {@code resultType}.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    BType inferredReturnType = checkInvocationParam(iExpr);
    resultType = types.checkType(iExpr, inferredReturnType, this.expType);
}
/**
 * Returns the single open included-record parameter when its fields cover all required
 * parameter names (so additional named args may flow into its rest field); otherwise null.
 */
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> candidateFields = ((BRecordType) candidate.type).fields;
    // Every required parameter name must be declared as a field of the candidate record.
    return candidateFields.keySet().containsAll(requiredParamNames) ? candidate : null;
}
/**
 * Scans the invokable's parameters, expanding included-record parameters: collects their
 * non-never fields into {@code incRecordParams} (out-param) and tracks all required param
 * names. Returns the single open included-record param that may accept additional named
 * args, or null when none qualifies.
 */
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                               List<BVarSymbol> incRecordParams) {
    Set<String> requiredParamNames = new HashSet<>();
    List<BVarSymbol> openIncRecordParams = new ArrayList<>();
    for (BVarSymbol paramSymbol : invokableSymbol.params) {
        if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) &&
                paramSymbol.type.getKind() == TypeKind.RECORD) {
            // Expand the included record's fields as individually-addressable params.
            boolean recordWithDisallowFieldsOnly = true;
            LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields;
            for (String fieldName : fields.keySet()) {
                BField field = fields.get(fieldName);
                if (field.symbol.type.tag != TypeTags.NEVER) {
                    recordWithDisallowFieldsOnly = false;
                    incRecordParams.add(field.symbol);
                    requiredParamNames.add(fieldName);
                }
            }
            // A record whose fields are all `never` but with an open rest type may
            // still absorb extra named args.
            if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) {
                openIncRecordParams.add(paramSymbol);
            }
        } else {
            requiredParamNames.add(paramSymbol.name.value);
        }
    }
    return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
/**
 * Classifies the invocation's argument expressions into required args and rest args
 * (tracking named-arg/positional-arg/rest-arg ordering rules), then delegates to
 * {@link #checkInvocationArgs} for the detailed type checking. Returns the invocation's
 * (pre-expected-type) return type, or semantic-error/noType for non-invokable callees.
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
        // `function` (any-function) typed values cannot be invoked directly.
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
        return symTable.semanticError;
    }
    if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
    List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes();
    List<BVarSymbol> incRecordParams = new ArrayList<>();
    BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
            incRecordParams);
    int parameterCountForPositionalArgs = paramTypes.size();
    // Named args may also target the expanded fields of included-record params.
    int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
    iExpr.requiredArgs = new ArrayList<>();
    for (BVarSymbol symbol : invokableSymbol.params) {
        if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                symbol.type.tag != TypeTags.RECORD) {
            continue;
        }
        LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields;
        if (fields.isEmpty()) {
            continue;
        }
        // An included-record param whose fields were expanded should not itself count
        // as a named-arg slot; subtract it once per such param.
        for (String field : fields.keySet()) {
            if (fields.get(field).type.tag != TypeTags.NEVER) {
                parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                break;
            }
        }
    }
    // Partition the arg expressions, enforcing positional-before-named-before-rest order.
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                foundNamedArg = true;
                if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default:
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                // Positional args beyond the declared params spill into rest args.
                if (i < parameterCountForPositionalArgs) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
            incRecordParamAllowAdditionalFields);
}
/**
 * Type-checks the partitioned invocation arguments against the callee's parameter list:
 * matches positional and named args to non-rest params (including included-record fields),
 * verifies all required params are supplied, checks rest args / a spread vararg against the
 * rest parameter, and returns the invocation's return type (a future type for async calls).
 * Returns semantic-error when any arity/required-param rule is violated.
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                  List<BVarSymbol> incRecordParams,
                                  BVarSymbol incRecordParamAllowAdditionalFields) {
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type;
    BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);
    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();
    // Params still awaiting a value; entries are removed as args are matched.
    List<BVarSymbol> requiredParams = new ArrayList<>();
    List<BVarSymbol> requiredIncRecordParams = new ArrayList<>();
    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.isDefaultable) {
            continue;
        }
        requiredParams.add(nonRestParam);
    }
    for (BVarSymbol incRecordParam : incRecordParams) {
        if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
            requiredIncRecordParams.add(incRecordParam);
        }
    }
    // Phase 1: match positional and named args to non-rest params.
    int i = 0;
    for (; i < nonRestArgs.size(); i++) {
        BLangExpression arg = nonRestArgs.get(i);
        // The lang-lib receiver arg was already type-checked; re-validate it against
        // the first param without re-checking (except for char-string narrowing).
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            BType expectedType = paramTypes.get(i);
            BType actualType = arg.getBType();
            if (expectedType == symTable.charStringType) {
                arg.cloneAttempt++;
                BLangExpression clonedArg = nodeCloner.cloneNode(arg);
                BType argType = checkExprSilent(clonedArg, expectedType, env);
                if (argType != symTable.semanticError) {
                    actualType = argType;
                }
            }
            types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
        }
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional arg: consumes the next non-rest param in order.
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            break;
        }
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            // Named arg: resolve by parameter name (incl. included-record fields).
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                    nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            requiredIncRecordParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
            ((BLangNamedArgsExpression) arg).varSymbol = varSym;
            valueProvidedParams.add(varSym);
        }
    }
    // Phase 2: validate required params are all supplied (unless a vararg may supply them).
    BVarSymbol restParam = invokableTypeSymbol.restParam;
    boolean errored = false;
    if (!requiredParams.isEmpty() && vararg == null) {
        for (BVarSymbol requiredParam : requiredParams) {
            if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                        iExpr.name.value);
                errored = true;
            }
        }
    }
    if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
        for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (requiredParam.type == requiredIncRecordParam.owner.type) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                            requiredIncRecordParam.name, iExpr.name.value);
                    errored = true;
                }
            }
        }
    }
    if (restParam == null &&
            (!iExpr.restArgs.isEmpty() ||
                    (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        errored = true;
    }
    if (errored) {
        return symTable.semanticError;
    }
    // Phase 3: when a vararg may also fill remaining non-rest params, build the synthetic
    // tuple type (positional view) and record type (named view) the vararg must match.
    BType listTypeRestArg = restParam == null ? null : restParam.type;
    BRecordType mappingTypeRestArg = null;
    if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        List<BType> tupleMemberTypes = new ArrayList<>();
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
        mappingTypeRestArg = new BRecordType(recordSymbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        BType tupleRestType = null;
        BVarSymbol fieldSymbol;
        for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
            BType paramType = paramTypes.get(j);
            BVarSymbol nonRestParam = nonRestParams.get(j);
            Name paramName = nonRestParam.name;
            tupleMemberTypes.add(paramType);
            boolean required = requiredParams.contains(nonRestParam);
            fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
                    nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol,
                    symTable.builtinPos, VIRTUAL);
            fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
        }
        if (listTypeRestArg != null) {
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                tupleRestType = ((BArrayType) listTypeRestArg).eType;
            } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                BTupleType restTupleType = (BTupleType) listTypeRestArg;
                tupleMemberTypes.addAll(restTupleType.tupleTypes);
                if (restTupleType.restType != null) {
                    tupleRestType = restTupleType.restType;
                }
            }
        }
        BTupleType tupleType = new BTupleType(tupleMemberTypes);
        tupleType.restType = tupleRestType;
        listTypeRestArg = tupleType;
        mappingTypeRestArg.sealed = true;
        mappingTypeRestArg.restFieldType = symTable.noType;
        mappingTypeRestArg.fields = fields;
        recordSymbol.type = mappingTypeRestArg;
        mappingTypeRestArg.tsymbol = recordSymbol;
    }
    if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return symTable.semanticError;
    }
    // Phase 4: type-check rest args and/or the vararg against the rest type.
    BType restType = null;
    if (vararg != null && !iExpr.restArgs.isEmpty()) {
        // Both explicit rest args and a trailing vararg: rest params must be array-typed here.
        BType elementType = ((BArrayType) listTypeRestArg).eType;
        for (BLangExpression restArg : iExpr.restArgs) {
            checkTypeParamExpr(restArg, this.env, elementType, true);
        }
        checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        iExpr.restArgs.add(vararg);
        restType = this.resultType;
    } else if (vararg != null) {
        iExpr.restArgs.add(vararg);
        if (mappingTypeRestArg != null) {
            // The vararg may be a list (positional) or a mapping (named); accept either.
            LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
            restTypes.add(listTypeRestArg);
            restTypes.add(mappingTypeRestArg);
            BType actualType = BUnionType.create(null, restTypes);
            checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
        } else {
            checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        }
        restType = this.resultType;
    } else if (!iExpr.restArgs.isEmpty()) {
        if (listTypeRestArg.tag == TypeTags.ARRAY) {
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, this.env, elementType, true);
                // Remember the first failure; later successes must not mask it.
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        } else {
            BTupleType tupleType = (BTupleType) listTypeRestArg;
            List<BType> tupleMemberTypes = tupleType.tupleTypes;
            BType tupleRestType = tupleType.restType;
            int tupleMemCount = tupleMemberTypes.size();
            for (int j = 0; j < iExpr.restArgs.size(); j++) {
                BLangExpression restArg = iExpr.restArgs.get(j);
                BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                checkTypeParamExpr(restArg, this.env, memType, true);
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        }
    }
    // Phase 5: compute the return type; unify parameterized native return types.
    BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
    if (restType != symTable.semanticError &&
            Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) &&
            Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
        retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
    String sortFuncName = "sort";
    if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
        // lang-lib sort needs extra ordered-type validation on its args.
        checkArrayLibSortFuncArgs(iExpr);
    }
    // Async action invocations (`start ...`) yield a future of the return type.
    if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Validates the arguments of a lang-lib {@code sort} invocation: the sorted value's member
 * type must be an ordered type when no key function is given (or the key function is nil),
 * and any key function's return type must itself be ordered.
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
    // NOTE(review): assumes argExprs is non-empty here (the receiver is inserted as the
    // first arg for lang-lib invocations) — confirm; otherwise get(0) would throw.
    if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
        dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                iExpr.argExprs.get(0).getBType());
    }
    if (iExpr.argExprs.size() != 3) {
        return;
    }
    // Third argument is the optional key function.
    BLangExpression keyFunction = iExpr.argExprs.get(2);
    BType keyFunctionType = keyFunction.getBType();
    if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }
    if (keyFunctionType.tag == TypeTags.NIL) {
        // Explicit nil key function: fall back to requiring an ordered member type.
        if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
            dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                    iExpr.argExprs.get(0).getBType());
        }
        return;
    }
    // Extract the key function's return type; the diagnostic position depends on
    // the syntactic form of the key function.
    Location pos;
    BType returnType;
    if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        pos = keyFunction.pos;
        returnType = keyFunction.getBType().getReturnType();
    } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
        BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
        pos = arrowFunction.body.expr.pos;
        returnType = arrowFunction.body.expr.getBType();
        if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
            return;
        }
    } else {
        BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
        pos = keyLambdaFunction.function.pos;
        returnType = keyLambdaFunction.function.getBType().getReturnType();
    }
    if (!types.isOrderedType(returnType, false)) {
        dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
    }
}
/**
 * Resolves the parameter a named argument refers to: first the regular non-rest params,
 * then the expanded included-record param fields, and finally — when an open included-record
 * param may absorb extra fields — checks the arg against its rest-field type and returns a
 * fresh synthetic symbol. Returns null when the name matches nothing.
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields) {
    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.getName().value.equals(argName.value)) {
            return nonRestParam;
        }
    }
    for (BVarSymbol incRecordParam : incRecordParams) {
        if (incRecordParam.getName().value.equals(argName.value)) {
            return incRecordParam;
        }
    }
    if (incRecordParamAllowAdditionalFields != null) {
        BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type;
        // Side effect: the named arg's value is type-checked against the rest-field type here.
        checkExpr(expr, env, incRecordType.restFieldType);
        if (!incRecordType.fields.containsKey(argName.value)) {
            return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName),
                    null, symTable.noType, null, argName.pos, VIRTUAL);
        }
    }
    return null;
}
/**
 * Wraps the given return type in a future type for an async invocation, flagging
 * whether the callee is a worker-lambda-derived function.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    boolean workerDerivedInvokable = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, workerDerivedInvokable);
}
/**
 * Convenience overload: type-checks the argument with type-param handling, using the
 * argument's own position for diagnostics.
 */
private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    Location diagnosticPos = arg.pos;
    checkTypeParamExpr(diagnosticPos, arg, env, expectedType, inferTypeForNumericLiteral);
}
/**
 * Type-checks an argument expression in a context that may bind lang-lib type parameters.
 * Expressions whose type should be inferred (constructors, arrow functions, and — when
 * enabled — numeric literals and conditional expressions) are checked against the bound
 * type-param type first; the inferred/checked type is then matched against the expected
 * type to bind type params.
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    if (typeParamAnalyzer.notRequireTypeParams(env)) {
        // No type params in scope: plain type check.
        checkExpr(arg, env, expectedType);
        return;
    }
    if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
        // Check against the current binding of the type param, then record the
        // inferred type for the type-param resolution.
        BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
        BType inferredType = checkExpr(arg, env, expType);
        typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType);
        return;
    }
    checkExpr(arg, env, expectedType);
    typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType);
}
/**
 * Returns true when the expression's type should be inferred from the expected type
 * rather than computed independently: always for constructor-like expressions and
 * arrow functions; for numeric literals and conditional expressions only when
 * {@code inferTypeForNumericLiteral} is set. Group expressions delegate to their inner
 * expression.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
    }
    if (kind == NodeKind.ARROW_EXPR
            || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR) {
        return true;
    }
    if (kind == NodeKind.ELVIS_EXPR
            || kind == NodeKind.TERNARY_EXPR
            || kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return false;
}
/**
 * Type-checks a single field of a mapping constructor against the expected mapping type
 * (a record or a map) and returns the checked type of the field's value expression.
 *
 * Handles all three field forms: key-value fields ({@code k: v}), variable-name fields
 * ({@code {x}}), and spread-operator fields ({@code ...expr}). Spread fields return early
 * with either {@code noType} (success) or {@code semanticError}.
 *
 * @param field       the mapping-constructor field node
 * @param mappingType the expected record or map type
 * @return the checked type of the field's value, or {@code semanticError} on failure
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
    BType fieldType = symTable.semanticError;
    boolean keyValueField = field.isKeyValueField();
    boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
    boolean readOnlyConstructorField = false;
    String fieldName = null;
    Location pos = null;
    BLangExpression valueExpr = null;
    if (keyValueField) {
        valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
    } else if (!spreadOpField) {
        // A variable-name field acts as its own value expression.
        valueExpr = (BLangRecordVarNameField) field;
    }
    switch (mappingType.tag) {
        case TypeTags.RECORD:
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                        (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                key.fieldSymbol = typeSymbolPair.fieldSymbol;
                readOnlyConstructorField = keyValField.readonly;
                pos = key.expr.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else if (spreadOpField) {
                BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                checkExpr(spreadExpr, this.env);
                BType spreadExprType = spreadExpr.getBType();
                if (spreadExprType.tag == TypeTags.MAP) {
                    // Spreading a map into a record: the map's constraint must be
                    // assignable to the union of all possible field types of the record.
                    return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                            getAllFieldType((BRecordType) mappingType),
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }
                if (spreadExprType.tag != TypeTags.RECORD) {
                    dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadExprType);
                    return symTable.semanticError;
                }
                boolean errored = false;
                // Spreading a record: each field of the spread record must be assignable
                // to the matching (declared or rest) field type of the target record.
                for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
                    BType specFieldType = bField.type;
                    BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name,
                            mappingType.tsymbol);
                    BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                            (BRecordType) mappingType);
                    if (expectedFieldType != symTable.semanticError &&
                            !types.isAssignable(specFieldType, expectedFieldType)) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                expectedFieldType, bField.name, specFieldType);
                        if (!errored) {
                            errored = true;
                        }
                    }
                }
                return errored ? symTable.semanticError : symTable.noType;
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                        (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            break;
        case TypeTags.MAP:
            if (spreadOpField) {
                BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                BType spreadOpType = checkExpr(spreadExp, this.env);
                BType spreadOpMemberType;
                switch (spreadOpType.tag) {
                    case TypeTags.RECORD:
                        // Collect all field types (plus the rest type for open records)
                        // into one representative member type.
                        List<BType> types = new ArrayList<>();
                        BRecordType recordType = (BRecordType) spreadOpType;
                        for (BField recField : recordType.fields.values()) {
                            types.add(recField.type);
                        }
                        if (!recordType.sealed) {
                            types.add(recordType.restFieldType);
                        }
                        spreadOpMemberType = getRepresentativeBroadType(types);
                        break;
                    case TypeTags.MAP:
                        spreadOpMemberType = ((BMapType) spreadOpType).constraint;
                        break;
                    default:
                        dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                                spreadOpType);
                        return symTable.semanticError;
                }
                return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                        DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            }
            boolean validMapKey;
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
                readOnlyConstructorField = keyValField.readonly;
                pos = key.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            // Every value in a map constructor is checked against the map's constraint.
            fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
            break;
    }
    // `readonly` fields need an immutable type: build the immutable intersection for
    // selectively-immutable types; reject types that cannot be made immutable.
    if (readOnlyConstructorField) {
        if (types.isSelectivelyImmutableType(fieldType)) {
            fieldType =
                    ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                            (SelectivelyImmutableReferenceType) fieldType,
                            env, symTable, anonymousModelHelper, names,
                            new HashSet<>());
        } else if (!types.isInherentlyImmutableType(fieldType)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
            fieldType = symTable.semanticError;
        }
    }
    if (spreadOpField) {
        valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
    }
    BLangExpression exprToCheck = valueExpr;
    if (this.nonErrorLoggingCheck) {
        // Exploratory (non-error-logging) mode: check a clone so the original AST
        // is left untouched.
        exprToCheck = nodeCloner.cloneNode(valueExpr);
    } else {
        ((BLangNode) field).setBType(fieldType);
    }
    return checkExpr(exprToCheck, this.env, fieldType);
}
/**
 * Determines the expected value type (and, when resolvable, the field symbol) for a record
 * constructor key. Computed keys ({@code [expr]}) must be strings and may match any field,
 * so the union of all possible field types is returned; identifier and string-literal keys
 * are resolved by name against the record's fields (falling back to the rest type).
 */
private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey,
                                                 BRecordType recordType) {
    Name fieldName;
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.getBType() == symTable.semanticError) {
            return new TypeSymbolPair(null, symTable.semanticError);
        }
        // A computed key can name any field, so allow any of the field types (and the
        // rest type for open records).
        LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream()
                .map(field -> field.type)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }
        return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes));
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return new TypeSymbolPair(null, symTable.semanticError);
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol);
    BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType);
    return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? (BVarSymbol) fieldSymbol : null, type);
}
/**
 * Returns the expected type for a record-constructor key resolved to {@code fieldSymbol}:
 * the field's declared type when found, the rest field type for an open record, or
 * {@code semanticError} (with a diagnostic) when the record is sealed and has no such field.
 */
private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key,
                                          BRecordType recordType) {
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    if (!recordType.sealed) {
        // Open record: unknown keys fall through to the rest field type.
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
            recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of every possible field type of the given record: all declared field
 * types plus the rest field type (when present and meaningful).
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BField declaredField : recordType.fields.values()) {
        memberTypes.add(declaredField.type);
    }
    BType rest = recordType.restFieldType;
    if (rest != null && rest != symTable.noType) {
        memberTypes.add(rest);
    }
    return BUnionType.create(null, memberTypes);
}
/**
 * Validates a map/json constructor key. Computed keys must type-check as strings;
 * otherwise the key must be a simple identifier or a string literal. Logs a diagnostic
 * and returns {@code false} for any other key form.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.getBType() != symTable.semanticError;
    }
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF
            || (keyKind == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Widens the given type with nil unless it is already nillable.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable()
            ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves {@code fieldName} as a required field of {@code recordType}. On success the
 * resolved symbol is recorded on the access expression and the field's type is returned;
 * a missing or optional field yields {@code semanticError}.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved == symTable.notFoundSymbol || Symbols.isOptional(resolved)) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = resolved;
    return resolved.type;
}
/**
 * Resolves {@code fieldName} as an optional field of {@code recordType}. On success the
 * resolved symbol is recorded on the access expression and the field's type is returned;
 * a missing or non-optional field yields {@code semanticError}.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved == symTable.notFoundSymbol || !Symbols.isOptional(resolved)) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = resolved;
    return resolved.type;
}
/**
 * Resolves an access to the rest field of {@code recordType}: valid only when the name is
 * NOT a declared field and the record is open. Returns the rest field type on success,
 * otherwise {@code semanticError}.
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved != symTable.notFoundSymbol || recordType.sealed) {
        // Either the name is a declared field, or the record has no rest field.
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}
/**
 * Resolves a field access on an object type: first as a declared field, then as an
 * attached method (via the mangled {@code <object>.<name>} symbol name). Records the
 * resolved symbol on the access expression and returns its type, or logs an error and
 * returns {@code semanticError} when neither exists.
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            this.env, fieldName, objectType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Not a field: try an attached method with the mangled attached-function symbol name.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos,
                DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }
    // An `isolated` method accessed through a non-isolated object loses its isolated
    // flag; duplicate the symbol so clearing the flag does not corrupt the original.
    if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
            !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
        fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);
        fieldSymbol.flags &= ~Flags.ISOLATED;
        fieldSymbol.type.flags &= ~Flags.ISOLATED;
    }
    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type at {@code indexValue} of a tuple: the declared member for
 * in-range indices, the rest type for indices past the declared members (when a rest type
 * exists), and {@code semanticError} for negative or otherwise out-of-range indices.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int memberCount = tuple.tupleTypes.size();
    if (indexValue >= memberCount && tuple.restType != null) {
        return tuple.restType;
    }
    if (indexValue < 0 || indexValue >= memberCount) {
        return symTable.semanticError;
    }
    return tuple.tupleTypes.get(indexValue);
}
/**
 * Validates an XML element literal's start and end tag names. Both tag name expressions
 * are always checked as strings. A mismatch error is reported only when the names are
 * statically comparable: both QNames that differ, or exactly one is a QName. When neither
 * is a statically-known QName no static error is reported.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing further to validate.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME &&
            startTagName.equals(endTagName)) {
        // Both statically known and equal: valid.
        return;
    }
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        // Neither is statically known: cannot compare at compile time.
        return;
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks each interpolation of a string template and reports an error for any
 * expression that is not a non-nil simple basic type or string. Expressions whose check
 * already failed are skipped silently.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression interpolation : exprs) {
        checkExpr(interpolation, env);
        BType interpolationType = interpolation.getBType();
        if (interpolationType == symTable.semanticError
                || types.isNonNilSimpleBasicTypeOrString(interpolationType)) {
            continue;
        }
        dlog.error(interpolation.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                BUnionType.create(null, symTable.intType, symTable.floatType,
                        symTable.decimalType, symTable.stringType,
                        symTable.booleanType), interpolationType);
    }
}
/**
 * Concatenates consecutive text-producing child expressions of an XML element into single
 * XML text-literal nodes, and returns the reduced list of children.
 *
 * XML-typed children are kept as-is (any pending text run is flushed before them);
 * interpolations of non-stringifiable types are reported as errors and dropped.
 *
 * @param exprs         child expression nodes of the XML element
 * @param xmlElementEnv the symbol environment of the enclosing XML element
 * @return the reduced list of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();
    for (BLangExpression expr : exprs) {
        BType exprType;
        if (expr.getKind() == NodeKind.QUERY_EXPR) {
            // Query expressions are checked against the enclosing expected type.
            exprType = checkExpr(expr, xmlElementEnv, expType);
        } else {
            exprType = checkExpr(expr, xmlElementEnv);
        }
        if (TypeTags.isXMLTypeTag(exprType.tag)) {
            // XML child: flush the accumulated text run, then keep the child as-is.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }
        BType type = expr.getBType();
        if (type.tag >= TypeTags.JSON &&
                !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) {
            // Non-stringifiable interpolation: report (unless already errored) and drop.
            if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType,
                                symTable.decimalType, symTable.stringType,
                                symTable.booleanType, symTable.xmlType), type);
            }
            continue;
        }
        tempConcatExpressions.add(expr);
    }
    // Flush any trailing text run.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }
    return newChildren;
}
/**
 * Builds an XML text-literal node from the given text fragments, positioned at the first
 * fragment and typed as xml.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.textFragments = exprs;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final static type of an access expression: the actual member type, widened
 * with nil when the access may produce nil (see {@link #returnsNull}), and with error when
 * error-safe navigation applies and the receiver's type contains error. Also records
 * {@code actualType} as the expression's original (pre-widening) type.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType unionType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        unionType.add(symTable.nilType);
    }
    BType parentType = accessExpr.expr.getBType();
    if (accessExpr.errorSafeNavigation
            && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
            && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
        unionType.add(symTable.errorType);
    }
    // Collapse a single-member union back to the member itself.
    if (unionType.getMemberTypes().size() == 1) {
        return unionType.getMemberTypes().toArray(new BType[0])[0];
    }
    return unionType;
}
/**
 * Reports whether the given access expression can evaluate to nil: the receiver is
 * nillable (excluding json), or the access is a member access on a map whose constraint
 * is neither any nor json.
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    // Beyond nillable receivers, only map member access can introduce nil.
    if (parentType.tag != TypeTags.MAP) {
        return false;
    }
    if (accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR
            || accessExpr.expr.getBType().tag != TypeTags.MAP) {
        return false;
    }
    BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint;
    return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
/**
 * Checks a field access against an object type or a union of object types. For a union,
 * the access is valid only when every member admits the field; the result is the union of
 * the member field types (collapsed when they all agree).
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldTypeForMember = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (fieldTypeForMember == symTable.semanticError) {
            // One member rejecting the field invalidates the whole access.
            return fieldTypeForMember;
        }
        memberFieldTypes.add(fieldTypeForMember);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Checks a required-field access against a record type or a union of record types. For a
 * union, the field must be a required field of every member; the result is the union of
 * the member field types (collapsed when they all agree).
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldTypeForMember = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldTypeForMember == symTable.semanticError) {
            // One member lacking the required field invalidates the whole access.
            return fieldTypeForMember;
        }
        memberFieldTypes.add(fieldTypeForMember);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Checks a field access used as an assignment target (LHS) against a record type or a
 * union of record types. Unlike the RHS form, optional fields are also acceptable. For a
 * union, every member must admit the field; the result is the union of the member field
 * types (collapsed when they all agree).
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) varRefType;
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        // On the LHS, writing to an optional field is also allowed.
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, recordType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldTypeForMember = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldTypeForMember == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldTypeForMember);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Checks an optional field access ({@code expr?.field}) against a record or a union of
 * records. For a single record the field may be required (type returned as-is) or optional
 * (type widened with nil). For a union the result is the union of the member results; when
 * some members lack the field the result is widened with nil, and when no member has the
 * field the access is {@code semanticError}.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                 Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType != symTable.semanticError) {
            return fieldType;
        }
        fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        // Optional field: the access may produce nil when the field is absent.
        return addNilForNillableAccessType(fieldType);
    }
    Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
    BType fieldType;
    boolean nonMatchedRecordExists = false;
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
    for (BType memType : memberTypes) {
        BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
        if (individualFieldType == symTable.semanticError) {
            // Track members without the field; they contribute nil to the result below.
            nonMatchedRecordExists = true;
            continue;
        }
        fieldTypeMembers.add(individualFieldType);
    }
    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    if (fieldTypeMembers.size() == 1) {
        fieldType = fieldTypeMembers.iterator().next();
    } else {
        fieldType = BUnionType.create(null, fieldTypeMembers);
    }
    return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Classifies each record member of a union for a field access: members that do not
 * declare the field at all, and members in which the field exists but is optional (so a
 * plain `.` access fails). Returns the collected diagnostics.
 */
private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes,
                                                Name fieldName) {
    RecordUnionDiagnostics diagnostics = new RecordUnionDiagnostics();
    for (BType memberType : memberTypes) {
        BRecordType member = (BRecordType) memberType;
        if (!member.getFields().containsKey(fieldName.getValue())) {
            diagnostics.undeclaredInRecords.add(member);
            continue;
        }
        // Field declared: a failing required-field access means it is optional here.
        if (checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, member) == symTable.semanticError) {
            diagnostics.optionalInRecords.add(member);
        }
    }
    return diagnostics;
}
/**
 * Logs the appropriate diagnostic for an invalid RHS field access on a record (or union
 * of records): the field exists but is optional (requires {@code ?.}), the field is
 * undeclared in a sealed record, or field access is otherwise invalid for an open record.
 * For unions the members are classified first and combined messages are reported.
 */
private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordVarRefType = (BRecordType) varRefType;
        boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue());
        if (isFieldDeclared) {
            // Field present but optional: `.` access only works on required fields.
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS);
        } else if (recordVarRefType.sealed) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType);
        } else {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName,
                    varRefType);
        }
    } else {
        // Union of records: classify members, then report undeclared and/or optional cases.
        LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
        RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName);
        if (recUnionInfo.hasUndeclaredAndOptional()) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.UNDECLARED_AND_OPTIONAL_FIELDS_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords),
                    recUnionInfo.recordsToString(recUnionInfo.optionalInRecords));
        } else if (recUnionInfo.hasUndeclared()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords));
        } else if (recUnionInfo.hasOptional()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.optionalInRecords));
        }
    }
}
/**
 * Type-checks a {@code .} field access against the receiver's type, dispatching on the
 * receiver kind: objects, records, lax types (json-like), chained accesses whose original
 * type was lax, and xml. Unsupported receivers produce a logged diagnostic and
 * {@code semanticError}.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.semanticError;
    if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
        actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
    } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
        actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        if (actualType != symTable.semanticError) {
            fieldAccessExpr.originalType = actualType;
            return actualType;
        }
        if (!fieldAccessExpr.isLValue) {
            // RHS access failed: log a precise reason (optional / undeclared / open record).
            logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName);
            return actualType;
        }
        // LHS access may additionally target optional fields.
        actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                    fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
        }
    } else if (types.isLax(varRefType)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
                    varRefType);
            return symTable.semanticError;
        }
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        // Lax access may fail at runtime, so the static type includes error.
        BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access whose receiver's original (pre-widening) type was lax.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
        }
        actualType = symTable.xmlType;
        fieldAccessExpr.originalType = actualType;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                varRefType);
    }
    return actualType;
}
/**
 * Resolves the XML namespace prefix of a namespace-prefixed field access and records the
 * resolved namespace symbol on the expression. When the prefix resolves to a package
 * symbol, the namespace is looked up among that package's constants; an unresolvable
 * prefix is reported as an error.
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
    // The parameter already has the precise type; no local re-alias is needed.
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));
    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                fieldAccessExpr.nsPrefix);
    } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix names a module: resolve the namespace from the module's constants.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, nsPrefix,
                (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * Reports whether the field access has a recorded original type and that type is lax.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Computes the member type produced by a lax field access on {@code exprType}: json for
 * json receivers, string for xml/xml-element receivers, the constraint for maps, and the
 * (collapsed) union of member results for union receivers. Any other receiver yields
 * {@code semanticError}.
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return symTable.stringType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) exprType;
            if (types.isSameType(symTable.jsonType, unionType)) {
                // A union equivalent to json behaves as json.
                return symTable.jsonType;
            }
            LinkedHashSet<BType> laxMemberTypes = new LinkedHashSet<>();
            for (BType memberType : unionType.getMemberTypes()) {
                laxMemberTypes.add(getLaxFieldAccessType(memberType));
            }
            return laxMemberTypes.size() == 1
                    ? laxMemberTypes.iterator().next()
                    : BUnionType.create(null, laxMemberTypes);
        default:
            return symTable.semanticError;
    }
}
/**
 * Type-checks an optional field access ({@code expr?.field}). Nil is first stripped from
 * a union receiver (remembering that the result must be re-widened with nil); the
 * remaining effective type is then dispatched like a regular field access: records, lax
 * types, and chained accesses with lax original types. Unsupported receivers are reported
 * as errors.
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                           Name fieldName) {
    BType actualType = symTable.semanticError;
    boolean nillableExprType = false;
    BType effectiveType = varRefType;
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
        if (memTypes.contains(symTable.nilType)) {
            // Strip nil from the receiver; remember to re-add it to the result type.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }
            effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                    BUnionType.create(null, nilRemovedSet);
        }
    }
    if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
        actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                    varRefType, fieldName);
        }
        fieldAccessExpr.nilSafeNavigation = nillableExprType;
        fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isLax(effectiveType)) {
        // Lax access: the result includes error when the access can fail at runtime.
        BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access whose receiver's original (pre-widening) type was lax.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos,
                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
    }
    // Re-widen with nil when the receiver was nillable or the access is nil-safe.
    if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }
    return actualType;
}
/**
 * Reports whether a lax access on the given type can produce an error at runtime:
 * true for json and xml, false for map, and for unions true when any member can.
 */
private boolean accessCouldResultInError(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.MAP:
            return false;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (accessCouldResultInError(memberType)) {
                    return true;
                }
            }
            return false;
        default:
            return false;
    }
}
/**
 * Type-checks a member access expression ({@code expr[index]}). Nil is first stripped
 * from a union receiver (only mappings may be accessed nil-safely, and not on the LHS);
 * the access is then dispatched on the receiver kind: mappings (string index), lists
 * (int index), strings, xml sequences, and keyed tables (single or multi-key). The result
 * is re-widened with nil when the receiver was nillable.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
    boolean nillableExprType = false;
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
        if (memTypes.contains(symTable.nilType)) {
            // Strip nil from the receiver; nil-safe member access is only valid on
            // mappings and never as an assignment target.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }
            if (nillableExprType) {
                varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);
                if (!types.isSubTypeOfMapping(varRefType)) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
                if (indexBasedAccessExpr.isLValue) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
            }
        }
    }
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.semanticError;
    if (types.isSubTypeOfMapping(varRefType)) {
        // Mapping access: index must be a string.
        checkExpr(indexExpr, this.env, symTable.stringType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);
        if (actualType == symTable.semanticError) {
            if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) {
                // Constant key: report the specific missing field.
                String fieldName = getConstFieldName(indexExpr);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                        fieldName, indexBasedAccessExpr.expr.getBType());
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
        indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isSubTypeOfList(varRefType)) {
        // List access: index must be an int.
        checkExpr(indexExpr, this.env, symTable.intType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
        indexBasedAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) {
                // Constant index: report the out-of-range value.
                dlog.error(indexBasedAccessExpr.indexExpr.pos,
                        DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String access yields a single character (char string); never assignable.
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                    indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }
        checkExpr(indexExpr, this.env, symTable.intType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        indexBasedAccessExpr.originalType = symTable.charStringType;
        actualType = symTable.charStringType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (indexBasedAccessExpr.isLValue) {
            indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
            return actualType;
        }
        BType type = checkExpr(indexExpr, this.env, symTable.intType);
        if (type == symTable.semanticError) {
            return type;
        }
        indexBasedAccessExpr.originalType = varRefType;
        actualType = varRefType;
    } else if (varRefType.tag == TypeTags.TABLE) {
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                    varRefType);
            return symTable.semanticError;
        }
        BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType();
        BType keyTypeConstraint = tableType.keyTypeConstraint;
        if (tableType.keyTypeConstraint == null) {
            // No explicit key type: derive it from the key field names; keyless tables
            // cannot be accessed by member access.
            keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()).
                    fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint);
            if (keyTypeConstraint == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                        indexBasedAccessExpr.expr);
                return symTable.semanticError;
            }
        }
        if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
            checkExpr(indexExpr, this.env, keyTypeConstraint);
            if (indexExpr.getBType() == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }
        } else {
            // Multi-key access: each key expression is checked against the matching
            // member of the (tuple) key constraint.
            List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                    indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
            if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }
            for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                BLangExpression keyExpr = multiKeyExpressionList.get(i);
                checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                if (keyExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            }
        }
        if (expType.tag != TypeTags.NONE) {
            BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
            if (resultType == symTable.semanticError) {
                return symTable.semanticError;
            }
        }
        // A keyed lookup may miss, so the result is the row type widened with nil.
        BType constraint = tableType.constraint;
        actualType = addNilForNillableAccessType(constraint);
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (varRefType == symTable.semanticError) {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        return symTable.semanticError;
    } else {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                indexBasedAccessExpr.expr.getBType());
        return symTable.semanticError;
    }
    // Re-widen with nil when the receiver was nillable.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }
    return actualType;
}
/**
 * Extracts the compile-time known integer value of an index expression:
 * either a numeric literal or a reference to an int constant.
 */
private Long getConstIndex(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
        return (Long) ((BLangLiteral) indexExpr).value;
    }
    // Otherwise a simple var-ref to a constant symbol whose value holds the int.
    return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Extracts the compile-time known string value of a field-name index
 * expression: either a string literal or a reference to a string constant.
 */
private String getConstFieldName(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) indexExpr).value;
    }
    return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Determines the member type produced by indexing {@code arrayType} with an
 * index of type {@code indexExprType}; returns semanticError when the index
 * can be proven out of range at compile time.
 */
private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                         BArrayType arrayType) {
    BType actualType = symTable.semanticError;
    switch (indexExprType.tag) {
        case TypeTags.INT:
            BLangExpression indexExpr = indexBasedAccess.indexExpr;
            // A non-constant index or an open (unsized) array cannot be bounds-checked statically.
            if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) {
                actualType = arrayType.eType;
                break;
            }
            // Constant index into a fixed-length array: reject statically out-of-range access.
            actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
            break;
        case TypeTags.FINITE:
            // A finite index type is valid if at least one value in its value
            // space is a legal index for the array.
            BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
            boolean validIndexExists = false;
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                if (indexValue >= 0 &&
                        (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                    validIndexExists = true;
                    break;
                }
            }
            if (!validIndexExists) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.UNION:
            // Merge all finite members of the union into a single finite type and
            // re-check. NOTE(review): non-finite members (e.g. a plain `int`
            // member) are ignored here — presumably such unions are normalized
            // earlier; confirm.
            List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                    .filter(memType -> memType.tag == TypeTags.FINITE)
                    .map(matchedType -> (BFiniteType) matchedType)
                    .collect(Collectors.toList());
            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }
            BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
            if (elementType == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
    }
    return actualType;
}
/**
 * Resolves member access on a list-like type. Arrays and tuples are delegated
 * to their dedicated checkers; for a union, the results from each member type
 * are combined into a union (members yielding semanticError are dropped).
 */
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
            return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
        case TypeTags.TUPLE:
            return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
    }
    // Remaining case: a union of list types.
    LinkedHashSet<BType> memberResults = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType memberResult = checkListIndexBasedAccess(accessExpr, memberType);
        if (memberResult != symTable.semanticError) {
            memberResults.add(memberResult);
        }
    }
    if (memberResults.isEmpty()) {
        return symTable.semanticError;
    }
    return memberResults.size() == 1 ? memberResults.iterator().next()
            : BUnionType.create(null, memberResults);
}
/**
 * Determines the member type produced by indexing tuple {@code tuple} with an
 * index of type {@code currentType} (int, finite int set, or a union thereof).
 * Returns semanticError when no member can be accessed with such an index.
 */
private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.INT:
            if (isConst(indexExpr)) {
                // Constant index: the exact member type is known.
                actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
            } else {
                // Unknown index: result may be any member type of the tuple.
                BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                        tupleTypes);
            }
            break;
        case TypeTags.FINITE:
            // Union of the member types reachable by each value in the index's value space.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                BType fieldType = checkTupleFieldType(tuple, indexValue);
                if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                    possibleTypes.add(fieldType);
                }
            }
            if (possibleTypes.size() == 0) {
                return symTable.semanticError;
            }
            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            // Non-finite members are checked individually; finite members are first
            // merged into one finite type so their value spaces are checked together.
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });
            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }
            BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }
            // Any erroneous member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
    }
    return actualType;
}
/**
 * Accumulates every member type of {@code tupleType} into {@code memberTypes},
 * flattening union members, and returns the same (mutated) set.
 */
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : tupleType.tupleTypes) {
        if (memberType.tag == TypeTags.UNION) {
            collectMemberTypes((BUnionType) memberType, memberTypes);
        } else {
            memberTypes.add(memberType);
        }
    }
    return memberTypes;
}
/**
 * Resolves member access on a mapping type. A map yields its constraint
 * (nil-extended unless the access is an lvalue); a record is delegated to
 * {@link #checkRecordIndexBasedAccess}; a union combines the per-member
 * results, adding nil when any member failed to match.
 */
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    if (type.tag == TypeTags.MAP) {
        BType constraint = ((BMapType) type).constraint;
        return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
    }
    if (type.tag == TypeTags.RECORD) {
        return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
    }
    // Remaining case: a union of mapping types.
    boolean someMemberFailed = false;
    LinkedHashSet<BType> memberResults = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType memberResult = checkMappingIndexBasedAccess(accessExpr, memberType);
        if (memberResult == symTable.semanticError) {
            someMemberFailed = true;
        } else {
            memberResults.add(memberResult);
        }
    }
    if (memberResults.isEmpty()) {
        return symTable.semanticError;
    }
    BType combined = memberResults.size() == 1 ? memberResults.iterator().next()
            : BUnionType.create(null, memberResults);
    // If some union member could not provide the field, the access may produce nil.
    return someMemberFailed ? addNilForNillableAccessType(combined) : combined;
}
/**
 * Determines the field type produced by indexing record {@code record} with a
 * key of type {@code currentType} (string, finite string set, or a union
 * thereof). Optional/rest field access is nil-extended unless the access is an
 * lvalue; returns semanticError when no field can match.
 */
private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.STRING:
            if (isConst(indexExpr)) {
                String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                // Try required field first — its type is returned as-is.
                actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType != symTable.semanticError) {
                    return actualType;
                }
                // Then optional field, then rest field; both are nil-extended.
                actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType == symTable.semanticError) {
                    actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        return actualType;
                    }
                    // `never` stays `never` — adding nil would change its meaning.
                    if (actualType == symTable.neverType) {
                        return actualType;
                    }
                    return addNilForNillableAccessType(actualType);
                }
                if (accessExpr.isLValue) {
                    return actualType;
                }
                return addNilForNillableAccessType(actualType);
            }
            // Dynamic string key: result may be any field type (plus the rest
            // field type), and nil if none of those are already nillable.
            LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                    .map(field -> field.type)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (record.restFieldType.tag != TypeTags.NONE) {
                fieldTypes.add(record.restFieldType);
            }
            if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                fieldTypes.add(symTable.nilType);
            }
            actualType = BUnionType.create(null, fieldTypes);
            break;
        case TypeTags.FINITE:
            // Union of the field types reachable by each key in the value space.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                String fieldName = (String) ((BLangLiteral) finiteMember).value;
                BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (fieldType == symTable.semanticError) {
                    fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    }
                    if (fieldType != symTable.semanticError) {
                        fieldType = addNilForNillableAccessType(fieldType);
                    }
                }
                if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                    continue;
                }
                possibleTypes.add(fieldType);
            }
            if (possibleTypes.isEmpty()) {
                return symTable.semanticError;
            }
            if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                possibleTypes.add(symTable.nilType);
            }
            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            // Non-finite members are checked individually; finite members are first
            // merged into one finite type so their value spaces are checked together.
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });
            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }
            BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }
            // Any erroneous member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
    }
    return actualType;
}
/**
 * Flattens a type to a list: a union yields its member types, any other type
 * yields a singleton list.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    return new ArrayList<>(((BUnionType) type).getMemberTypes());
}
/**
 * Computes the possible result types of a match expression: the type of every
 * pattern expression, plus each member type of the matched expression that is
 * not covered by (assignable to) any pattern variable. A semantic error in
 * either the matched expression or a pattern short-circuits to an error set.
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
    LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.getBType();
            matchExprTypes.addAll(getTypesList(patternExprType));
            if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                // Plain construction instead of double-brace initialization,
                // which created a needless anonymous LinkedHashSet subclass.
                LinkedHashSet<BType> errorResult = new LinkedHashSet<>();
                errorResult.add(symTable.semanticError);
                return errorResult;
            }
            assignable = this.types.isAssignable(type, pattern.variable.getBType());
            if (assignable) {
                // This member type is consumed by a pattern; stop scanning patterns.
                break;
            }
        }
        if (!assignable) {
            // Uncovered member type flows through as a possible result.
            matchExprTypes.add(type);
        }
    }
    return matchExprTypes;
}
/**
 * Returns true if a value of {@code type} could (transitively) contain table
 * values. {@code encounteredTypes} guards against infinite recursion through
 * cyclic type definitions.
 *
 * NOTE(review): there is no {@code TypeTags.TABLE} case here, so a bare table
 * type itself returns false — presumably callers test for tables directly
 * before calling this; confirm.
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    // Already visited (cycle) — treat as "no" to terminate the recursion.
    if (encounteredTypes.contains(type)) {
        return false;
    }
    encounteredTypes.add(type);
    switch (type.tag) {
        case TypeTags.UNION:
            for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(bType1, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                if (couldHoldTableValues(field.type, encounteredTypes)) {
                    return true;
                }
            }
            // Open records may also hold tables via the rest field.
            return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType bType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(bType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
    }
    return false;
}
/**
 * Returns true if {@code expression} has a compile-time known value: either a
 * valid constant expression node, or a reference to a constant symbol.
 */
private boolean isConst(BLangExpression expression) {
    if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) {
        return true;
    }
    return expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
}
/** Returns the compilation-unit name (source file path) the node belongs to. */
private Name getCurrentCompUnit(BLangNode node) {
    String compUnitName = node.pos.lineRange().filePath();
    return names.fromString(compUnitName);
}
/**
 * Collapses {@code inferredTypeList} to its broadest type(s): any member that
 * is assignable to another member is removed; a single survivor is returned
 * directly, otherwise a union of the survivors. A semantic error anywhere in
 * the list is returned immediately.
 *
 * NOTE: mutates {@code inferredTypeList} in place; the index arithmetic below
 * compensates for removals during iteration.
 */
private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
    for (int i = 0; i < inferredTypeList.size(); i++) {
        BType type = inferredTypeList.get(i);
        if (type.tag == TypeTags.SEMANTIC_ERROR) {
            return type;
        }
        for (int j = i + 1; j < inferredTypeList.size(); j++) {
            BType otherType = inferredTypeList.get(j);
            if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                return otherType;
            }
            if (types.isAssignable(otherType, type)) {
                // `otherType` is narrower — drop it and re-examine index j.
                inferredTypeList.remove(j);
                j -= 1;
                continue;
            }
            if (types.isAssignable(type, otherType)) {
                // `type` is narrower — drop it and restart the outer step at i.
                inferredTypeList.remove(i);
                i -= 1;
                break;
            }
        }
    }
    if (inferredTypeList.size() == 1) {
        return inferredTypeList.get(0);
    }
    return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
}
/**
 * Infers and defines an anonymous record type from a record (mapping)
 * constructor expression, checking each field's value against {@code expType}.
 * Computed keys and spread-operator map/record fields contribute to the rest
 * field type; named keys contribute to concrete fields. Returns the defined
 * record type, or semanticError if any field type failed to check.
 */
private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
    // Field name -> (candidate types, required-ness, readonly-ness).
    Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
    // Candidate types for the record's rest field.
    List<BType> restFieldTypes = new ArrayList<>();
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
            BLangRecordKey key = keyValue.key;
            BLangExpression expression = keyValue.valueExpr;
            BLangExpression keyExpr = key.expr;
            if (key.computedKey) {
                // `[expr]: value` — key is dynamic, value contributes to the rest field.
                checkExpr(keyExpr, env, symTable.stringType);
                BType exprType = checkExpr(expression, env, expType);
                if (isUniqueType(restFieldTypes, exprType)) {
                    restFieldTypes.add(exprType);
                }
            } else {
                // `name: value` — contributes a concrete, required field.
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                       keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                               checkExpr(expression, env, expType),
                                       true, keyValue.readonly);
            }
        } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            // `...expr` — spread a map or record into this constructor.
            BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType);
            int typeTag = type.tag;
            if (typeTag == TypeTags.MAP) {
                BType constraintType = ((BMapType) type).constraint;
                if (isUniqueType(restFieldTypes, constraintType)) {
                    restFieldTypes.add(constraintType);
                }
            }
            if (type.tag != TypeTags.RECORD) {
                continue;
            }
            BRecordType recordType = (BRecordType) type;
            for (BField recField : recordType.fields.values()) {
                // Spread fields are required only when required in the source record.
                addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                       !Symbols.isOptional(recField.symbol), false);
            }
            if (!recordType.sealed) {
                BType restFieldType = recordType.restFieldType;
                if (isUniqueType(restFieldTypes, restFieldType)) {
                    restFieldTypes.add(restFieldType);
                }
            }
        } else {
            // Shorthand `name` (var-name field) — equivalent to `name: name`.
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                   varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                           checkExpr(varNameField, env, expType),
                                   true, varNameField.readonly);
        }
    }
    // Materialize the collected field info into BField symbols.
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    boolean allReadOnlyNonRestFields = true;
    for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
        FieldInfo fieldInfo = entry.getValue();
        List<BType> types = fieldInfo.types;
        if (types.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }
        String key = entry.getKey();
        Name fieldName = names.fromString(key);
        BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));
        Set<Flag> flags = new HashSet<>();
        if (fieldInfo.required) {
            flags.add(Flag.REQUIRED);
        } else {
            flags.add(Flag.OPTIONAL);
        }
        if (fieldInfo.readonly) {
            flags.add(Flag.READONLY);
        } else if (allReadOnlyNonRestFields) {
            allReadOnlyNonRestFields = false;
        }
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                symTable.builtinPos, VIRTUAL);
        fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = fields;
    if (restFieldTypes.contains(symTable.semanticError)) {
        return symTable.semanticError;
    }
    // No rest contributions => sealed record; otherwise the rest field is the
    // single contribution or a union of all of them.
    if (restFieldTypes.isEmpty()) {
        recordType.sealed = true;
        recordType.restFieldType = symTable.noType;
    } else if (restFieldTypes.size() == 1) {
        recordType.restFieldType = restFieldTypes.get(0);
    } else {
        recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
    }
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    // A readonly-expected or fully-readonly sealed record becomes readonly itself.
    if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
        recordType.flags |= Flags.READONLY;
        recordSymbol.flags |= Flags.READONLY;
    }
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                   recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                      names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    return recordType;
}
/**
 * Creates an anonymous record type symbol, together with a synthetic `init`
 * function (returning nil) registered in the record's scope.
 */
private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                 SymbolOrigin origin) {
    BRecordTypeSymbol recordSymbol =
            Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                       names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                       pkgID, null, env.scope.owner, location, origin);
    // Synthetic no-arg `init` returning nil.
    BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                         bInvokableType, location);
    recordSymbol.scope = new Scope(recordSymbol);
    // Registered under "<recordName>.<init>" so it can be looked up later.
    recordSymbol.scope.define(
            names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
            recordSymbol.initializerFunc.symbol);
    return recordSymbol;
}
/** Returns the field name denoted by a record key: an identifier or a string literal. */
private String getKeyName(BLangExpression key) {
    if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) key).variableName.value;
    }
    return (String) ((BLangLiteral) key).value;
}
/**
 * Records {@code exprType} as a candidate type for field {@code keyString}
 * during record type inference. A repeated field accumulates distinct types
 * and becomes required once any occurrence is required.
 *
 * NOTE(review): on repeated occurrences the {@code readonly} flag of the
 * first occurrence wins — presumably intentional; confirm.
 */
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
                                    BType exprType, boolean required, boolean readonly) {
    FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
    if (fieldInfo == null) {
        // First sighting of this field. Plain list construction replaces the
        // double-brace idiom, which created an anonymous ArrayList subclass.
        List<BType> typeList = new ArrayList<>();
        typeList.add(exprType);
        nonRestFieldTypes.put(keyString, new FieldInfo(typeList, required, readonly));
        return;
    }
    // Subsequent sighting: widen the candidate types and required-ness.
    if (isUniqueType(fieldInfo.types, exprType)) {
        fieldInfo.types.add(exprType);
    }
    if (required) {
        fieldInfo.required = true;
    }
}
/**
 * Returns true if {@code type} is not already present in {@code typeList}.
 * Records are compared by identity; all other types by structural sameness.
 */
private boolean isUniqueType(List<BType> typeList, BType type) {
    boolean compareByIdentity = type.tag == TypeTags.RECORD;
    for (BType existing : typeList) {
        boolean duplicate = compareByIdentity ? existing == type : types.isSameType(type, existing);
        if (duplicate) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an XML literal of {@code mutableXmlSubType} (or its readonly
 * counterpart) is compatible with {@code expType}, and returns the type to
 * use. For a union expected type, exactly one compatible member must exist;
 * zero members is an incompatibility, more than one is ambiguous.
 */
private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                  BType expType) {
    if (expType == symTable.semanticError) {
        return expType;
    }
    boolean unionExpType = expType.tag == TypeTags.UNION;
    if (expType == mutableXmlSubType) {
        return expType;
    }
    if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
        return mutableXmlSubType;
    }
    // Also consider the effective immutable (readonly) counterpart.
    BXMLSubType immutableXmlSubType = (BXMLSubType)
            ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                          anonymousModelHelper, names);
    if (expType == immutableXmlSubType) {
        return expType;
    }
    if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
        return immutableXmlSubType;
    }
    if (!unionExpType) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }
    // Union expected type: collect all members the literal could satisfy.
    List<BType> compatibleTypes = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (compatibleTypes.contains(memberType)) {
            continue;
        }
        if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
            compatibleTypes.add(memberType);
            continue;
        }
        if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
            compatibleTypes.add(mutableXmlSubType);
            continue;
        }
        if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
            compatibleTypes.add(immutableXmlSubType);
        }
    }
    if (compatibleTypes.isEmpty()) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }
    if (compatibleTypes.size() == 1) {
        return compatibleTypes.get(0);
    }
    // More than one candidate — the literal's intended type cannot be decided.
    dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
    return symTable.semanticError;
}
/**
 * Switches each selectively-immutable child of an XML element literal to its
 * effective immutable type, recursing into nested element literals. Children
 * that are already readonly, or that cannot be made immutable, are skipped.
 */
private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
    for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
        BType childType = modifiedChild.getBType();
        if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
            continue;
        }
        modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types,
                                                                             (SelectivelyImmutableReferenceType) childType,
                                                                             env, symTable, anonymousModelHelper, names));
        if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
            markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
        }
    }
}
/**
 * Reports an undefined-symbol error, unless the name is a synthetic one
 * produced by parser error recovery (already reported elsewhere).
 */
private void logUndefinedSymbolError(Location pos, String name) {
    if (missingNodesHelper.isMissingNode(name)) {
        return;
    }
    dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
/** Marks both the type and its type symbol with the ISOLATED flag. */
private void markTypeAsIsolated(BType actualType) {
    actualType.flags |= Flags.ISOLATED;
    actualType.tsymbol.flags |= Flags.ISOLATED;
}
/**
 * True when the `new` expression refers to an anonymous (object-constructor
 * expression) class rather than a named class.
 */
private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) {
    boolean hasTypeRef = cIExpr.getType() != null;
    return hasTypeRef && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS);
}
/**
 * Finds the class definition in the enclosing package whose symbol matches the
 * user-defined type referenced by the `new` expression; null if none matches.
 */
private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) {
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env,
                                                                names.fromIdNode(userDefinedType.pkgAlias),
                                                                names.fromIdNode(userDefinedType.typeName));
    for (BLangClassDefinition classDefn : env.enclPkg.classDefinitions) {
        if (classDefn.symbol == symbol) {
            return classDefn;
        }
    }
    return null;
}
/**
 * For an object-constructor expression in a readonly context: if every field
 * can be immutable, marks the class/type readonly and re-analyzes; otherwise
 * optionally logs an error per offending field ({@code logErrors}) and leaves
 * the type mutable.
 */
private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType,
                                               BLangClassDefinition classDefForConstructor, SymbolEnv env,
                                               boolean logErrors) {
    boolean hasNeverReadOnlyField = false;
    for (BField field : actualObjectType.fields.values()) {
        BType fieldType = field.type;
        if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
            // Field can never be readonly — analyze as-is, then either bail
            // out silently or keep going to report every offending field.
            analyzeObjectConstructor(classDefForConstructor, env);
            hasNeverReadOnlyField = true;
            if (!logErrors) {
                return;
            }
            dlog.error(field.pos,
                       DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                       fieldType);
        }
    }
    if (hasNeverReadOnlyField) {
        return;
    }
    // All fields qualify: promote the whole constructed object to readonly.
    classDefForConstructor.flagSet.add(Flag.READONLY);
    actualObjectType.flags |= Flags.READONLY;
    actualObjectType.tsymbol.flags |= Flags.READONLY;
    ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                              anonymousModelHelper, symTable, names, cIExpr.pos);
    analyzeObjectConstructor(classDefForConstructor, env);
}
/**
 * Marks a constructed object type as isolated when it is readonly, or when
 * every field is final and a subtype of readonly|isolated-object.
 */
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
    if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
        markTypeAsIsolated(actualObjectType);
        return;
    }
    boolean allFieldsQualify = true;
    for (BField field : actualObjectType.fields.values()) {
        boolean fieldQualifies = Symbols.isFlagOn(field.symbol.flags, Flags.FINAL)
                && types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type);
        if (!fieldQualifies) {
            allFieldsQualify = false;
            break;
        }
    }
    if (allFieldsQualify) {
        markTypeAsIsolated(actualObjectType);
    }
}
/**
 * Marks the access expression as a leaf (outermost) access node: true when,
 * after skipping grouping expressions, its parent is absent or is not another
 * field/index access expression.
 */
private void markLeafNode(BLangAccessExpression accessExpression) {
    BLangNode ancestor = accessExpression.parent;
    // Grouping expressions are transparent for this purpose.
    while (ancestor != null && ancestor.getKind() == NodeKind.GROUP_EXPR) {
        ancestor = ancestor.parent;
    }
    if (ancestor == null) {
        accessExpression.leafNode = true;
        return;
    }
    NodeKind parentKind = ancestor.getKind();
    if (parentKind != NodeKind.FIELD_BASED_ACCESS_EXPR && parentKind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpression.leafNode = true;
    }
}
/**
 * Aggregate information about one field while inferring a record type from a
 * mapping constructor: the candidate types seen for the field, whether it is
 * required (present unconditionally), and whether it was written as readonly.
 */
private static class FieldInfo {
    List<BType> types;
    boolean required;
    boolean readonly;

    private FieldInfo(List<BType> types, boolean required, boolean readonly) {
        this.types = types;
        this.required = required;
        this.readonly = readonly;
    }
}
/**
 * Pairs a resolved field symbol with the type determined for an access to it.
 */
private static class TypeSymbolPair {
    private BVarSymbol fieldSymbol;
    private BType determinedType;

    public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) {
        this.fieldSymbol = fieldSymbol;
        this.determinedType = determinedType;
    }
}
/**
 * Collects, per member record of a union, whether an accessed field is
 * undeclared or optional, for use in diagnostic messages.
 */
private static class RecordUnionDiagnostics {
    // Member records that do not declare the accessed field at all.
    Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>();
    // Member records that declare the accessed field as optional.
    Set<BRecordType> optionalInRecords = new LinkedHashSet<>();

    boolean hasUndeclaredAndOptional() {
        return hasUndeclared() && hasOptional();
    }

    boolean hasUndeclared() {
        return !undeclaredInRecords.isEmpty();
    }

    boolean hasOptional() {
        return !optionalInRecords.isEmpty();
    }

    /**
     * Renders record names as {@code A}, {@code A', and 'B},
     * {@code A', 'B', and 'C}, … — matching the quoting used by the
     * diagnostic templates that embed the result.
     */
    String recordsToString(Set<BRecordType> recordTypeSet) {
        StringBuilder rendered = new StringBuilder();
        int total = recordTypeSet.size();
        int position = 0;
        for (BRecordType recordType : recordTypeSet) {
            position++;
            rendered.append(recordType.tsymbol.getName().getValue());
            if (total > 1 && position < total) {
                rendered.append(position == total - 1 ? "', and '" : "', '");
            }
        }
        return rendered.toString();
    }
}
}
|
class TypeChecker extends BLangNodeVisitor {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
// Lang-lib functions that may change the length of a list (populated in the static initializer).
private static Set<String> listLengthModifierFunctions = new HashSet<>();
// Receiver-mutating lang-lib functions, keyed by lang-lib module name (populated in the static initializer).
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";
// Compiler components resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
// Current environment for the expression being checked (set per checkExpr call).
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;
// When true, type checking is exploratory: diagnostics are suppressed.
private boolean nonErrorLoggingCheck = false;
// Nesting depth of let expressions.
private int letCount = 0;
// Environments used while checking query expressions.
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;
/**
 * Expected types or inherited types.
 */
private BType expType;
// Result type produced by the most recent visit(...) call.
private BType resultType;
private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
/**
 * Returns the TypeChecker registered in the given compiler context, creating
 * and registering one (via the constructor) on first use.
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker cached = context.get(TYPE_CHECKER_KEY);
    return cached != null ? cached : new TypeChecker(context);
}
public TypeChecker(CompilerContext context) {
    // Register this instance first, so components resolved below that look up
    // TYPE_CHECKER_KEY receive this (partially constructed) instance instead
    // of recursing into a new one.
    context.put(TYPE_CHECKER_KEY, this);
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeNarrower = TypeNarrower.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    this.queryFinalClauses = new Stack<>();
    this.queryEnvs = new Stack<>();
    this.prevEnvs = new Stack<>();
    this.unifier = new Unifier();
}
/**
 * Type-checks {@code expr} with no particular expected type.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type-checks {@code expr} against {@code expType}, reporting mismatches with
 * the default incompatible-types diagnostic.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Type-checks {@code expr} against {@code expType}, using {@code diagCode} for
 * mismatch diagnostics. The checker communicates with visit(...) methods
 * through the {@code env}/{@code expType}/{@code diagCode}/{@code resultType}
 * fields, which are saved and restored around the visit.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    // An expression is only ever type-checked once.
    if (expr.typeChecked) {
        return expr.getBType();
    }
    // Work against the effective type of an intersection (e.g. `T & readonly`).
    if (expType.tag == TypeTags.INTERSECTION) {
        expType = ((BIntersectionType) expType).effectiveType;
    }
    // Save checker state before dispatching to the node's visit method.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    this.isTypeChecked = true;
    expr.expectedType = expType;

    expr.accept(this);

    if (resultType.tag == TypeTags.INTERSECTION) {
        resultType = ((BIntersectionType) resultType).effectiveType;
    }
    expr.setTypeCheckedType(resultType);
    expr.typeChecked = isTypeChecked;
    // Restore checker state.
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;

    validateAndSetExprExpectedType(expr);

    return resultType;
}
// Runs full semantic analysis on an object-constructor node, but only while
// diagnostics are being logged; speculative (muted) checks skip it.
private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
if (!nonErrorLoggingCheck) {
semanticAnalyzer.analyzeNode(node, env);
}
}
// Records the resolved result type as the expression's expected type, except
// when the result is a semantic error, or when a record literal was checked
// against a map type (the map expected type carries the needed information).
private void validateAndSetExprExpectedType(BLangExpression expr) {
if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null &&
expr.expectedType.tag == TypeTags.MAP && expr.getBType().tag == TypeTags.RECORD) {
return;
}
expr.expectedType = resultType;
}
// Type-checks a literal. setLiteralValueAndGetType may already have set
// resultType for finite-type contexts (isFiniteContext); otherwise fall
// through to the standard assignability check.
public void visit(BLangLiteral literalExpr) {
BType literalType = setLiteralValueAndGetType(literalExpr, expType);
if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
return;
}
resultType = types.checkType(literalExpr, literalType, expType);
}
// Type-checks an xml element access (x.<elem>): the subject must be xml and
// the access always yields an xml element sequence.
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
checkXMLNamespacePrefixes(xmlElementAccess.filters);
checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType);
}
// Type-checks an XML navigation access (children / filtered-children step).
// The navigated expression must be xml (a union subject is rejected), an
// optional child index must be int, and the result is xml for a plain
// children step or an xml element sequence otherwise.
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    checkXMLNamespacePrefixes(xmlNavigation.filters);

    BLangExpression childIndex = xmlNavigation.childIndex;
    if (childIndex != null) {
        checkExpr(childIndex, env, symTable.intType);
    }

    BType navigatedType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);
    if (navigatedType.tag == TypeTags.UNION) {
        dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
                xmlNavigation.expr.getBType());
    }

    boolean childrenStep = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN;
    BType actualType = childrenStep ? symTable.xmlType : symTable.xmlElementSeqType;
    types.checkType(xmlNavigation, actualType, expType);
    resultType = actualType;
}
// Resolves the namespace prefix of each element filter against the current
// environment's prefix space, recording the symbol on the filter and logging
// an error for unresolvable prefixes. Filters without a prefix are skipped.
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
    for (BLangXMLElementFilter filter : filters) {
        String namespace = filter.namespace;
        if (namespace.isEmpty()) {
            continue;
        }
        Name nsName = names.fromString(namespace);
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
        filter.namespaceSymbol = nsSymbol;
        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
        }
    }
}
// Determines the concrete type of a literal against the expected type,
// possibly rewriting literalExpr.value in place (e.g. widening an int literal
// to double, or re-encoding it as a String for decimal) and handling finite
// and union expected types. Returns semanticError for out-of-range or
// incompatible literals; sets isFiniteContext when matched via a finite type.
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
Object literalValue = literalExpr.value;
// --- integer-like literals (int/byte) ---
if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) {
if (expType.tag == TypeTags.FLOAT) {
// Int literal in float context: store the value as a double.
literalType = symTable.floatType;
literalExpr.value = ((Long) literalValue).doubleValue();
} else if (expType.tag == TypeTags.DECIMAL &&
!NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) {
// Int literal in decimal context (hex int literals are not decimals).
literalType = symTable.decimalType;
literalExpr.value = String.valueOf(literalValue);
} else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) {
// Range-check against the expected integer subtype.
literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue);
if (literalType == symTable.semanticError) {
return symTable.semanticError;
}
} else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
// Probe the finite type's value space for each numeric basic type,
// in this fixed precedence order.
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
// For a union expected type, pick the best-matching member: plain int
// (or int-compatible json/anydata/any) first, then an int subtype,
// then finite members, then byte/float/decimal in that order.
Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
BType intSubType = null;
boolean intOrIntCompatibleTypeFound = false;
for (BType memType : memberTypes) {
if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) ||
memType.tag == TypeTags.BYTE) {
intSubType = memType;
} else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) {
intOrIntCompatibleTypeFound = true;
}
}
if (intOrIntCompatibleTypeFound) {
return setLiteralValueAndGetType(literalExpr, symTable.intType);
}
if (intSubType != null) {
return setLiteralValueAndGetType(literalExpr, intSubType);
}
BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
return setLiteralValueAndGetType(literalExpr, symTable.byteType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
return setLiteralValueAndGetType(literalExpr, symTable.floatType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
}
// --- float literals ---
} else if (literalType.tag == TypeTags.FLOAT) {
String literal = String.valueOf(literalValue);
String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);
if (expType.tag == TypeTags.DECIMAL) {
// A float-discriminated (f-suffixed) or hex float literal can never
// be a decimal.
if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
symTable.floatType);
resultType = symTable.semanticError;
return resultType;
}
literalType = symTable.decimalType;
literalExpr.value = numericLiteral;
} else if (expType.tag == TypeTags.FLOAT) {
literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
} else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (!isDiscriminatedFloat
&& literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
if (unionMember != symTable.noType) {
return unionMember;
}
}
// --- decimal literals ---
} else if (literalType.tag == TypeTags.DECIMAL) {
return decimalLiteral(literalValue, literalExpr, expType);
// --- single-character string literals (char subtype) ---
} else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
if (expType.tag == TypeTags.CHAR_STRING) {
return symTable.charStringType;
}
if (expType.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
for (BType memType : memberTypes) {
if (TypeTags.isStringTypeTag(memType.tag)) {
return setLiteralValueAndGetType(literalExpr, memType);
} else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
memType.tag == TypeTags.ANY) {
return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
} else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
literalExpr)) {
setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
return literalType;
}
}
}
boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType);
return literalType;
}
// --- all other literal kinds (string, boolean, nil, ...) ---
} else {
if (this.expType.tag == TypeTags.FINITE) {
boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType);
return literalType;
}
} else if (this.expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) this.expType;
boolean foundMember = unionType.getMemberTypes()
.stream()
.anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType);
return literalType;
}
}
}
// A base16/base64 byte-array literal types as byte[].
if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
literalType = new BArrayType(symTable.byteType);
}
return literalType;
}
// Attempts to pick a member of the expected union that the (float/decimal)
// literal fits: first a direct match on desiredType's tag (or json/anydata/
// any), then finite members with float values, then decimal members, then
// finite members with decimal values. Returns symTable.noType when nothing
// applied, letting the caller fall through to its default handling.
// NOTE(review): the first finite-type probe uses symTable.floatType regardless
// of desiredType — appears to encode numeric precedence, but confirm it is
// intentional for the decimal-literal caller.
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
Set<BType> memberTypes = expType.getMemberTypes();
if (memberTypes.stream()
.anyMatch(memType -> memType.tag == desiredType.tag
|| memType.tag == TypeTags.JSON
|| memType.tag == TypeTags.ANYDATA
|| memType.tag == TypeTags.ANY)) {
return setLiteralValueAndGetType(literalExpr, desiredType);
}
BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
}
finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
return symTable.noType;
}
// Returns true if the finite type's value space contains a member whose type
// tag matches targetMemberTypeTag and to which the literal is assignable.
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        if (valueExpr.getBType().tag != targetMemberTypeTag) {
            continue;
        }
        if (types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
            return true;
        }
    }
    return false;
}
// Types a decimal literal: rejects a decimal-discriminated (d-suffixed)
// literal in float context, handles finite and union expected types, and
// finally strips the discriminator from the stored value.
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
String literal = String.valueOf(literalValue);
if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
symTable.decimalType);
resultType = symTable.semanticError;
return resultType;
}
if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
if (unionMember != symTable.noType) {
return unionMember;
}
}
// Default: the literal is a plain decimal; normalize the stored value.
literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
resultType = symTable.decimalType;
return symTable.decimalType;
}
// Marks the literal as checked in a finite-type context: installs an implicit
// cast to the expected type, records the result type, and sets
// isFiniteContext so visit(BLangLiteral) skips the standard type check.
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
types.setImplicitCastExpr(literalExpr, type, this.expType);
this.resultType = type;
literalExpr.isFiniteContext = true;
}
// Collects, from the finite members of a union, every value-space expression
// whose type tag matches matchType, and returns a new finite type over those
// values. Returns semanticError when the union has no finite members or no
// values of the requested type.
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteMembers = new ArrayList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FINITE) {
            finiteMembers.add((BFiniteType) memberType);
        }
    }
    if (finiteMembers.isEmpty()) {
        return symTable.semanticError;
    }

    int matchTag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
    for (BFiniteType finiteMember : finiteMembers) {
        // Gather matches per member before merging, mirroring the original
        // per-member collection step.
        Set<BLangExpression> matches = new HashSet<>();
        for (BLangExpression valueExpr : finiteMember.getValueSpace()) {
            if (valueExpr.getBType().tag == matchTag) {
                matches.add(valueExpr);
            }
        }
        matchedValueSpace.addAll(matches);
    }

    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    return new BFiniteType(null, matchedValueSpace);
}
// Range-checks an integer literal value against the expected integer subtype
// and returns that subtype; logs INCOMPATIBLE_TYPES and returns semanticError
// when the value does not fit the subtype's range.
private BType getIntLiteralType(Location location, BType expType, BType literalType,
Object literalValue) {
switch (expType.tag) {
case TypeTags.INT:
// Plain int always fits (the parser already range-checked it).
return symTable.intType;
case TypeTags.BYTE:
if (types.isByteLiteralValue((Long) literalValue)) {
return symTable.byteType;
}
break;
case TypeTags.SIGNED32_INT:
if (types.isSigned32LiteralValue((Long) literalValue)) {
return symTable.signed32IntType;
}
break;
case TypeTags.SIGNED16_INT:
if (types.isSigned16LiteralValue((Long) literalValue)) {
return symTable.signed16IntType;
}
break;
case TypeTags.SIGNED8_INT:
if (types.isSigned8LiteralValue((Long) literalValue)) {
return symTable.signed8IntType;
}
break;
case TypeTags.UNSIGNED32_INT:
if (types.isUnsigned32LiteralValue((Long) literalValue)) {
return symTable.unsigned32IntType;
}
break;
case TypeTags.UNSIGNED16_INT:
if (types.isUnsigned16LiteralValue((Long) literalValue)) {
return symTable.unsigned16IntType;
}
break;
case TypeTags.UNSIGNED8_INT:
if (types.isUnsigned8LiteralValue((Long) literalValue)) {
return symTable.unsigned8IntType;
}
break;
default:
}
// Falls through here on range overflow or a non-integer expected tag.
dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType);
resultType = symTable.semanticError;
return resultType;
}
// Type-checks a list constructor. With no contextually expected type (or a
// bare readonly), infer a tuple type from the members; otherwise check the
// constructor directly against the expected type.
@Override
public void visit(BLangListConstructorExpr listConstructor) {
if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) {
BType inferredType = getInferredTupleType(listConstructor, expType);
resultType = inferredType == symTable.semanticError ?
symTable.semanticError : types.checkType(listConstructor, inferredType, expType);
return;
}
resultType = checkListConstructorCompatibility(expType, listConstructor);
}
// Type-checks a table constructor expression. Three paths:
//  1. no contextual type (none/any/anydata): infer the row type from the
//     member record literals;
//  2. an expected table type (possibly behind an intersection): check each
//     member against its constraint and validate the key specifier;
//  3. an expected union: speculatively (with diagnostics muted) check the
//     constructor against every member and require exactly one match.
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
for (BType memType : memTypes) {
if (memType == symTable.semanticError) {
resultType = symTable.semanticError;
return;
}
}
// Cannot infer a row type from an empty constructor.
if (tableConstructorExpr.recordLiteralList.size() == 0) {
dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
resultType = symTable.semanticError;
return;
}
BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
recordLiteral.setBType(inherentMemberType);
}
if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
resultType = symTable.semanticError;
return;
}
if (checkKeySpecifier(tableConstructorExpr, tableType)) {
return;
}
resultType = tableType;
return;
}
// Unwrap an intersection to its effective type before dispatching.
BType applicableExpType = expType.tag == TypeTags.INTERSECTION ?
((BIntersectionType) expType).effectiveType : expType;
if (applicableExpType.tag == TypeTags.TABLE) {
List<BType> memTypes = new ArrayList<>();
for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
// Clone members during speculative checks so diagnostics/state from a
// failed attempt do not stick to the original nodes.
BLangRecordLiteral clonedExpr = recordLiteral;
if (this.nonErrorLoggingCheck) {
clonedExpr.cloneAttempt++;
clonedExpr = nodeCloner.cloneNode(recordLiteral);
}
BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
if (recordType == symTable.semanticError) {
resultType = symTable.semanticError;
return;
}
memTypes.add(recordType);
}
BTableType expectedTableType = (BTableType) applicableExpType;
if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) {
validateMapConstraintTable(applicableExpType);
return;
}
if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType,
tableConstructorExpr.recordLiteralList) &&
validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
resultType = symTable.semanticError;
return;
}
BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
null);
if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
tableType.flags |= Flags.READONLY;
}
if (checkKeySpecifier(tableConstructorExpr, tableType)) {
return;
}
// Inherit the expected key field names when the constructor had none.
if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
tableType.fieldNameList = expectedTableType.fieldNameList;
}
resultType = tableType;
} else if (applicableExpType.tag == TypeTags.UNION) {
// Mute diagnostics while trying each union member; restore afterwards.
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
this.nonErrorLoggingCheck = true;
int errorCount = this.dlog.errorCount();
this.dlog.mute();
List<BType> matchingTypes = new ArrayList<>();
BUnionType expectedType = (BUnionType) applicableExpType;
for (BType memType : expectedType.getMemberTypes()) {
dlog.resetErrorCount();
BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
if (this.nonErrorLoggingCheck) {
tableConstructorExpr.cloneAttempt++;
clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
}
BType resultType = checkExpr(clonedTableExpr, env, memType);
if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
isUniqueType(matchingTypes, resultType)) {
matchingTypes.add(resultType);
}
}
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
this.dlog.setErrorCount(errorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
if (matchingTypes.isEmpty()) {
// No member matched: report against the inferred table type.
BLangTableConstructorExpr exprToLog = tableConstructorExpr;
if (this.nonErrorLoggingCheck) {
tableConstructorExpr.cloneAttempt++;
exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
}
dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
getInferredTableType(exprToLog));
} else if (matchingTypes.size() != 1) {
dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
expType);
} else {
// Exactly one match: re-check for real (diagnostics unmuted).
resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
return;
}
resultType = symTable.semanticError;
} else {
resultType = symTable.semanticError;
}
}
// Infers a table type from the constructor's member record literals, for use
// in diagnostics; returns semanticError if any member fails to check.
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
    List<BType> rowTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
    for (BType rowType : rowTypes) {
        if (rowType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    BType memberType = inferTableMemberType(rowTypes, exprToLog);
    return new BTableType(TypeTags.TABLE, memberType, null);
}
/**
 * Validates the table constructor's key specifier, if present: every key field
 * in every member record literal must be given a constant expression. On
 * success the key field names are recorded on {@code tableType}.
 *
 * @return {@code true} when validation FAILED (resultType has been set to
 *         semanticError and the caller must stop), {@code false} otherwise
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
    if (tableConstructorExpr.tableKeySpecifier == null) {
        return false;
    }
    // Compute the key field names once; the original recomputed this list for
    // validation and again for assignment.
    List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
    if (!validateTableKeyValue(fieldNameList, tableConstructorExpr.recordLiteralList)) {
        resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = fieldNameList;
    return false;
}
// Folds the member row types into the narrowest union: starts from the first
// type and adds each subsequent type only if it is not already assignable to
// the union built so far. Returns the single type when everything collapsed
// to one member, or the expected table's constraint for an empty list.
private BType inferTableMemberType(List<BType> memTypes, BType expType) {
if (memTypes.isEmpty()) {
return ((BTableType) expType).constraint;
}
LinkedHashSet<BType> result = new LinkedHashSet<>();
result.add(memTypes.get(0));
BUnionType unionType = BUnionType.create(null, result);
for (int i = 1; i < memTypes.size(); i++) {
BType source = memTypes.get(i);
if (!types.isAssignable(source, unionType)) {
result.add(source);
unionType = BUnionType.create(null, result);
}
}
if (unionType.getMemberTypes().size() == 1) {
return memTypes.get(0);
}
return unionType;
}
/**
 * Infers the table member (row) type from the member record literal types of a
 * table constructor that has no contextually expected type. Fields common to
 * every member stay required; the rest become optional; key-specifier fields
 * additionally become readonly. Reports an error and returns semanticError
 * when the same field name appears with differing types across members.
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    Set<BField> allFieldSet = new LinkedHashSet<>();
    for (BType memType : memTypes) {
        allFieldSet.addAll(((BRecordType) memType).fields.values());
    }

    // Fields present in every member record remain required.
    Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet);
    for (BType memType : memTypes) {
        commonFieldSet.retainAll(((BRecordType) memType).fields.values());
    }

    List<String> requiredFieldNames = new ArrayList<>();
    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            requiredFieldNames.add(((BLangIdentifier) identifierNode).value);
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }

    List<String> fieldNames = new ArrayList<>();
    for (BField field : allFieldSet) {
        String fieldName = field.name.value;

        // Two distinct BField entries sharing a name means the field's type
        // differs across member records — the row type is ambiguous.
        if (fieldNames.contains(fieldName)) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY,
                    fieldName);
            return symTable.semanticError;
        }
        fieldNames.add(fieldName);

        boolean isOptional = true;
        for (BField commonField : commonFieldSet) {
            if (commonField.name.value.equals(fieldName)) {
                isOptional = false;
                requiredFieldNames.add(commonField.name.value);
            }
        }

        if (isOptional) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
        } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
            // Key fields are both required and readonly. Build one mask from a
            // single EnumSet instead of adding two masks with '+' — addition
            // only worked because the two flag bits are disjoint; this states
            // the OR-composition intent directly.
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED, Flag.READONLY));
        } else if (requiredFieldNames.contains(fieldName)) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
        }
    }
    return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
}
// Builds a closed (sealed) record type holding the given fields, registers a
// virtual type definition (with an init function) for it in the current
// package, and returns it for use as an inferred table row type.
private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) {
PackageID pkgID = env.enclPkg.symbol.pkgID;
BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
for (BField field : allFieldSet) {
recordSymbol.scope.define(field.name, field.symbol);
}
BRecordType recordType = new BRecordType(recordSymbol);
recordType.fields = allFieldSet.stream().collect(getFieldCollector());
recordSymbol.type = recordType;
recordType.tsymbol = recordSymbol;
// Create and register the synthetic type-definition node so later phases
// (e.g. desugar) can find it.
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
pos);
recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
names, symTable);
TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
// Sealed record: no rest field allowed.
recordType.sealed = true;
recordType.restFieldType = symTable.noType;
return recordType;
}
// Collector mapping fields by name into an insertion-ordered map; duplicate
// field names are a programming error and throw.
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (existing, duplicate) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", existing));
            },
            LinkedHashMap::new);
}
// Validates an inline-defined table type: its constraint must be a subtype of
// map<any|error>. Logs an error and sets resultType on failure.
private boolean validateTableType(BTableType tableType) {
    BType constraint = tableType.constraint;
    if (!tableType.isTypeInlineDefined || types.isAssignable(constraint, symTable.mapAllType)) {
        return true;
    }
    dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
    resultType = symTable.semanticError;
    return false;
}
// When the expected table type declares key field names, checks that every
// member record literal supplies constant values for them; trivially true
// when there is no key field list.
private boolean validateKeySpecifierInTableConstructor(BTableType tableType,
                                                       List<BLangRecordLiteral> recordLiterals) {
    List<String> keyFieldNames = tableType.fieldNameList;
    return keyFieldNames == null || validateTableKeyValue(keyFieldNames, recordLiterals);
}
// Ensures every key-specifier field is present in every member record literal
// with a constant expression as its value (a table key must be immutable and
// known at row-construction time); logs an error and fails otherwise.
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
List<BLangRecordLiteral> recordLiterals) {
for (String fieldName : keySpecifierFieldNames) {
for (BLangRecordLiteral recordLiteral : recordLiterals) {
BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName);
if (recordKeyValueField != null && isConstExpression(recordKeyValueField.getValue())) {
continue;
}
dlog.error(recordLiteral.pos,
DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
resultType = symTable.semanticError;
return false;
}
}
return true;
}
// Determines whether an expression is a constant expression for key-specifier
// purposes: literals and constructor-like expressions are constant, a simple
// variable reference is constant iff it resolves to a constant symbol, and a
// group expression delegates to its inner expression.
private boolean isConstExpression(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    switch (kind) {
        case LITERAL:
        case NUMERIC_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case TYPE_CONVERSION_EXPR:
        case UNARY_EXPR:
        case BINARY_EXPR:
        case TYPE_TEST_EXPR:
        case TERNARY_EXPR:
            return true;
        case SIMPLE_VARIABLE_REF:
            BSymbol symbol = ((BLangSimpleVarRef) expression).symbol;
            return (symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
        case GROUP_EXPR:
            return isConstExpression(((BLangGroupExpr) expression).expression);
        default:
            return false;
    }
}
// Finds the key-value field with the given name in a record literal, or null
// when no field matches.
// NOTE(review): every field is cast to BLangRecordKeyValueField without an
// instanceof check — a spread-operator or computed-name field would throw
// ClassCastException here; confirm callers only reach this with plain
// key-value mapping fields.
private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral,
String fieldName) {
for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField;
if (fieldName.equals(recordKeyValueField.key.toString())) {
return recordKeyValueField;
}
}
return null;
}
// Validates each key-specifier field against the table constraint type: the
// field must exist and be readonly, required, and anydata. Logs a specific
// diagnostic and sets resultType to semanticError on the first violation.
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
Location pos) {
for (String fieldName : fieldNameList) {
BField field = types.getTableConstraintField(constraint, fieldName);
if (field == null) {
dlog.error(pos,
DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
resultType = symTable.semanticError;
return false;
}
if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
dlog.error(pos,
DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
resultType = symTable.semanticError;
return false;
}
if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
dlog.error(pos,
DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
resultType = symTable.semanticError;
return false;
}
if (!types.isAssignable(field.type, symTable.anydataType)) {
dlog.error(pos,
DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
resultType = symTable.semanticError;
return false;
}
}
return true;
}
// Validates a table constructor against a table type: checks the key
// specifier's fields against the constraint, checks consistency with the
// type's declared key field names, and — when the type carries a key type
// constraint (table<T> key<K>) — checks that the specifier's fields match the
// constraint's member types positionally.
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
BTableType tableType) {
BType constraintType = tableType.constraint;
List<String> fieldNameList = new ArrayList<>();
boolean isKeySpecifierEmpty = tableConstructorExpr.tableKeySpecifier == null;
if (!isKeySpecifierEmpty) {
fieldNameList.addAll(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier));
// Only validate against the constraint when the type itself did not
// already declare key field names (that case is checked next).
if (tableType.fieldNameList == null &&
!validateKeySpecifier(fieldNameList,
constraintType.tag != TypeTags.INTERSECTION ? constraintType :
((BIntersectionType) constraintType).effectiveType,
tableConstructorExpr.tableKeySpecifier.pos)) {
return false;
}
// Declared key field names and the constructor's specifier must agree.
if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
tableType.fieldNameList.toString(), fieldNameList.toString());
resultType = symTable.semanticError;
return false;
}
}
BType keyTypeConstraint = tableType.keyTypeConstraint;
if (keyTypeConstraint != null) {
// Flatten the key type constraint into a positional list of member types.
List<BType> memberTypes = new ArrayList<>();
switch (keyTypeConstraint.tag) {
case TypeTags.TUPLE:
for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
memberTypes.add((BType) type);
}
break;
case TypeTags.RECORD:
// A record key constraint contributes the types of the fields named
// in the key specifier (or itself when none match).
Map<String, BField> fieldList = ((BRecordType) keyTypeConstraint).getFields();
memberTypes = fieldList.entrySet().stream()
.filter(e -> fieldNameList.contains(e.getKey())).map(entry -> entry.getValue().type)
.collect(Collectors.toList());
if (memberTypes.isEmpty()) {
memberTypes.add(keyTypeConstraint);
}
break;
default:
memberTypes.add(keyTypeConstraint);
}
// key<never> allows (and requires) an absent key specifier.
if (isKeySpecifierEmpty && keyTypeConstraint.tag == TypeTags.NEVER) {
return true;
}
if (isKeySpecifierEmpty ||
tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
if (isKeySpecifierEmpty) {
dlog.error(tableConstructorExpr.pos,
DiagnosticErrorCode.KEY_SPECIFIER_EMPTY_FOR_PROVIDED_KEY_CONSTRAINT, memberTypes);
} else {
dlog.error(tableConstructorExpr.pos,
DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
memberTypes, tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList);
}
resultType = symTable.semanticError;
return false;
}
// Positionally match each specifier field's type against the constraint.
List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
fieldNameIdentifierList;
int index = 0;
for (IdentifierNode identifier : fieldNameIdentifierList) {
BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
fieldNameIdentifierList.toString(), memberTypes.toString());
resultType = symTable.semanticError;
return false;
}
index++;
}
}
return true;
}
// A map-constrained table cannot declare key fields or a key type constraint
// (only lang-lib-owned types are exempt). Sets resultType to the expected
// type on success, or semanticError after logging on violation.
public void validateMapConstraintTable(BType expType) {
    if (expType != null) {
        BTableType tableType = (BTableType) expType;
        boolean hasKeySpec = tableType.fieldNameList != null || tableType.keyTypeConstraint != null;
        if (hasKeySpec && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
            dlog.error(tableType.keyPos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
List<String> fieldNamesList = new ArrayList<>();
for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
fieldNamesList.add(((BLangIdentifier) identifier).value);
}
return fieldNamesList;
}
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
if (fieldNames == null) {
return symTable.semanticError;
}
List<BType> memTypes = new ArrayList<>();
for (String fieldName : fieldNames) {
BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName);
if (tableConstraintField == null) {
return symTable.semanticError;
}
BType fieldType = tableConstraintField.type;
memTypes.add(fieldType);
}
if (memTypes.size() == 1) {
return memTypes.get(0);
}
return new BTupleType(memTypes);
}
    /**
     * Checks a list constructor expression against an expected type and returns the applicable
     * list type, or {@code semanticError} when no compatible type exists.
     * For a union expected type, each member is tried with diagnostics muted; exactly one member
     * must accept the constructor — zero matches or multiple matches are reported as errors.
     *
     * @param bType           the expected type
     * @param listConstructor the list constructor expression being checked
     * @return the compatible list type, or {@code symTable.semanticError}
     */
    private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
        int tag = bType.tag;
        if (tag == TypeTags.UNION) {
            // Try each member silently: remember the logging state and error count so
            // diagnostics from failed attempts can be discarded afterwards.
            boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
            int errorCount = this.dlog.errorCount();
            this.nonErrorLoggingCheck = true;
            this.dlog.mute();
            List<BType> compatibleTypes = new ArrayList<>();
            boolean erroredExpType = false;
            for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
                if (memberType == symTable.semanticError) {
                    if (!erroredExpType) {
                        erroredExpType = true;
                    }
                    continue;
                }
                BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
                if (listCompatibleMemType == symTable.semanticError) {
                    continue;
                }
                // Reset before each attempt so dlog.errorCount() reflects only this member.
                dlog.resetErrorCount();
                BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
                // Only error-free, not-yet-seen member types count as candidates.
                if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                        isUniqueType(compatibleTypes, memCompatibiltyType)) {
                    compatibleTypes.add(memCompatibiltyType);
                }
            }
            // Restore the logging state saved above.
            this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
            this.dlog.setErrorCount(errorCount);
            if (!prevNonErrorLoggingCheck) {
                this.dlog.unmute();
            }
            if (compatibleTypes.isEmpty()) {
                // No member accepted the constructor; type-check a clone (when in silent mode)
                // to infer a tuple type for a readable "incompatible types" message.
                BLangListConstructorExpr exprToLog = listConstructor;
                if (this.nonErrorLoggingCheck) {
                    listConstructor.cloneAttempt++;
                    exprToLog = nodeCloner.cloneNode(listConstructor);
                }
                BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);
                if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                    dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
                }
                return symTable.semanticError;
            } else if (compatibleTypes.size() != 1) {
                // More than one member matched: the constructor is ambiguous for this union.
                dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                        expType);
                return symTable.semanticError;
            }
            // Re-check against the single winner, this time with diagnostics enabled.
            return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
        }
        if (tag == TypeTags.INTERSECTION) {
            return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
        }
        BType possibleType = getListConstructorCompatibleNonUnionType(bType);
        switch (possibleType.tag) {
            case TypeTags.ARRAY:
                return checkArrayType(listConstructor, (BArrayType) possibleType);
            case TypeTags.TUPLE:
                return checkTupleType(listConstructor, (BTupleType) possibleType);
            case TypeTags.READONLY:
                return checkReadOnlyListType(listConstructor);
            case TypeTags.TYPEDESC:
                // The constructor is a list of typedescs; compute the typedesc type it denotes.
                List<BType> results = new ArrayList<>();
                listConstructor.isTypedescExpr = true;
                for (int i = 0; i < listConstructor.exprs.size(); i++) {
                    results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
                }
                List<BType> actualTypes = new ArrayList<>();
                for (int i = 0; i < listConstructor.exprs.size(); i++) {
                    final BLangExpression expr = listConstructor.exprs.get(i);
                    if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                        actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                    } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                        actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                    } else {
                        actualTypes.add(results.get(i));
                    }
                }
                if (actualTypes.size() == 1) {
                    listConstructor.typedescType = actualTypes.get(0);
                } else {
                    listConstructor.typedescType = new BTupleType(actualTypes);
                }
                return new BTypedescType(listConstructor.typedescType, null);
        }
        // No list-compatible type: infer a tuple type (on a clone when in silent mode)
        // purely to produce a helpful diagnostic.
        BLangListConstructorExpr exprToLog = listConstructor;
        if (this.nonErrorLoggingCheck) {
            listConstructor.cloneAttempt++;
            exprToLog = nodeCloner.cloneNode(listConstructor);
        }
        if (bType == symTable.semanticError) {
            getInferredTupleType(exprToLog, symTable.semanticError);
        } else {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                    getInferredTupleType(exprToLog, symTable.noType));
        }
        return symTable.semanticError;
    }
private BType getListConstructorCompatibleNonUnionType(BType type) {
switch (type.tag) {
case TypeTags.ARRAY:
case TypeTags.TUPLE:
case TypeTags.READONLY:
case TypeTags.TYPEDESC:
return type;
case TypeTags.JSON:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANYDATA:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANY:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env,
symTable, anonymousModelHelper, names);
case TypeTags.INTERSECTION:
return ((BIntersectionType) type).effectiveType;
}
return symTable.semanticError;
}
private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
BType eType = arrayType.eType;
if (arrayType.state == BArrayState.INFERRED) {
arrayType.size = listConstructor.exprs.size();
arrayType.state = BArrayState.CLOSED;
} else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) {
if (arrayType.size < listConstructor.exprs.size()) {
dlog.error(listConstructor.pos,
DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
listConstructor.exprs.size());
return symTable.semanticError;
}
if (!types.hasFillerValue(eType)) {
dlog.error(listConstructor.pos,
DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
return symTable.semanticError;
}
}
boolean errored = false;
for (BLangExpression expr : listConstructor.exprs) {
if (exprIncompatible(eType, expr) && !errored) {
errored = true;
}
}
return errored ? symTable.semanticError : arrayType;
}
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
List<BLangExpression> exprs = listConstructor.exprs;
List<BType> memberTypes = tupleType.tupleTypes;
BType restType = tupleType.restType;
int listExprSize = exprs.size();
int memberTypeSize = memberTypes.size();
if (listExprSize < memberTypeSize) {
for (int i = listExprSize; i < memberTypeSize; i++) {
if (!types.hasFillerValue(memberTypes.get(i))) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
"tuple and expression size does not match");
return symTable.semanticError;
}
}
} else if (listExprSize > memberTypeSize && restType == null) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
"tuple and expression size does not match");
return symTable.semanticError;
}
boolean errored = false;
int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize;
for (int i = 0; i < nonRestCountToCheck; i++) {
if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) {
errored = true;
}
}
for (int i = nonRestCountToCheck; i < exprs.size(); i++) {
if (exprIncompatible(restType, exprs.get(i)) && !errored) {
errored = true;
}
}
return errored ? symTable.semanticError : tupleType;
}
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
if (!this.nonErrorLoggingCheck) {
BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
if (inferredType == symTable.semanticError) {
return symTable.semanticError;
}
return types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
for (BLangExpression expr : listConstructor.exprs) {
if (exprIncompatible(symTable.readonlyType, expr)) {
return symTable.semanticError;
}
}
return symTable.readonlyType;
}
private boolean exprIncompatible(BType eType, BLangExpression expr) {
if (expr.typeChecked) {
return expr.getBType() == symTable.semanticError;
}
BLangExpression exprToCheck = expr;
if (this.nonErrorLoggingCheck) {
expr.cloneAttempt++;
exprToCheck = nodeCloner.cloneNode(expr);
}
return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError;
}
    /**
     * Type-checks each expression in {@code exprs} with no particular expected type.
     *
     * @param exprs the expressions to check
     * @param env   the environment to check them in
     * @return the resolved type of each expression, in order
     */
    private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
        return checkExprList(exprs, env, symTable.noType);
    }
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
List<BType> types = new ArrayList<>();
SymbolEnv prevEnv = this.env;
BType preExpType = this.expType;
this.env = env;
this.expType = expType;
for (BLangExpression e : exprs) {
checkExpr(e, this.env, expType);
types.add(resultType);
}
this.env = prevEnv;
this.expType = preExpType;
return types;
}
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType);
for (BType memType : memTypes) {
if (memType == symTable.semanticError) {
return symTable.semanticError;
}
}
BTupleType tupleType = new BTupleType(memTypes);
if (expType.tag != TypeTags.READONLY) {
return tupleType;
}
tupleType.flags |= Flags.READONLY;
return tupleType;
}
public void visit(BLangRecordLiteral recordLiteral) {
int expTypeTag = expType.tag;
if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) {
expType = defineInferredRecordType(recordLiteral, expType);
} else if (expTypeTag == TypeTags.OBJECT) {
dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
resultType = symTable.semanticError;
return;
}
resultType = getEffectiveMappingType(recordLiteral,
checkMappingConstructorCompatibility(expType, recordLiteral));
}
    /**
     * Computes the effective type of a mapping constructor once its applicable mapping type
     * is known. When the constructor uses {@code readonly} fields that the applicable type
     * does not already declare readonly, a new record type is synthesized in which those
     * fields are required and readonly; otherwise the applicable type is returned as-is.
     *
     * @param recordLiteral         the mapping constructor expression
     * @param applicableMappingType the compatible map/record type already determined
     * @return the effective (possibly newly defined) mapping type
     */
    private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
        if (applicableMappingType == symTable.semanticError ||
                (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                                                                                  Flags.READONLY))) {
            // Nothing to refine: errored type, or the whole record is already readonly.
            return applicableMappingType;
        }
        // Collect `readonly` fields in the constructor that aren't already readonly in the type.
        Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
        LinkedHashMap<String, BField> applicableTypeFields =
                applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields :
                        new LinkedHashMap<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                continue;
            }
            String name;
            if (field.isKeyValueField()) {
                BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;
                if (!keyValueField.readonly) {
                    continue;
                }
                BLangExpression keyExpr = keyValueField.key.expr;
                if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    name = ((BLangSimpleVarRef) keyExpr).variableName.value;
                } else {
                    name = (String) ((BLangLiteral) keyExpr).value;
                }
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                if (!varNameField.readonly) {
                    continue;
                }
                name = varNameField.variableName.value;
            }
            if (applicableTypeFields.containsKey(name) &&
                    Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
                // Already readonly in the applicable type; no refinement needed for this field.
                continue;
            }
            readOnlyFields.put(name, field);
        }
        if (readOnlyFields.isEmpty()) {
            return applicableMappingType;
        }
        // Synthesize a new record type carrying the readonly fields as required readonly members.
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
        LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();
        for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
            RecordLiteralNode.RecordField field = readOnlyEntry.getValue();
            String key = readOnlyEntry.getKey();
            Name fieldName = names.fromString(key);
            BType readOnlyFieldType;
            if (field.isKeyValueField()) {
                readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
            } else {
                // Has to be a varname field, since spread op and computed-key fields are skipped above.
                readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
            }
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                add(Flag.REQUIRED);
                add(Flag.READONLY);
            }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                                                    ((BLangNode) field).pos, VIRTUAL);
            newFields.put(key, new BField(fieldName, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
        if (applicableMappingType.tag == TypeTags.MAP) {
            recordType.sealed = false;
            recordType.restFieldType = ((BMapType) applicableMappingType).constraint;
        } else {
            // Copy over the remaining fields of the applicable record type unchanged.
            BRecordType applicableRecordType = (BRecordType) applicableMappingType;
            boolean allReadOnlyFields = true;
            for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
                String fieldName = origEntry.getKey();
                BField field = origEntry.getValue();
                if (readOnlyFields.containsKey(fieldName)) {
                    // Already defined via the readonly-field pass above.
                    continue;
                }
                BVarSymbol origFieldSymbol = field.symbol;
                long origFieldFlags = origFieldSymbol.flags;
                if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                    allReadOnlyFields = false;
                }
                BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                                                        origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
                newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
                recordSymbol.scope.define(field.name, fieldSymbol);
            }
            recordType.sealed = applicableRecordType.sealed;
            recordType.restFieldType = applicableRecordType.restFieldType;
            // A sealed record whose fields are all readonly is itself a readonly type.
            if (recordType.sealed && allReadOnlyFields) {
                recordType.flags |= Flags.READONLY;
                recordType.tsymbol.flags |= Flags.READONLY;
            }
        }
        recordType.fields = newFields;
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        // Register the synthesized type so later phases (e.g. desugar) can see its definition.
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                       recordLiteral.pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                           names, symTable);
        TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
        if (applicableMappingType.tag == TypeTags.MAP) {
            recordLiteral.expectedType = applicableMappingType;
        }
        return recordType;
    }
    /**
     * Checks a mapping constructor expression against an expected type and returns the applicable
     * mapping type, or {@code semanticError} when no compatible type exists.
     * For a union expected type, each member is tried with diagnostics muted; exactly one member
     * must accept the constructor — zero matches or multiple matches are reported as errors.
     *
     * @param bType              the expected type
     * @param mappingConstructor the mapping constructor expression being checked
     * @return the compatible mapping type, or {@code symTable.semanticError}
     */
    private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
        int tag = bType.tag;
        if (tag == TypeTags.UNION) {
            // Try each member silently: remember the logging state and error count so
            // diagnostics from failed attempts can be discarded afterwards.
            boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
            this.nonErrorLoggingCheck = true;
            int errorCount = this.dlog.errorCount();
            this.dlog.mute();
            List<BType> compatibleTypes = new ArrayList<>();
            boolean erroredExpType = false;
            for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
                if (memberType == symTable.semanticError) {
                    if (!erroredExpType) {
                        erroredExpType = true;
                    }
                    continue;
                }
                BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
                if (listCompatibleMemType == symTable.semanticError) {
                    continue;
                }
                // Reset before each attempt so dlog.errorCount() reflects only this member.
                dlog.resetErrorCount();
                BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                                                                                 mappingConstructor);
                // Only error-free, not-yet-seen member types count as candidates.
                if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                        isUniqueType(compatibleTypes, memCompatibiltyType)) {
                    compatibleTypes.add(memCompatibiltyType);
                }
            }
            // Restore the logging state saved above.
            this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
            dlog.setErrorCount(errorCount);
            if (!prevNonErrorLoggingCheck) {
                this.dlog.unmute();
            }
            if (compatibleTypes.isEmpty()) {
                if (!erroredExpType) {
                    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
                }
                // Still check the fields (against semanticError) so field-level diagnostics surface.
                validateSpecifiedFields(mappingConstructor, symTable.semanticError);
                return symTable.semanticError;
            } else if (compatibleTypes.size() != 1) {
                // More than one member matched: the constructor is ambiguous for this union.
                dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
                validateSpecifiedFields(mappingConstructor, symTable.semanticError);
                return symTable.semanticError;
            }
            // Re-check against the single winner, this time with diagnostics enabled.
            return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
        }
        if (tag == TypeTags.INTERSECTION) {
            return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
        }
        BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);
        switch (possibleType.tag) {
            case TypeTags.MAP:
                return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                        symTable.semanticError;
            case TypeTags.RECORD:
                // A record additionally requires all non-defaultable required fields to be present.
                boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);
                boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                                                                      mappingConstructor.fields,
                                                                      mappingConstructor.pos);
                return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
            case TypeTags.READONLY:
                return checkReadOnlyMappingType(mappingConstructor);
        }
        reportIncompatibleMappingConstructorError(mappingConstructor, bType);
        validateSpecifiedFields(mappingConstructor, symTable.semanticError);
        return symTable.semanticError;
    }
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
if (!this.nonErrorLoggingCheck) {
BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
if (inferredType == symTable.semanticError) {
return symTable.semanticError;
}
return checkMappingConstructorCompatibility(inferredType, mappingConstructor);
}
for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
BLangExpression exprToCheck;
if (field.isKeyValueField()) {
exprToCheck = ((BLangRecordKeyValueField) field).valueExpr;
} else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
} else {
exprToCheck = (BLangRecordVarNameField) field;
}
if (exprIncompatible(symTable.readonlyType, exprToCheck)) {
return symTable.semanticError;
}
}
return symTable.readonlyType;
}
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
switch (type.tag) {
case TypeTags.MAP:
case TypeTags.RECORD:
case TypeTags.READONLY:
return type;
case TypeTags.JSON:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env,
symTable, anonymousModelHelper, names);
case TypeTags.ANYDATA:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANY:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env,
symTable, anonymousModelHelper, names);
case TypeTags.INTERSECTION:
return ((BIntersectionType) type).effectiveType;
}
return symTable.semanticError;
}
private boolean isMappingConstructorCompatibleType(BType type) {
return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP;
}
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
if (expType == symTable.semanticError) {
return;
}
if (expType.tag != TypeTags.UNION) {
dlog.error(mappingConstructorExpr.pos,
DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
return;
}
BUnionType unionType = (BUnionType) expType;
BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
if (memberTypes.length == 2) {
BRecordType recType = null;
if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
recType = (BRecordType) memberTypes[0];
} else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
recType = (BRecordType) memberTypes[1];
}
if (recType != null) {
validateSpecifiedFields(mappingConstructorExpr, recType);
validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
return;
}
}
for (BType bType : memberTypes) {
if (isMappingConstructorCompatibleType(bType)) {
dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
unionType);
return;
}
}
dlog.error(mappingConstructorExpr.pos,
DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
boolean isFieldsValid = true;
for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
BType checkedType = checkMappingField(field, possibleType);
if (isFieldsValid && checkedType == symTable.semanticError) {
isFieldsValid = false;
}
}
return isFieldsValid;
}
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
Location pos) {
HashSet<String> specFieldNames = getFieldNames(specifiedFields);
boolean hasAllRequiredFields = true;
for (BField field : type.fields.values()) {
String fieldName = field.name.value;
if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)
&& !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
if (hasAllRequiredFields) {
hasAllRequiredFields = false;
}
}
}
return hasAllRequiredFields;
}
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
HashSet<String> fieldNames = new HashSet<>();
for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
if (specifiedField.isKeyValueField()) {
String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
if (name == null) {
continue;
}
fieldNames.add(name);
} else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
} else {
fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
(BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
}
}
return fieldNames;
}
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
BLangRecordKey key = field.key;
if (key.computedKey) {
return null;
}
BLangExpression keyExpr = key.expr;
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) keyExpr).variableName.value;
} else if (keyExpr.getKind() == NodeKind.LITERAL) {
return (String) ((BLangLiteral) keyExpr).value;
}
return null;
}
    /**
     * Returns the field name of a var-name field (e.g. {@code {x}}), which is the variable name itself.
     */
    private String getVarNameFieldName(BLangRecordVarNameField field) {
        return field.variableName.value;
    }
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
BType spreadType = checkExpr(field.expr, env);
if (spreadType.tag != TypeTags.RECORD) {
return Collections.emptyList();
}
List<String> fieldNames = new ArrayList<>();
for (BField bField : ((BRecordType) spreadType).getFields().values()) {
if (!Symbols.isOptional(bField.symbol)) {
fieldNames.add(bField.name.value);
}
}
return fieldNames;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
if (workerFlushExpr.workerIdentifier != null) {
String workerName = workerFlushExpr.workerIdentifier.getValue();
if (!this.workerExists(this.env, workerName)) {
this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
} else {
BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
if (symbol != symTable.notFoundSymbol) {
workerFlushExpr.workerSymbol = symbol;
}
}
}
BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
resultType = types.checkType(workerFlushExpr, actualType, expType);
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
if (symTable.notFoundSymbol.equals(symbol)) {
syncSendExpr.workerType = symTable.semanticError;
} else {
syncSendExpr.workerType = symbol.type;
syncSendExpr.workerSymbol = symbol;
}
syncSendExpr.env = this.env;
checkExpr(syncSendExpr.expr, this.env);
if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
syncSendExpr.expr.getBType());
}
String workerName = syncSendExpr.workerIdentifier.getValue();
if (!this.workerExists(this.env, workerName)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
}
syncSendExpr.expectedType = expType;
resultType = expType == symTable.noType ? symTable.nilType : expType;
}
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
workerReceiveExpr.env = this.env;
if (symTable.notFoundSymbol.equals(symbol)) {
workerReceiveExpr.workerType = symTable.semanticError;
} else {
workerReceiveExpr.workerType = symbol.type;
workerReceiveExpr.workerSymbol = symbol;
}
if (symTable.noType == this.expType) {
this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
}
workerReceiveExpr.setBType(this.expType);
resultType = this.expType;
}
private boolean workerExists(SymbolEnv env, String workerName) {
if (workerName.equals(DEFAULT_WORKER_NAME)) {
return true;
}
BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
return symbol != this.symTable.notFoundSymbol &&
symbol.type.tag == TypeTags.FUTURE &&
((BFutureType) symbol.type).workerDerivative;
}
@Override
public void visit(BLangConstRef constRef) {
constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env,
names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));
types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
resultType = constRef.getBType();
}
    /**
     * Type-checks a simple variable reference: handles the wildcard {@code _}, XMLNS-prefixed
     * references, variables, type references, and constant references, then checks the actual
     * type against the expected type.
     */
    public void visit(BLangSimpleVarRef varRefExpr) {
        // Set the type to 'semanticError' and update it if the variable is resolved successfully.
        BType actualType = symTable.semanticError;
        Name varName = names.fromIdNode(varRefExpr.variableName);
        if (varName == Names.IGNORE) {
            // The wildcard `_` accepts any value; give it a fresh throwaway symbol.
            varRefExpr.setBType(this.symTable.anyType);
            varRefExpr.symbol = new BVarSymbol(0, true, varName,
                    names.originalNameFromIdNode(varRefExpr.variableName),
                    env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner,
                    varRefExpr.pos, VIRTUAL);
            resultType = varRefExpr.getBType();
            return;
        }
        Name compUnitName = getCurrentCompUnit(varRefExpr);
        varRefExpr.pkgSymbol =
                symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
        if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
            varRefExpr.symbol = symTable.notFoundSymbol;
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
        }
        if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
            // An XML namespace prefix reference evaluates to the namespace URI string.
            actualType = symTable.stringType;
        } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
                    names.fromIdNode(varRefExpr.pkgAlias), varName);
            if (symbol == symTable.notFoundSymbol && env.enclType != null) {
                // Fall back to an attached function of the enclosing type (obj.func style).
                Name objFuncName = names.fromString(Symbols
                        .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
                symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                        env.enclType.getBType().tsymbol);
            }
            if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
                BVarSymbol varSym = (BVarSymbol) symbol;
                checkSelfReferences(varRefExpr.pos, env, varSym);
                varRefExpr.symbol = varSym;
                actualType = varSym.type;
                // Record closure capture if the variable comes from an enclosing scope.
                markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
            } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
                // A type name used as a value denotes a typedesc.
                actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
                varRefExpr.symbol = symbol;
            } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
                BConstantSymbol constSymbol = (BConstantSymbol) symbol;
                varRefExpr.symbol = constSymbol;
                BType symbolType = symbol.type;
                // Prefer the constant's declared type when a finite expected type could match it;
                // otherwise fall back to the broader literal type.
                if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                        (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                                .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                        types.isAssignable(symbolType, memType)))) {
                    actualType = symbolType;
                } else {
                    actualType = constSymbol.literalType;
                }
                // Constants are immutable: reject assignment targets.
                if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                    actualType = symTable.semanticError;
                    dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
                }
            } else {
                varRefExpr.symbol = symbol;
                logUndefinedSymbolError(varRefExpr.pos, varName.value);
            }
        }
        if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
            // NOTE(review): this returns without assigning resultType, leaving it at its previous
            // value rather than semanticError — confirm whether that is intentional.
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
            return;
        }
        resultType = types.checkType(varRefExpr, actualType, expType);
    }
@Override
public void visit(BLangRecordVarRef varRefExpr) {
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
env.enclPkg.symbol.pkgID, null, env.scope.owner,
varRefExpr.pos, SOURCE);
symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);
boolean unresolvedReference = false;
for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
bLangVarReference.isLValue = true;
checkExpr(recordRefField.variableReference, env);
if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
!isValidVariableReference(recordRefField.variableReference)) {
unresolvedReference = true;
continue;
}
BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
names.originalNameFromIdNode(recordRefField.variableName),
env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
varRefExpr.pos, SOURCE));
fields.put(field.name.value, field);
}
BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
if (restParam != null) {
checkExpr(restParam, env);
unresolvedReference = !isValidVariableReference(restParam);
}
if (unresolvedReference) {
resultType = symTable.semanticError;
return;
}
BRecordType bRecordType = new BRecordType(recordSymbol);
bRecordType.fields = fields;
recordSymbol.type = bRecordType;
varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(),
env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
SOURCE);
if (restParam == null) {
bRecordType.sealed = true;
bRecordType.restFieldType = symTable.noType;
} else if (restParam.getBType() == symTable.semanticError) {
bRecordType.restFieldType = symTable.mapType;
} else {
BType restFieldType;
if (restParam.getBType().tag == TypeTags.RECORD) {
restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
} else if (restParam.getBType().tag == TypeTags.MAP) {
restFieldType = ((BMapType) restParam.getBType()).constraint;
} else {
restFieldType = restParam.getBType();
}
bRecordType.restFieldType = restFieldType;
}
resultType = bRecordType;
}
// Type-checks an error binding pattern used as a variable reference on the LHS of a
// destructuring assignment: validates the message, cause, named detail and rest
// bindings, then sets `resultType` to the reconstructed error type (or semanticError).
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    if (varRefExpr.typeNode != null) {
        // Indirect error binding pattern carrying an explicit error type reference.
        BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
        varRefExpr.setBType(bType);
        checkIndirectErrorVarRef(varRefExpr);
        resultType = bType;
        return;
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
        // The message binding must be able to hold a string.
        if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
            dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                    varRefExpr.message.getBType());
        }
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
        // The cause binding must be able to hold `error?`.
        if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
            dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                    varRefExpr.cause.getBType());
        }
    }
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.isLValue = true;
        checkExpr(refItem, env);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Field/index access is not a valid binding target inside an error binding pattern.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                    refItem);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.isLValue = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        }
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    // Determine the detail-map rest field type implied by the rest binding (if any).
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.getBType();
    } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.getBType(), symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
            ? symTable.errorType.detailType
            : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
    resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
// Type-checks the constituent bindings of an error binding pattern that carries an
// explicit error type reference (indirect error var ref).
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        // Check the bound expression first, then the named arg against that type.
        checkExpr(detailItem.expr, env);
        checkExpr(detailItem, env, detailItem.expr.getBType());
    }
    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, env);
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
    }
}
// Type-checks a tuple binding pattern variable reference (list destructuring LHS):
// each member expression is checked as an lvalue and the resulting tuple type
// (including a rest-descriptor element type, if present) becomes `resultType`.
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    List<BType> results = new ArrayList<>();
    for (int i = 0; i < varRefExpr.expressions.size(); i++) {
        ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true;
        results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType));
    }
    BTupleType actualType = new BTupleType(results);
    if (varRefExpr.restParam != null) {
        BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
        ((BLangVariableReference) restExpr).isLValue = true;
        BType checkedType = checkExpr(restExpr, env, symTable.noType);
        // The rest binding must itself be a list value (array or tuple).
        if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) {
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType);
            resultType = symTable.semanticError;
            return;
        }
        if (checkedType.tag == TypeTags.ARRAY) {
            actualType.restType = ((BArrayType) checkedType).eType;
        } else {
            actualType.restType = checkedType;
        }
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Checks whether a (possibly multidimensional) array type has at least one
 * dimension whose length is still to be inferred (open sealed).
 *
 * @param arrayType array type to inspect
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    // Walk inwards through nested array element types instead of recursing.
    BArrayType current = arrayType;
    while (true) {
        if (current.state == BArrayState.INFERRED) {
            return true;
        }
        if (current.eType.tag != TypeTags.ARRAY) {
            return false;
        }
        current = (BArrayType) current.eType;
    }
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 * Arrow-expression and on-fail environments are returned directly without further traversal.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {
        return env.enclEnv;
    }
    // Keep walking outwards while still inside the given invokable.
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
// Overload of findEnclosingInvokableEnv for lookups initiated from within a record
// type definition: walks outwards past environments whose enclosing type is the
// given record type node, stopping early at arrow-expression or on-fail environments.
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {
        return env.enclEnv;
    }
    if (env.enclType != null && env.enclType == recordTypeNode) {
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}
/**
 * Returns true if {@code symbol} matches one of the given function parameters by
 * both name and type tag.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        boolean sameName = param.symbol.name.equals(symbol.name);
        boolean sameTypeTag = param.getBType().tag == symbol.type.tag;
        if (sameName && sameTypeTag) {
            return true;
        }
    }
    return false;
}
// Field access qualified with an XML namespace prefix (e.g. `x.ns:foo`).
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    checkFieldBasedAccess(nsPrefixedFieldBasedAccess, true);
}
// Plain (non namespace-prefixed) field access expression.
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    checkFieldBasedAccess(fieldAccessExpr, false);
}
// Shared type-checking logic for field access expressions (plain and ns-prefixed).
// Validates optional field access usage and readonly-update rules for lvalues, and
// sets `resultType` to the checked field type.
private void checkFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr, boolean isNsPrefixed) {
    markLeafNode(fieldAccessExpr);
    BLangExpression containerExpression = fieldAccessExpr.expr;
    // Propagate lvalue-ness to the container so nested accesses are checked as assignments.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                fieldAccessExpr.isCompoundAssignmentLValue;
    }
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));
    // An ns-prefixed field access is only meaningful on XML values.
    if (isNsPrefixed && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (fieldAccessExpr.optionalFieldAccess) {
        // `?.` cannot appear on the LHS of an assignment.
        if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
            resultType = symTable.semanticError;
            return;
        }
        actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                names.fromIdNode(fieldAccessExpr.field));
    } else {
        actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
        if (actualType != symTable.semanticError &&
                (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
            if (isAllReadonlyTypes(varRefType)) {
                // Readonly objects may still be initialized from within their own init method.
                if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                            varRefType);
                    resultType = symTable.semanticError;
                    return;
                }
            } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                    isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                        fieldAccessExpr.field.value, varRefType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Returns true if the given type is readonly; for a union, true only when every
 * member type is (recursively) readonly.
 */
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isAllReadonlyTypes(memberType)) {
                return false;
            }
        }
        return true;
    }
    return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
// Returns true if the current enclosing invokable is the `init` method of the given
// object type, i.e. we are initializing the object's own fields.
// NOTE(review): assumes `type` is an object type — the unchecked cast will throw otherwise.
private boolean isInitializationInInit(BType type) {
    BObjectType objectType = (BObjectType) type;
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
    return env.enclInvokable != null && initializerFunc != null &&
            env.enclInvokable.symbol == initializerFunc.symbol;
}
// Returns true if assigning to `fieldName` is disallowed because the field (or the
// whole record) is readonly. For a union, the update is invalid only when it is
// invalid for every member type.
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
    if (type.tag == TypeTags.RECORD) {
        if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
            return true;
        }
        BRecordType recordType = (BRecordType) type;
        for (BField field : recordType.fields.values()) {
            if (!field.name.value.equals(fieldName)) {
                continue;
            }
            return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
        }
        // Field not declared: invalid only when the record is sealed (no rest
        // field the assignment could fall into).
        return recordType.sealed;
    }
    // Non-record here is a union (per the caller's isSubTypeOfBaseType(RECORD) guard).
    boolean allInvalidUpdates = true;
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
            allInvalidUpdates = false;
        }
    }
    return allInvalidUpdates;
}
// Returns true if the given field access operates on an XML value — directly, or
// through a nested lax field access whose union type still contains xml/xml:Element.
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression expr = fieldAccessExpr.expr;
    BType exprType = expr.getBType();
    if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) {
        return true;
    }
    if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr)
            && exprType.tag == TypeTags.UNION) {
        Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes();
        return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
    }
    return false;
}
// Type-checks a member access expression `e[k]`. Rejects member access on typedesc,
// validates the container and multi-key usage, enforces readonly-update rules for
// lvalues, and sets `resultType` (expected-type checking is skipped for lvalues).
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    markLeafNode(indexBasedAccessExpr);
    BLangExpression containerExpression = indexBasedAccessExpr.expr;
    if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                ((BLangTypedescExpr) containerExpression).typeNode);
        resultType = symTable.semanticError;
        return;
    }
    // Propagate lvalue-ness to the container expression.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                indexBasedAccessExpr.isCompoundAssignmentLValue;
    }
    // A container already typed as string is not re-checked.
    boolean isStringValue = containerExpression.getBType() != null
            && containerExpression.getBType().tag == TypeTags.STRING;
    if (!isStringValue) {
        checkExpr(containerExpression, this.env, symTable.noType);
    }
    // Multi-key member access (`t[k1, k2]`) is only valid on tables.
    if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
            containerExpression.getBType().tag != TypeTags.TABLE) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                containerExpression.getBType());
        resultType = symTable.semanticError;
        return;
    }
    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
    BType exprType = containerExpression.getBType();
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    if (actualType != symTable.semanticError &&
            (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
        if (isAllReadonlyTypes(exprType)) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                    exprType);
            resultType = symTable.semanticError;
            return;
        } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) &&
                (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) &&
                isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                    getConstFieldName(indexExpr), exprType);
            resultType = symTable.semanticError;
            return;
        }
    }
    // For lvalues the member type itself is the result; the expected type is not enforced.
    if (indexBasedAccessExpr.isLValue) {
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.setBType(actualType);
        resultType = actualType;
        return;
    }
    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
// Type-checks a function/method invocation. A plain call (no attached expression)
// goes through function resolution; otherwise the receiver's type decides between an
// object method call, a record-field function pointer call, or a lang-lib call.
public void visit(BLangInvocation iExpr) {
    if (iExpr.expr == null) {
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }
    checkExpr(iExpr.expr, this.env, symTable.noType);
    BType varRefType = iExpr.expr.getBType();
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(iExpr, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.SEMANTIC_ERROR:
            // Receiver already failed checking; avoid cascading diagnostics.
            break;
        default:
            checkInLangLib(iExpr, varRefType);
    }
}
// Checks the positional args of an error constructor: the first must be a string
// (the message) and the optional second must be `error?` (the cause).
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.isEmpty()) {
        return;
    }
    checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType);
    int positionalArgCount = errorConstructorExpr.positionalArgs.size();
    if (positionalArgCount > 1) {
        checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType);
    }
}
// Type-checks an expression without emitting diagnostics: mutes the diagnostic log,
// restores the previous error count afterwards, and only unmutes if this call was
// the outermost silent check (the flag supports nested silent checks).
private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) {
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int errorCount = this.dlog.errorCount();
    this.dlog.mute();
    BType type = checkExpr(expr, env, expType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    dlog.setErrorCount(errorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return type;
}
// Builds a synthetic record literal from the named args of an error constructor so
// the detail mapping can be type-checked like an ordinary record literal.
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        BLangRecordKeyValueField field =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        field.valueExpr = (BLangExpression) namedArg.getExpression();
        // The named-arg name becomes a string-literal record key.
        BLangLiteral expr = new BLangLiteral();
        expr.value = namedArg.getName().value;
        expr.setBType(symTable.stringType);
        field.key = new BLangRecordKey(expr);
        recordLiteral.fields.add(field);
    }
    return recordLiteral;
}
// Determines the candidate error type(s) an error constructor could produce: from an
// explicit `error:Type(...)` reference when present, otherwise derived from the
// expected type; falls back to the generic `error` type.
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
    if (errorTypeRef == null) {
        if (expType.tag == TypeTags.ERROR) {
            return List.of(expType);
        } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
            return expandExpectedErrorTypes(expType);
        }
    } else {
        // An explicit type reference must actually resolve to an error type.
        if (errorTypeRef.getBType().tag != TypeTags.ERROR) {
            if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) {
                dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
                errorConstructorExpr.errorTypeRef.setBType(symTable.semanticError);
            }
        } else {
            return List.of(errorTypeRef.getBType());
        }
    }
    return List.of(symTable.errorType);
}
/**
 * Expands an expected type into the list of error-type candidates it contains:
 * for a union, each error-assignable member; otherwise the type itself when it is
 * error-assignable. Intersection types contribute their effective type.
 *
 * @param candidateType expected type to expand
 * @return error-type candidates (possibly empty), in member order
 */
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    List<BType> expandedCandidates = new ArrayList<>();
    if (candidateType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) {
            addErrorTypeCandidate(memberType, expandedCandidates);
        }
    } else {
        addErrorTypeCandidate(candidateType, expandedCandidates);
    }
    return expandedCandidates;
}

/**
 * Adds {@code type} to {@code expandedCandidates} if it is assignable to `error`,
 * unwrapping an intersection to its effective type. Extracted to remove the
 * duplicated unwrap logic that previously appeared in both branches.
 */
private void addErrorTypeCandidate(BType type, List<BType> expandedCandidates) {
    if (!types.isAssignable(type, symTable.errorType)) {
        return;
    }
    if (type.tag == TypeTags.INTERSECTION) {
        expandedCandidates.add(((BIntersectionType) type).effectiveType);
    } else {
        expandedCandidates.add(type);
    }
}
// Type-checks a remote/resource action invocation. Receiver-less calls are delegated
// to function resolution; otherwise the receiver must be an object (client) or a
// record field holding a function pointer.
public void visit(BLangInvocation.BLangActionInvocation aInv) {
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv);
        return;
    }
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }
    checkExpr(aInv.expr, this.env, symTable.noType);
    BLangExpression varRef = aInv.expr;
    switch (varRef.getBType().tag) {
        case TypeTags.OBJECT:
            checkActionInvocation(aInv, (BObjectType) varRef.getBType());
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(aInv, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
            resultType = symTable.semanticError;
            break;
        case TypeTags.SEMANTIC_ERROR:
        default:
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
            resultType = symTable.semanticError;
            break;
    }
}
// Reports an error and returns true if the invocation carries a module alias where
// none is allowed (method-call syntax). The identity comparison with Names.EMPTY
// presumably relies on Name interning — TODO confirm against Names.fromIdNode.
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    Name pkgAlias = names.fromIdNode(invocation.pkgAlias);
    if (pkgAlias != Names.EMPTY) {
        dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
        return true;
    }
    return false;
}
// Type-checks a let expression: creates a dedicated scope under a fresh let symbol,
// analyzes each let variable declaration in order, then checks the result expression
// against the expected type.
public void visit(BLangLetExpression letExpression) {
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
            new Name(String.format("$let_symbol_%d$", letCount++)),
            env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner,
            letExpression.pos);
    letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType);
    types.checkType(letExpression, exprType, this.expType);
}
// Resolves a method call on a non-object receiver as a lang-lib function and then
// validates that the call does not illegally mutate a readonly or fixed-length value.
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        resultType = symTable.semanticError;
        return;
    }
    if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        return;
    }
    checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
}
// Reports an error and returns true when a lang-lib call would mutate a readonly
// value. "mergeJson" and "strip" have carve-outs: for certain receiver types those
// calls are treated as non-mutating and allowed.
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }
    String packageId = langLibMethodSymbol.pkgID.name.value;
    // Only functions registered as value-modifiers for their lang-lib module apply.
    if (!modifierFunctions.containsKey(packageId)) {
        return false;
    }
    String funcName = langLibMethodSymbol.name.value;
    if (!modifierFunctions.get(packageId).contains(funcName)) {
        return false;
    }
    if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) {
        return false;
    }
    if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) {
        return false;
    }
    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    resultType = symTable.semanticError;
    return true;
}
/**
 * Returns true when the given list type has a fixed length: a non-open array, a
 * tuple with no rest type, or a union whose members are all fixed-length lists.
 */
private boolean isFixedLengthList(BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (type.tag == TypeTags.TUPLE) {
        return ((BTupleType) type).restType == null;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType member : ((BUnionType) type).getMemberTypes()) {
            if (!isFixedLengthList(member)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
// Rejects list length-modifying lang-lib calls on fixed-length lists, and `shift`
// on tuples whose member types differ from the rest type (the call could violate the
// tuple's shape).
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }
    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
        return;
    }
    if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
        return;
    }
}
/**
 * Returns true when the invocation is `shift` on a tuple whose member types differ
 * from its rest type, or on a union in which every member is such a tuple.
 */
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    // Anything other than `shift` is never an incompatible-tuple case.
    if (invocationName.compareTo(FUNCTION_NAME_SHIFT) != 0) {
        return false;
    }
    if (varRefType.tag == TypeTags.TUPLE) {
        return hasDifferentTypeThanRest((BTupleType) varRefType);
    }
    if (varRefType.tag == TypeTags.UNION) {
        for (BType member : ((BUnionType) varRefType).getMemberTypes()) {
            if (member.tag != TypeTags.TUPLE || !hasDifferentTypeThanRest((BTupleType) member)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Returns true when the tuple has a rest type and at least one fixed member whose
 * type differs from that rest type.
 */
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
    BType restType = tupleType.restType;
    if (restType == null) {
        return false;
    }
    for (BType member : tupleType.getTupleTypes()) {
        boolean sameAsRest = types.isSameType(restType, member);
        if (!sameAsRest) {
            return true;
        }
    }
    return false;
}
// Attempts to resolve a method call on a record value as an invocation of a
// function-typed field, falling back to lang-lib lookup when the field is missing or
// not a function. Returns true only when a field function pointer call was bound.
private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
    BType type = checkExpr(iExpr.expr, env);
    BLangIdentifier invocationIdentifier = iExpr.name;
    if (type == symTable.semanticError) {
        return false;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
            type.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
                invocationIdentifier, type);
        return false;
    }
    if (fieldSymbol.kind != SymbolKind.FUNCTION) {
        checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                fieldSymbol.type);
        return false;
    }
    // Bind the invocation to the field's function symbol and check its arguments.
    iExpr.symbol = fieldSymbol;
    iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
    checkInvocationParamAndReturnType(iExpr);
    iExpr.functionPointerInvocation = true;
    return true;
}
// Falls back to a lang-lib method lookup; if none exists, logs the supplied error
// code with the given args, otherwise validates the call against readonly-update rules.
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                        DiagnosticErrorCode errCode, Object... diagMsgArgs) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, errCode, diagMsgArgs);
        resultType = symTable.semanticError;
    } else {
        checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
    }
}
// Type-checks an `object { ... }` constructor expression: propagates the expected
// type's type-id set onto the anonymous class's object type (or reports an invalid
// expected type) before checking the underlying `new` (type init) expression.
@Override
public void visit(BLangObjectConstructorExpression objectCtorExpression) {
    if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
        BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType();
        if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) {
            BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType;
            objectType.typeIdSet = expObjType.typeIdSet;
        } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
            // Non-object expected type must still denote a single definite object type.
            if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                        objectCtorExpression.expectedType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    visit(objectCtorExpression.typeInit);
}
// Returns true if `type` denotes a single definite object type (possibly through a
// union): all reachable object members must share at most one type-id set, which is
// collected into `typeIdSets` as a side effect.
private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) {
    if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) {
        return false;
    }
    Set<BType> visitedTypes = new HashSet<>();
    if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) {
        return false;
    }
    return typeIdSets.size() <= 1;
}
// Recursively gathers the type-id sets of all object types reachable through unions
// into `typeIdSets`. `visitedTypes` guards against cyclic union definitions. Returns
// false when a member that is neither object nor union is encountered.
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
    if (type.tag == TypeTags.OBJECT) {
        var objectType = (BObjectType) type;
        typeIdSets.add(objectType.typeIdSet);
        return true;
    }
    if (type.tag == TypeTags.UNION) {
        // Already-visited unions are treated as success to break cycles.
        if (!visitedTypes.add(type)) {
            return true;
        }
        for (BType member : ((BUnionType) type).getMemberTypes()) {
            if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Verifies that {@code type} denotes a single definite object type and, if so,
 * copies its (sole) type-id set onto {@code objectType}. Returns false when the
 * type is not a definite object type.
 */
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
    Set<BTypeIdSet> typeIdSets = new HashSet<>();
    if (!isDefiniteObjectType(type, typeIdSets)) {
        return false;
    }
    if (typeIdSets.isEmpty()) {
        objectType.typeIdSet = BTypeIdSet.emptySet();
    } else {
        // isDefiniteObjectType guarantees at most one entry here.
        objectType.typeIdSet = typeIdSets.iterator().next();
    }
    return true;
}
// Type-checks a `new` / `new T(...)` expression. The constructed type comes from an
// explicit user-defined type reference when present, otherwise from the expected
// type; valid targets are classes, streams, and unions containing a matching class.
public void visit(BLangTypeInit cIExpr) {
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        actualType = expType;
    }
    if (actualType == symTable.semanticError) {
        resultType = symTable.semanticError;
        return;
    }
    if (actualType.tag == TypeTags.INTERSECTION) {
        actualType = ((BIntersectionType) actualType).effectiveType;
    }
    switch (actualType.tag) {
        case TypeTags.OBJECT:
            BObjectType actualObjectType = (BObjectType) actualType;
            // `object { ... }` constructor expression: analyze its synthesized class first,
            // handling readonly expectations (explicit or via the first type reference).
            if (isObjectConstructorExpr(cIExpr, actualObjectType)) {
                BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr,
                        env);
                List<BLangType> typeRefs = classDefForConstructor.typeRefs;
                SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
                if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
                    handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                            false);
                } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
                        Flags.READONLY)) {
                    handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                            true);
                } else {
                    analyzeObjectConstructor(classDefForConstructor, pkgEnv);
                }
                markConstructedObjectIsolatedness(actualObjectType);
            }
            // Only classes (not abstract object types) can be instantiated.
            if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                        actualType.tsymbol);
                // Still type-check the args so nested errors are reported.
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                resultType = symTable.semanticError;
                return;
            }
            if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation);
                cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
            } else {
                // No init method: `new` must not be given any arguments.
                if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                    return;
                }
            }
            break;
        case TypeTags.STREAM:
            if (cIExpr.initInvocation.argExprs.size() > 1) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                resultType = symTable.semanticError;
                return;
            }
            BStreamType actualStreamType = (BStreamType) actualType;
            // A declared completion type must be nil or contain an error type.
            if (actualStreamType.completionType != null) {
                BType completionType = actualStreamType.completionType;
                if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                    resultType = symTable.semanticError;
                    return;
                }
            }
            if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                // The single constructor argument must be an iterator object whose
                // `next()` return type matches the stream's constraint/completion types.
                BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                if (constructType.tag != TypeTags.OBJECT) {
                    dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                            expectedNextReturnType, constructType);
                    resultType = symTable.semanticError;
                    return;
                }
                // Iterators with a `close` method must satisfy the closeable-iterator
                // abstract type; others the plain iterator type.
                BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                        BLangCompilerConstants.CLOSE_FUNC);
                if (closeFunc != null) {
                    BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, closeableIteratorType)) {
                        dlog.error(iteratorExpr.pos,
                                DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                } else {
                    BType iteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, iteratorType)) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                if (nextReturnType != null) {
                    types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                } else {
                    dlog.error(constructType.tsymbol.getPosition(),
                            DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                }
            }
            if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                        actualType);
                resultType = symTable.semanticError;
                return;
            }
            resultType = actualType;
            return;
        case TypeTags.UNION:
            // Pick the single union member whose init function matches the given args.
            List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
            BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
            cIExpr.initInvocation.setBType(symTable.nilType);
            if (matchedType.tag == TypeTags.OBJECT) {
                if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                    actualType = matchedType;
                    break;
                } else {
                    if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                        return;
                    }
                }
            }
            types.checkType(cIExpr, matchedType, expType);
            cIExpr.setBType(matchedType);
            resultType = matchedType;
            return;
        default:
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
            resultType = symTable.semanticError;
            return;
    }
    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    // The overall type also carries any error types the init method may return.
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
    resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
// Builds the `record {| T value; |}|C` union that a stream iterator's `next()` is
// expected to return, where T is the stream's constraint type and C the members of
// its completion type.
private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
    BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
    recordType.restFieldType = symTable.noType;
    recordType.sealed = true;
    Name fieldName = Names.VALUE;
    BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
            fieldName, env.enclPkg.packageID,
            streamType.constraint, env.scope.owner, pos, VIRTUAL));
    field.type = streamType.constraint;
    recordType.fields.put(field.name.value, field);
    recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
            recordType, env.scope.owner, pos, VIRTUAL);
    recordType.tsymbol.scope = new Scope(env.scope.owner);
    recordType.tsymbol.scope.define(fieldName, field.symbol);
    // Union of the value record and the completion-type members, preserving order.
    LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
    retTypeMembers.add(recordType);
    retTypeMembers.addAll(types.getAllTypes(streamType.completionType));
    BUnionType unionType = BUnionType.create(null);
    unionType.addAll(retTypeMembers);
    unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
            env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
    return unionType;
}
/**
 * Validates a {@code new} expression against an object type: passing arguments
 * to an object that declares no {@code init} method is an error.
 *
 * @return {@code false} (with {@code resultType} set to semanticError) when
 *         arguments were supplied but no initializer exists; {@code true} otherwise
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    if (!cIExpr.initInvocation.argExprs.isEmpty()
            && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
                cIExpr.initInvocation.name.value);
        // Still type-check each argument so errors nested inside them are reported.
        cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * Computes the type of a {@code new T(...)} expression from the object type
 * and the {@code init} method's return type: {@code T} when init returns nil,
 * otherwise {@code T | E1 | ...} built from init's union with nil removed.
 * Any other init return shape yields semanticError.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(objType);
        retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
        // A successful init yields the object itself, never nil.
        retTypeMembers.remove(symTable.nilType);
        BUnionType unionType = BUnionType.create(null, retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                Names.EMPTY, env.enclPkg.symbol.pkgID, unionType,
                env.scope.owner, symTable.builtinPos, VIRTUAL);
        return unionType;
    } else if (initRetType.tag == TypeTags.NIL) {
        return objType;
    }
    return symTable.semanticError;
}
/**
 * Collects the object members of an expected union type whose {@code init}
 * signature can accept the arguments of the given {@code new} expression.
 * When the union contains exactly one object member (directly or behind an
 * intersection) that member is returned without matching arguments.
 *
 * @return the candidate member types; empty when nothing matches
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    // Count object members, including objects reached through an intersection's
    // effective type, to enable the single-object shortcut below.
    int objectCount = 0;
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        int tag = memberType.tag;
        if (tag == TypeTags.OBJECT) {
            objectCount++;
            continue;
        }
        if (tag != TypeTags.INTERSECTION) {
            continue;
        }
        if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
            objectCount++;
        }
    }
    boolean containsSingleObject = objectCount == 1;
    List<BType> matchingLhsMemberTypes = new ArrayList<>();
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.OBJECT) {
            continue;
        }
        // Only classes are instantiable; abstract object types cannot be `new`-ed.
        if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                    lhsUnionType.tsymbol);
        }
        if (containsSingleObject) {
            return Collections.singletonList(memberType);
        }
        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
            matchingLhsMemberTypes.add(memberType);
        }
    }
    return matchingLhsMemberTypes;
}
/**
 * Resolves the single object type to construct from the candidate members of
 * an expected union. Zero candidates or more than one candidate is an error
 * (uninferable vs. ambiguous), and semanticError is returned in both cases.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    // Exactly one candidate is required to infer the constructed type.
    if (matchingLhsMembers.size() == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    DiagnosticErrorCode errorCode = matchingLhsMembers.isEmpty()
            ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticErrorCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, errorCode, lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments are compatible with the given
 * attached function ({@code init}) signature: positional args against declared
 * params (overflow against the rest param), then named args against the
 * remaining params. Returns true only when every required parameter ends up
 * supplied.
 *
 * @param invocationArguments arguments of the `new` expression (also type-checked here)
 * @param function            candidate initializer; null means "no init declared"
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    // Type-check all arguments up front so their own errors surface regardless of match.
    invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));
    if (function == null) {
        return invocationArguments.isEmpty();
    }
    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }
    // Split arguments into positional and named.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            positionalArgs.add(argument);
        }
    }
    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.isDefaultable)
            .collect(Collectors.toList());
    // More required params than total args can never match.
    if (requiredParams.size() > invocationArguments.size()) {
        return false;
    }
    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.isDefaultable)
            .collect(Collectors.toList());
    int givenRequiredParamCount = 0;
    for (int i = 0; i < positionalArgs.size(); i++) {
        if (function.symbol.params.size() > i) {
            givenRequiredParamCount++;
            BVarSymbol functionParam = function.symbol.params.get(i);
            if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                return false;
            }
            // Positionally supplied params are no longer outstanding.
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
            continue;
        }
        // Extra positional args must fit the rest parameter's element type.
        if (function.symbol.restParam != null) {
            BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
            if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                return false;
            }
            continue;
        }
        return false;
    }
    for (BLangNamedArgsExpression namedArg : namedArgs) {
        boolean foundNamedArg = false;
        // Named args may only target params after the positionally-covered prefix.
        List<BVarSymbol> params = function.symbol.params;
        for (int i = givenRequiredParamCount; i < params.size(); i++) {
            BVarSymbol functionParam = params.get(i);
            if (!namedArg.name.value.equals(functionParam.name.value)) {
                continue;
            }
            foundNamedArg = true;
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // NOTE(review): argument order here is isAssignable(paramType, argType),
            // the reverse of the positional check above — confirm this is intentional.
            if (!types.isAssignable(functionParam.type, namedArgExprType)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
        }
        if (!foundNamedArg) {
            return false;
        }
    }
    // Match only if every required parameter was supplied.
    return requiredParams.size() <= 0;
}
/**
 * Type-checks a multiple-wait expression {@code wait {a: f1, b: f2}} against
 * the expected type: field-by-field for records, constraint-wise for maps, and
 * as an inferred map for no/any expectation. Anything else is an error.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr);
            break;
        case TypeTags.MAP:
            // Check every key-value against the map's constraint, then build the
            // actual map type from the union of the futures' eventual types.
            checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
            LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypesForMap.size() == 1) {
                resultType = new BMapType(TypeTags.MAP,
                        memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
            resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No concrete expectation: infer a map whose constraint is the union
            // of the individual wait results.
            checkTypesForMap(waitForAllExpr, expType);
            LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypes.size() == 1) {
                resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintType = BUnionType.create(null, memberTypes);
            resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
            resultType = symTable.semanticError;
            break;
    }
    waitForAllExpr.setBType(resultType);
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
    }
}
/**
 * Builds the record type a multiple-wait expression would naturally produce:
 * one field per key-value, typed by the referenced symbol's type (the future's
 * constraint when the symbol is a future). Used for error reporting against an
 * incompatible expected type.
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                Location pos) {
    BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
    List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangIdentifier fieldName;
        // `wait {a}` shorthand: the key doubles as the referenced variable name.
        if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            fieldName = keyVal.key;
        } else {
            fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
        }
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
        // A future contributes its constraint type; anything else its own type.
        BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
        BField field = new BField(names.fromIdNode(keyVal.key), null,
                new BVarSymbol(0, names.fromIdNode(keyVal.key),
                        names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID,
                        fieldType, null, keyVal.pos, VIRTUAL));
        retType.fields.put(field.name.value, field);
    }
    retType.restFieldType = symTable.noType;
    retType.sealed = true;
    retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
            pos, VIRTUAL);
    return retType;
}
/**
 * Gathers the eventual result types of the given wait key-values, unwrapping
 * futures to their constraint types. Insertion order is preserved.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        // Prefer the key expression's type when present; otherwise the value's.
        BType exprType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType();
        BType memberType = exprType.tag == TypeTags.FUTURE
                ? ((BFutureType) exprType).constraint
                : exprType;
        memberTypes.add(memberType);
    }
    return memberTypes;
}
/**
 * Type-checks every key-value of a multiple-wait expression against the
 * expected map constraint type.
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType);
    }
}
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
Map<String, BField> lhsFields = ((BRecordType) expType).fields;
if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
resultType = symTable.semanticError;
return;
}
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
String key = keyVal.key.value;
if (!lhsFields.containsKey(key)) {
if (((BRecordType) expType).sealed) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
resultType = symTable.semanticError;
} else {
BType restFieldType = ((BRecordType) expType).restFieldType;
checkWaitKeyValExpr(keyVal, restFieldType);
}
} else {
checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
}
}
checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos);
if (symTable.semanticError != resultType) {
resultType = expType;
}
}
/**
 * Reports an error for every required field of the expected record type that
 * is not supplied by the wait expression's key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          Location pos) {
    for (BField field : type.fields.values()) {
        boolean hasField = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                hasField = true;
                break;
            }
        }
        if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Type-checks one key-value of a multiple-wait expression against
 * {@code future<type>} and records the eventual (error-augmented) type on the
 * expression.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
    BLangExpression expr;
    if (keyVal.keyExpr != null) {
        // `wait {a}` shorthand: resolve the key itself as the waited variable.
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
        keyVal.keyExpr.setBType(symbol.type);
        expr = keyVal.keyExpr;
    } else {
        expr = keyVal.valueExpr;
    }
    BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
    checkExpr(expr, env, futureType);
    setEventualTypeForExpression(expr, type);
}
/**
 * Widens the future constraint of a waited expression to include error
 * (the eventual type {@code T|error}), unless the expression is a direct
 * worker reference or the constraint already covers errors. Reports an error
 * when the widened type no longer fits the currently expected type.
 *
 * @param expression          the waited (future-typed) expression; may be null
 * @param currentExpectedType the type this wait result is checked against
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                          BType currentExpectedType) {
    if (expression == null) {
        return;
    }
    // Direct worker references keep their declared future type untouched.
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BFutureType futureType = (BFutureType) expression.expectedType;
    BType currentType = futureType.constraint;
    if (types.containsErrorType(currentType)) {
        return;
    }
    BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
    // Only validate against a concrete expectation (not none/nil).
    if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
            !types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                currentExpectedType, eventualType, expression);
    }
    futureType.constraint = eventualType;
}
/**
 * After checking a single {@code wait} expression, widens its result type to
 * {@code T|error} unless the waited expression is a direct worker reference or
 * the result already covers errors. Reports an error if the widened type does
 * not fit the expected future constraint.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression,
                                              Location pos) {
    // Nothing to do when checking already failed or errors are already covered.
    if ((resultType == symTable.semanticError) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    // Direct worker references keep their declared result type.
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    // With no concrete expectation, the eventual type simply becomes the result.
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    // Push the widened type into the future's constraint, or adopt it directly.
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * After checking an alternate-wait expression ({@code wait f1 | f2}), widens
 * its result type to {@code T|error} when any alternative references something
 * other than a worker. Mirrors {@link #setEventualTypeForWaitExpression} for
 * the binary (alternate) form.
 *
 * <p>Fix: the original body repeated the {@code types.containsErrorType(resultType)}
 * test immediately after the guard that already performs it; the redundant
 * second check has been removed (no behavior change).
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
    // Skip when checking failed, this is not an alternate wait, or the result
    // already covers errors.
    if ((resultType == symTable.semanticError) ||
            (expression.getKind() != NodeKind.BINARY_EXPR) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    // Only widen when some alternative is not a plain worker reference.
    if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    // With no concrete expectation, the eventual type simply becomes the result.
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    // Push the widened type into the future's constraint, or adopt it directly.
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Returns true when the expression is a plain variable reference with a
 * resolved symbol whose name denotes a worker in the current environment.
 */
private boolean isSimpleWorkerReference(BLangExpression expression) {
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
    // An unresolved reference cannot be a worker.
    if (simpleVarRef.symbol == null) {
        return false;
    }
    return workerExists(env, simpleVarRef.variableName.value);
}
/**
 * Returns true when either side of an alternate-wait binary expression refers
 * to something other than a worker.
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
    return isReferencingNonWorker(binaryExpr.lhsExpr) || isReferencingNonWorker(binaryExpr.rhsExpr);
}
/**
 * Returns true unless the expression is (or, for nested binaries, every leaf
 * is) a simple variable reference naming a known worker.
 */
private boolean isReferencingNonWorker(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.BINARY_EXPR) {
        // Recurse into nested alternate-wait expressions.
        return isReferencingNonWorker((BLangBinaryExpr) expression);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol varRefSymbol = ((BLangSimpleVarRef) expression).symbol;
        if (workerExists(env, varRefSymbol.getName().value)) {
            return false;
        }
    }
    // Anything that is not a known worker reference counts as a non-worker.
    return true;
}
/**
 * Type-checks a ternary conditional: the condition must be boolean, and the
 * branches are checked under truth/falsity-narrowed environments. Without an
 * expected type, the result is whichever branch type the other is assignable
 * to; mutually incompatible branches are an error.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
    // Each branch sees the condition's type-narrowing in its own direction.
    SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
    BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
    SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
    BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
    if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
            elseType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expectation: pick the wider of the two branch types.
        if (types.isAssignable(elseType, thenType)) {
            resultType = thenType;
        } else if (types.isAssignable(thenType, elseType)) {
            resultType = elseType;
        } else {
            dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
/**
 * Type-checks a single {@code wait} expression: the waited expression is
 * checked against {@code future<expType>}, the future is unwrapped to its
 * constraint (flattening unions of futures), and the eventual {@code T|error}
 * widening is applied for non-worker references.
 */
public void visit(BLangWaitExpr waitExpr) {
    // Wrap the expectation: the operand must be a future of the expected type.
    expType = new BFutureType(TypeTags.FUTURE, expType, null);
    checkExpr(waitExpr.getExpression(), env, expType);
    if (resultType.tag == TypeTags.UNION) {
        // Alternate wait: unwrap each future member to its constraint.
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            resultType = memberTypes.toArray(new BType[0])[0];
        } else {
            resultType = BUnionType.create(null, memberTypes);
        }
    } else if (resultType != symTable.semanticError) {
        resultType = ((BFutureType) resultType).constraint;
    }
    BLangExpression waitFutureExpression = waitExpr.getExpression();
    // Binary operands denote alternate wait; otherwise a single wait.
    if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
        setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
    } else {
        setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
    }
    waitExpr.setBType(resultType);
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
    }
}
/**
 * Adds each member of the union to the given set, unwrapping future members
 * to their constraint types, and returns the same set.
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : unionType.getMemberTypes()) {
        BType collected = memberType.tag == TypeTags.FUTURE
                ? ((BFutureType) memberType).constraint
                : memberType;
        memberTypes.add(collected);
    }
    return memberTypes;
}
/**
 * Type-checks a {@code trap} expression: the result is the operand's type
 * widened with {@code error}. Worker-receive operands require a second visit
 * (the first visit defers with {@code isTypeChecked = false}).
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    // A null type on the operand means this is the first visit to this node.
    boolean firstVisit = trapExpr.expr.getBType() == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, env, expType);
    boolean definedWithVar = expType == symTable.noType;
    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer: the receive's type is only known on a later pass.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = trapExpr.getBType();
            exprType = trapExpr.expr.getBType();
        }
    }
    if (expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        // trap's type = operand type (flattened if a union) plus error.
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }
    resultType = types.checkType(trapExpr, actualType, expType);
    if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
    }
}
/**
 * Type-checks a binary expression. Handles the alternate-wait form
 * ({@code f1 | f2} under a future expectation) specially; otherwise checks
 * both operands (with type narrowing for {@code &&}/{@code ||} and decimal/
 * float expectation propagation) and resolves the operator symbol through a
 * cascade of resolver lookups.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    // `f1 | f2` under future<T> is alternate wait, not bitwise-or.
    if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }
    // Under a decimal expectation, push the expectation into literal operands.
    checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
    SymbolEnv rhsExprEnv;
    BType lhsType;
    // Float/decimal (possibly optional) expectations are tried speculatively.
    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(binaryExpr.expectedType)) {
        lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, env);
    }
    // `&&` narrows the RHS by LHS-truth, `||` by LHS-falsity.
    if (binaryExpr.opKind == OperatorKind.AND) {
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
    } else {
        rhsExprEnv = env;
    }
    BType rhsType;
    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(binaryExpr.expectedType)) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
    }
    BType actualType = symTable.semanticError;
    switch (binaryExpr.opKind) {
        case ADD:
            // xml + xml concatenates sequences; otherwise fall through to
            // the generic operator resolution below.
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);
            if (leftConstituent != null && rightConstituent != null) {
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // fall through
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Try each operator-resolution strategy in turn until one matches.
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr, env);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                            lhsType, rhsType);
                } else {
                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }
    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * Returns true when the expected type is a nullable union containing a float
 * or decimal member (e.g. {@code float?}, {@code decimal|()}), which triggers
 * speculative operand checking in binary expressions.
 *
 * <p>Fix: the original condition also tested {@code expectedType.tag != TypeTags.ANY},
 * which is vacuously true once the tag is known to be {@code UNION}; the
 * redundant clause has been dropped (no behavior change). The explicit
 * iterator loop is replaced with an enhanced for loop.
 */
private boolean isOptionalFloatOrDecimal(BType expectedType) {
    // Only a nullable union can represent an "optional" float/decimal.
    if (expectedType.tag != TypeTags.UNION || !expectedType.isNullable()) {
        return false;
    }
    for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
        if (memberType.tag == TypeTags.FLOAT || memberType.tag == TypeTags.DECIMAL) {
            return true;
        }
    }
    return false;
}
/**
 * Speculatively type-checks a clone of the expression against the binary
 * expression's expected type with diagnostics muted. If the speculative pass
 * succeeds, the real expression is checked with that expectation; otherwise it
 * is checked with no expectation. Diagnostic state is carefully restored.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
    // Save logging state, then silence diagnostics for the speculative pass.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Check a clone so the real AST node is not polluted by the trial.
    expr.cloneAttempt++;
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);
    // Restore logging state; only unmute if we were not already muted on entry.
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        return checkExpr(expr, env, binaryExpr.expectedType);
    } else {
        return checkExpr(expr, env);
    }
}
/**
 * Walks outwards from {@code env} to the environment whose node is
 * {@code node}, and returns a clone of that environment's enclosing scope
 * (the scope just before the input node). Falls back to a fresh environment
 * when the node is not found or has no enclosing scope.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Returns a cloned environment positioned just after the given join node:
 * the clone chain is walked to the join node and its enclosing environment is
 * replaced with the scope before the last input (from/join) clause. A fresh
 * environment is returned when the node is not on the chain.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        // Re-anchor the join env's parent to the scope before the last input clause.
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        clone = new SymbolEnv(node, null);
    }
    return clone;
}
/**
 * Finds the nearest enclosing FROM or JOIN clause node on the environment
 * chain, or null when there is none.
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
/**
 * A {@code transactional} expression always has boolean type.
 */
public void visit(BLangTransactionalExpr transactionalExpr) {
    BType actualType = symTable.booleanType;
    resultType = types.checkType(transactionalExpr, actualType, expType);
}
/**
 * A {@code commit} expression evaluates to {@code error?} (error or nil).
 */
public void visit(BLangCommitExpr commitExpr) {
    BUnionType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(commitExpr, actualType, expType);
}
/**
 * Returns the constituent item type of an xml value: the constraint of an xml
 * sequence, the type itself for a non-sequence xml item, or null for non-xml.
 */
private BType getXMLConstituents(BType type) {
    if (type.tag == TypeTags.XML) {
        return ((BXMLType) type).constraint;
    }
    if (TypeTags.isXMLNonSequenceType(type.tag)) {
        return type;
    }
    return null;
}
/**
 * When a decimal result is expected, pushes the decimal expectation down into
 * both operands of the four basic arithmetic operators so numeric literals are
 * typed as decimal. No-op for other expected types or operators.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind opKind = binaryExpr.opKind;
    boolean isBasicArithmetic = opKind == OperatorKind.ADD || opKind == OperatorKind.SUB
            || opKind == OperatorKind.MUL || opKind == OperatorKind.DIV;
    if (isBasicArithmetic) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression {@code lhs ?: rhs}: the LHS must be a
 * nullable union; its non-nil members form the LHS contribution. Without an
 * expected type, both sides must have the same type.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Strip nil: the elvis result uses only the non-nil members.
            BUnionType unionType = (BUnionType) lhsType;
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (memberTypes.size() == 1) {
                actualType = memberTypes.toArray(new BType[0])[0];
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            // Elvis only makes sense on a nullable LHS.
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // Without an expectation both sides must agree exactly.
        if (types.isSameType(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else {
            dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType,
                    rhsReturnType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
/**
 * A parenthesized expression has exactly the type of its inner expression.
 */
@Override
public void visit(BLangGroupExpr groupExpr) {
    resultType = checkExpr(groupExpr.expression, env, expType);
}
/**
 * Type-checks a typedesc access expression: resolves the referenced type
 * (lazily, once) and wraps it as {@code typedesc<T>} unless resolution already
 * produced a typedesc or nothing (NONE).
 */
public void visit(BLangTypedescExpr accessExpr) {
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    }
    BType resolvedType = accessExpr.resolvedType;
    int resolveTypeTag = resolvedType.tag;
    final BType actualType;
    if (resolveTypeTag == TypeTags.TYPEDESC || resolveTypeTag == TypeTags.NONE) {
        actualType = resolvedType;
    } else {
        actualType = new BTypedescType(resolvedType, null);
    }
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression: {@code untaint} passes the operand type
 * through, {@code typeof} yields {@code typedesc<T>}, and the remaining
 * operators are resolved through the operator symbol table (with decimal
 * expectation propagated into {@code +}/{@code -} operands).
 */
public void visit(BLangUnaryExpr unaryExpr) {
    BType exprType;
    BType actualType = symTable.semanticError;
    if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = exprType;
        }
    } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = new BTypedescType(exprType, null);
        }
    } else {
        // Under a decimal expectation, +/- push the expectation into the operand
        // so numeric literals are typed as decimal.
        boolean decimalAddNegate = expType.tag == TypeTags.DECIMAL &&
                (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.SUB.equals(unaryExpr.operator));
        exprType = decimalAddNegate ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, exprType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }
    resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a cast expression {@code <T> expr}: analyzes attached
 * annotations, resolves the target type, speculatively checks a clone of the
 * operand against the target (diagnostics muted) to decide whether the real
 * check uses the target expectation, then validates castability.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.semanticError;
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, this.env);
    }
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        // Annotation-only conversion (no target type): just check the operand.
        if (!conversionExpr.annAttachments.isEmpty()) {
            resultType = checkExpr(expr, env, this.expType);
        }
        return;
    }
    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
            symResolver.resolveTypeNode(conversionExpr.typeNode, env));
    conversionExpr.targetType = targetType;
    // Speculative pass on a clone with diagnostics muted; state restored after.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    // Use the target expectation when the trial succeeded or inference demands it.
    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        checkExpr(expr, env, targetType);
    } else {
        checkExpr(expr, env, symTable.noType);
    }
    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    resultType = types.checkType(conversionExpr, actualType, this.expType);
}
/**
 * Type-checks a lambda: the lambda's type is the wrapped function's type.
 * The current scope is captured for later closure resolution, and the lambda
 * is registered for desugaring only on real (error-logging) passes.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType());
    // Capture the enclosing scope so closure variables can be resolved later.
    bLangLambdaFunction.capturedClosureEnv = env.createClone();
    // Skip registration during speculative (muted) checking to avoid duplicates.
    if (!this.nonErrorLoggingCheck) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType);
}
/**
 * Type-checks an arrow function against the expected type: the expectation
 * must be (or contain exactly one) non-any-function invokable type, from which
 * the parameter types and (if not inferred) the return type are taken.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BType expectedType = expType;
    if (expectedType.tag == TypeTags.UNION) {
        // A union expectation is usable only if it has exactly one invokable member.
        BUnionType unionType = (BUnionType) expectedType;
        BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
                .collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
                            if (list.size() != 1) {
                                return null;
                            }
                            return list.get(0);
                        }
                ));
        if (invokableType != null) {
            expectedType = invokableType;
        }
    }
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
    // An unspecified (NONE) return type is inferred from the body expression.
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name: handles xmlns declarations inside
 * attributes, validates the prefix, and resolves the prefix to a namespace
 * symbol (possibly via a string constant in an imported package).
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // `xmlns` as the local name of an unprefixed attribute declares a default namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns:p="..."` declares a prefixed namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // Outside attribute position, `xmlns` is not a usable prefix.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // Unprefixed names need no namespace resolution.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }
    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // A package prefix may still resolve via a string constant holding the namespace.
    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        resultType = symTable.semanticError;
        return;
    }
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace from a string constant in an imported package.
 * The constant's value is expected to follow the expanded-QName form
 * {@code {namespaceURI}localPart}; a synthetic XMLNS symbol is created from it.
 *
 * @param localname the constant name referenced after the package prefix
 * @param prefix    the package prefix (used only for error reporting)
 * @param pkgSymbol the imported package to search
 * @param pos       location used for diagnostics and the created symbol
 * @return a new {@code BXMLNSSymbol}, or {@code null} on any failure
 *         (an error is logged before returning null)
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        // Suppress the error for recovery (missing) nodes inserted by the parser.
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    // Requires a non-empty URI between '{' and '}'; also rejects values with no braces
    // (both indexes -1 fail the check).
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // BUGFIX: start after '}' so the local part does not include the brace.
        String local = constVal.substring(e + 1);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
/**
 * Type-checks an XML attribute: both the QName and the quoted-string value are
 * checked against {@code string}, and the attribute is defined in the current scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    // Check the attribute name (namespace resolution happens in the QName visitor).
    BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(name, xmlAttributeEnv, symTable.stringType);
    // Unprefixed attributes do not inherit the default namespace; clear any URI.
    if (name.prefix.value.isEmpty()) {
        name.namespaceURI = null;
    }
    // Check the attribute value.
    checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal. Namespace-declaration attributes are
 * checked first (so later prefix lookups can resolve), then the remaining
 * attributes, then the in-scope namespaces actually used by the element are
 * recorded, tags are validated, and children are normalized.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Keep track of used namespace prefixes in this element and only add
    // namespace attr symbols for them.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // First pass: visit in-line namespace declarations so prefixes resolve later.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // A namespace URI must be a plain literal; interpolation is disallowed.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Second pass: visit the remaining (non-namespace) attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    // The default namespace is stored separately from prefixed ones.
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    // Record only the namespaces whose prefixes are actually used by this element.
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    // Validate start/end tag names.
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    // Merge adjacent children of the same XML kind (e.g. consecutive text fragments).
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            this.expType);
    // A readonly-typed element makes its children immutable too.
    if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
        markChildrenAsImmutable(bLangXMLElementLiteral);
    }
}
/**
 * Returns {@code true} if the given attribute is an XML namespace declaration:
 * either the default-namespace form {@code xmlns="..."} (empty prefix with the
 * local name "xmlns") or the prefixed form {@code xmlns:p="..."}.
 */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName qName = (BLangXMLQName) attribute.name;
    String prefixValue = qName.prefix.value;
    if (prefixValue.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        return true;
    }
    return prefixValue.isEmpty() && qName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/**
 * Maps an XML child literal's node kind to the corresponding XML subtype.
 * Anything that is not an element, text, or PI literal is treated as a comment.
 */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            return symTable.xmlCommentType;
    }
}
/**
 * Silences diagnostic logging for a speculative (look-ahead) type check.
 * Callers must restore the previous state via {@link #unMuteErrorLog}.
 */
public void muteErrorLog() {
    this.nonErrorLoggingCheck = true;
    this.dlog.mute();
}
/**
 * Restores the error-logging state saved before a speculative type check.
 *
 * @param prevNonErrorLoggingCheck the flag value captured before muting
 * @param errorCount               the error count to restore (discards errors
 *                                 logged while muted)
 */
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(errorCount);
    // Only unmute if the log was not already muted by an enclosing speculative check.
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
}
/**
 * Wraps a single XML subtype in the corresponding sequence type
 * ({@code xml<T>}). Text (and any other tag) maps to {@code xml:Text},
 * which is already its own sequence.
 */
public BType getXMLSequenceType(BType xmlSubType) {
    if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    // xml:Text needs no wrapping — a text sequence is itself text.
    return symTable.xmlTextType;
}
/**
 * Type-checks an XML sequence literal. The expected type must be xml,
 * xml:Text, a union of those, or unknown; each item is checked against the
 * expected type and the overall sequence type is derived from the item types.
 */
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    // Only xml-ish expected types are compatible with a sequence literal.
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                "XML Sequence");
        resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct item types in the sequence.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        resultType = checkExpr(expressionItem, env, expType);
        if (!xmlTypesInSequence.contains(resultType)) {
            xmlTypesInSequence.add(resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        // A homogeneous sequence gets the precise xml<T> sequence type.
        if (xmlTypesInSequence.size() == 1) {
            resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        // Mixed item kinds fall back to plain xml.
        resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        resultType = symTable.xmlTextType;
        return;
    }
    // Union expected type: every member must itself be xml or xml:Text.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = symTable.xmlType;
}
/**
 * Type-checks an XML text literal. An empty text literal has the singleton
 * type {@code xml<never>}; otherwise the type is {@code xml:Text}.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(literalValues);
    BLangExpression xmlExpression = literalValues.get(0);
    // A single empty string fragment means the text is empty → xml<never>.
    if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL &&
            ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) {
        resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType);
        return;
    }
    resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType);
}
/**
 * Type-checks an XML comment literal against the expected type;
 * its inherent type is {@code xml:Comment}.
 */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
            this.expType);
}
/**
 * Type-checks an XML processing-instruction literal; the target and data
 * fragments must be strings, and the inherent type is {@code xml:ProcessingInstruction}.
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType);
}
/**
 * Type-checks a quoted string in XML (e.g. an attribute value); its type is {@code string}.
 */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/**
 * XML attribute-access syntax ({@code x@["attr"]}) is deprecated;
 * always reports an error and yields a semantic-error type.
 */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    dlog.error(xmlAttributeAccessExpr.pos,
            DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
    resultType = symTable.semanticError;
}
/**
 * Type-checks a string template literal; all interpolations must have
 * string-compatible types, and the result type is {@code string}.
 */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
/**
 * Type-checks a raw template literal. The applicable object type is determined
 * from the expected type, then the template's string parts and insertions are
 * checked against that object's 'strings' and 'insertions' field types.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }
    // Once we ensure the types are compatible, need to ensure the fields of the
    // object type line up with the template parts.
    BObjectType literalType = (BObjectType) type;
    BType stringsType = literalType.fields.get("strings").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    BType insertionsType = literalType.fields.get("insertions").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    resultType = type;
}
/**
 * Determines the object type a raw template literal should be checked against.
 * Falls back to {@code object:RawTemplate} when the context gives no hint,
 * and validates that the chosen type is a non-class object with at most the
 * two expected fields and no methods.
 *
 * @return the applicable object type, or the semantic-error type on failure
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    // If the contextual type is 'any'/unknown, use the default RawTemplate type.
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Raw templates can only be assigned to abstract object types, not classes.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) type;
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // Only the 'strings' and 'insertions' fields are allowed.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks a raw template's string parts or insertions against the list type of
 * the corresponding object field (array or tuple, possibly behind an
 * intersection).
 *
 * @param exprs     the template fragments to check
 * @param fieldType the declared field type ('strings' or 'insertions')
 * @param code      diagnostic to report on an arity mismatch
 * @param pos       template position for diagnostics
 * @return {@code true} if any fragment failed type checking
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos) {
    // Unwrap an intersection (e.g. readonly & string[]) to its effective type.
    BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType :
            ((BIntersectionType) fieldType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            // NOTE(review): an error is logged but false is returned here — presumably
            // the logged diagnostic is considered sufficient; confirm callers don't
            // also need the semantic-error result in this case.
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few fragments, or too many without a rest type, is an arity error.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        // Surplus fragments are checked against the tuple's rest type.
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        // determineRawTemplateLiteralType guarantees a list type; anything else is a compiler bug.
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/**
 * Returns {@code true} if the given type is exactly {@code any}, or is a union
 * that has {@code any} as a direct member.
 */
private boolean containsAnyType(BType type) {
    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
    }
    return type == symTable.anyType;
}
/**
 * From a (possibly union) expected type, picks the single member that is
 * assignable to {@code object:RawTemplate}. If none match, the expected type
 * is returned unchanged (a later check will report the mismatch); if several
 * match, an ambiguity error is logged and the semantic-error type is returned.
 */
private BType getCompatibleRawTemplateType(BType expType, Location pos) {
    if (expType.tag != TypeTags.UNION) {
        return expType;
    }
    List<BType> rawTemplateCompatible = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (types.isAssignable(memberType, symTable.rawTemplateType)) {
            rawTemplateCompatible.add(memberType);
        }
    }
    if (rawTemplateCompatible.isEmpty()) {
        return expType;
    }
    if (rawTemplateCompatible.size() == 1) {
        return rawTemplateCompatible.get(0);
    }
    dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
            expType);
    return symTable.semanticError;
}
/**
 * Type-checks an integer range expression ({@code a ... b}); both bounds must
 * be ints, and the result is an int array.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}
/**
 * Type-checks a rest-argument expression ({@code ...expr}); the wrapped
 * expression is checked directly against the expected type.
 */
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/**
 * Type-checks an inferred typedesc default ({@code <>}); it is only valid
 * where a {@code typedesc} is expected.
 */
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    if (expType.tag != TypeTags.TYPEDESC) {
        dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
        resultType = symTable.semanticError;
        return;
    }
    resultType = expType;
}
/**
 * Type-checks a named argument ({@code name = expr}); the argument node takes
 * the type of its wrapped expression.
 */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
}
/**
 * Type-checks a (deprecated-style) match expression: each pattern variable is
 * defined in a fresh block env, each pattern body is checked against the
 * expected type, and the result type is the union of all possible types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // '_' patterns bind no variable, so nothing is defined for them.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
    });
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.toArray(new BType[0])[0];
    } else {
        // Multiple possible result types collapse to a union.
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
/**
 * Type-checks a {@code check} expression. Records whether we are inside a
 * query first, since error propagation differs for streaming queries.
 */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    checkWithinQueryExpr = isWithinQuery();
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a {@code checkpanic} expression; shares the same analysis as
 * {@code check}, but panics instead of returning the error.
 */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a query expression. Pushes the appropriate environment and the
 * final (select) clause onto the query stacks, visits each clause in order,
 * then resolves the overall query type from the select expression and the
 * input collection. The env/clause stacks are popped symmetrically afterwards.
 */
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Seed prevEnvs for a top-level query; nested queries reuse the existing stack.
    boolean cleanPrevEnvs = false;
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
        cleanPrevEnvs = true;
    }
    // For parallel (worker-related) contexts, start from the outer env rather
    // than the current one.
    if (breakToParallelQueryEnv) {
        queryEnvs.push(prevEnvs.peek());
    } else {
        queryEnvs.push(env);
    }
    queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    // Visiting each clause threads type/narrowing info through queryEnvs.
    clauses.forEach(clause -> clause.accept(this));
    BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
            collectionNode.getBType(), expType, queryExpr);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    queryFinalClauses.pop();
    queryEnvs.pop();
    if (cleanPrevEnvs) {
        prevEnvs.pop();
    }
    // A table-typed query result gets extra key/constraint validation.
    if (actualType.tag == TypeTags.TABLE) {
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    checkWithinQueryExpr = false;
    resultType = actualType;
}
/**
 * Returns {@code true} when the checker is currently inside a query
 * (both query stacks are non-empty).
 */
private boolean isWithinQuery() {
    if (queryEnvs.isEmpty()) {
        return false;
    }
    return !queryFinalClauses.isEmpty();
}
/**
 * Resolves the result type of a query expression by trying the select
 * expression against each non-error, non-nil candidate drawn from the target
 * type. The candidate's shape (array/table/stream/string/xml) dictates how the
 * select type is wrapped, and a completion/error component is added for
 * streaming queries.
 *
 * @return the resolved query type, or the semantic-error type if no candidate
 *         fits or several ambiguous candidates fit
 */
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                               BType targetType, BLangQueryExpr queryExpr) {
    // Only consider non-error, non-nil members of the target type as candidates.
    List<BType> resultTypes = types.getAllTypes(targetType).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    if (resultTypes.isEmpty()) {
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        switch (type.tag) {
            case TypeTags.ARRAY:
                // Select values become array elements.
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true));
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true));
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No contextual hint: derive the sequence type from the input collection.
                selectType = checkExpr(selectExp, env, type);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        BType errorType = getErrorType(collectionType, queryExpr);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        // The input may produce errors mid-iteration; surface them in the result type.
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        // More than one candidate accepted the select expression → ambiguous.
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        return actualType;
    }
}
/**
 * Builds the table type for a table-producing query. When a key specifier is
 * present, the key fields are validated/marked readonly and the result becomes
 * {@code table<T> key(...)|error} since key conflicts can occur at runtime.
 */
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
    final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    if (!queryExpr.fieldNameIdentifierList.isEmpty()) {
        validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType);
        markReadOnlyForConstraintType(constraintType);
        tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream()
                .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList());
        // Keyed tables built from a query may hit duplicate keys → error component.
        return BUnionType.create(null, tableType, symTable.errorType);
    }
    return tableType;
}
/**
 * Validates that each field named in a table key specifier exists on the row
 * (constraint) type, and forces the matching fields to be readonly as table
 * keys must be immutable.
 */
private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) {
    for (IdentifierNode identifier : fieldList) {
        BField field = types.getTableConstraintField(constraintType, identifier.getValue());
        if (field == null) {
            dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER,
                    identifier.getValue(), constraintType);
        } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            // Key fields are implicitly readonly.
            field.symbol.flags |= Flags.READONLY;
        }
    }
}
/**
 * Marks a sealed record constraint type as readonly when every one of its
 * fields is already readonly; non-record constraints are left untouched.
 */
private void markReadOnlyForConstraintType(BType constraintType) {
    if (constraintType.tag != TypeTags.RECORD) {
        return;
    }
    BRecordType recordType = (BRecordType) constraintType;
    // Any mutable field disqualifies the whole record from being readonly.
    for (BField field : recordType.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            return;
        }
    }
    // Open records could still gain mutable rest fields, so only sealed ones qualify.
    if (recordType.sealed) {
        recordType.flags |= Flags.READONLY;
        recordType.tsymbol.flags |= Flags.READONLY;
    }
}
/**
 * Derives the error/completion component a query over the given collection can
 * produce: a stream contributes its completion type, an iterable object its
 * 'next()' error component, and other iterables are probed via the langlib
 * iterator method. Streaming queries nested inside a query additionally get a
 * plain error component.
 *
 * @return the error component type, or {@code null} if none applies
 */
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
    if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
        return null;
    }
    BType returnType = null, errorType = null;
    switch (collectionType.tag) {
        case TypeTags.STREAM:
            // A stream's completion type is its error component directly.
            errorType = ((BStreamType) collectionType).completionType;
            break;
        case TypeTags.OBJECT:
            returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
            break;
        default:
            // Probe the langlib 'iterator' method to find the next() result type.
            BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                    names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
            if (itrSymbol == this.symTable.notFoundSymbol) {
                return null;
            }
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
            returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType);
    }
    List<BType> errorTypes = new ArrayList<>();
    if (returnType != null) {
        // Collect only the error members of the iterator's return type.
        types.getAllTypes(returnType).stream()
                .filter(t -> types.isAssignable(t, symTable.errorType))
                .forEach(errorTypes::add);
    }
    // A stream built inside another query with 'check' may also complete with an error.
    if (checkWithinQueryExpr && queryExpr.isStream) {
        if (errorTypes.isEmpty()) {
            errorTypes.add(symTable.nilType);
        }
        errorTypes.add(symTable.errorType);
    }
    if (!errorTypes.isEmpty()) {
        if (errorTypes.size() == 1) {
            errorType = errorTypes.get(0);
        } else {
            errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
        }
    }
    return errorType;
}
/**
 * When the context gives no expected type, derives the query result type from
 * the kind of the input collection: table → table, stream → stream,
 * xml → xml&lt;selectType&gt;, string → string, anything else → selectType[].
 */
private BType getNonContextualQueryType(BType staticType, BType basicType) {
    switch (basicType.tag) {
        case TypeTags.TABLE:
            return symTable.tableType;
        case TypeTags.STREAM:
            return symTable.streamType;
        case TypeTags.XML:
            return new BXMLType(staticType, null);
        case TypeTags.STRING:
            return symTable.stringType;
        default:
            return new BArrayType(staticType);
    }
}
/**
 * Type-checks a query action ({@code from ... do {...}}). The clauses are
 * visited like a query expression, the do-block is analyzed as a statement
 * block, and the action's type is {@code error?}.
 */
@Override
public void visit(BLangQueryAction queryAction) {
    // Maintain the prevEnvs stack depth whether or not we are nested.
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
    } else {
        prevEnvs.push(prevEnvs.peek());
    }
    queryEnvs.push(prevEnvs.peek());
    BLangDoClause doClause = queryAction.getDoClause();
    queryFinalClauses.push(doClause);
    List<BLangNode> clauses = queryAction.getQueryClauses();
    clauses.forEach(clause -> clause.accept(this));
    // Analyze the do-block statements in the env built up by the clauses.
    semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
    // A query action completes with either nil or an error.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    queryFinalClauses.pop();
    queryEnvs.pop();
    prevEnvs.pop();
}
/**
 * Type-checks a {@code from} clause: checks the collection, infers the bound
 * variable's type from the collection's member type, and narrows the query env.
 */
@Override
public void visit(BLangFromClause fromClause) {
    boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    // Replace the top query env with a narrowed env owned by this clause.
    SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
    fromClause.env = fromEnv;
    queryEnvs.push(fromEnv);
    checkExpr(fromClause.collection, queryEnvs.peek());
    // Set the type of the variable bound by this input clause.
    types.setInputClauseTypedBindingPatternType(fromClause);
    handleInputClauseVariables(fromClause, queryEnvs.peek());
    this.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
/**
 * Type-checks a {@code join} clause like a {@code from} clause, then checks
 * its {@code on} condition (which compares lhs/rhs key expressions).
 */
@Override
public void visit(BLangJoinClause joinClause) {
    boolean prevBreakEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
    joinClause.env = joinEnv;
    queryEnvs.push(joinEnv);
    checkExpr(joinClause.collection, queryEnvs.peek());
    types.setInputClauseTypedBindingPatternType(joinClause);
    handleInputClauseVariables(joinClause, queryEnvs.peek());
    if (joinClause.onClause != null) {
        ((BLangOnClause) joinClause.onClause).accept(this);
    }
    this.breakToParallelQueryEnv = prevBreakEnv;
}
/**
 * Type-checks a {@code let} clause: each let-variable definition is analyzed
 * in a narrowed env that subsequent clauses will see.
 */
@Override
public void visit(BLangLetClause letClause) {
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
    letClause.env = letEnv;
    queryEnvs.push(letEnv);
    for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
    }
}
/**
 * Type-checks a {@code where} clause; the filter must be boolean and the query
 * env is narrowed by the truth of the condition.
 */
@Override
public void visit(BLangWhereClause whereClause) {
    whereClause.env = handleFilterClauses(whereClause.expression);
}
/**
 * Installs the env for the {@code select} clause; the select expression itself
 * is checked later by {@code resolveQueryType}.
 */
@Override
public void visit(BLangSelectClause selectClause) {
    SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
    selectClause.env = selectEnv;
    queryEnvs.push(selectEnv);
}
/**
 * Installs the env for a query action's {@code do} clause; the body is
 * analyzed later by the query-action visitor.
 */
@Override
public void visit(BLangDoClause doClause) {
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
    doClause.env = letEnv;
    queryEnvs.push(letEnv);
}
/**
 * Type-checks an {@code on conflict} clause; the conflict expression must be
 * an error value.
 */
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    if (!types.isAssignable(exprType, symTable.errorType)) {
        dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, exprType);
    }
}
/**
 * Type-checks a {@code limit} clause; the limit expression must be an int.
 */
@Override
public void visit(BLangLimitClause limitClause) {
    BType exprType = checkExpr(limitClause.expression, queryEnvs.peek());
    if (!types.isAssignable(exprType, symTable.intType)) {
        dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.intType, exprType);
    }
}
/**
 * Type-checks a join's {@code on} condition. The lhs key is checked against
 * the env visible before the join input, the rhs key against the env after it,
 * and the two key types must be assignable.
 */
@Override
public void visit(BLangOnClause onClause) {
    BType lhsType, rhsType;
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    // lhsExprEnv should only contain scope entries before join condition.
    onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
    // rhsExprEnv should only contain scope entries after join condition.
    onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
/**
 * Type-checks an {@code order by} clause; every order key must be of an
 * ordered type.
 */
@Override
public void visit(BLangOrderByClause orderByClause) {
    orderByClause.env = queryEnvs.peek();
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env);
        if (!types.isOrderedType(exprType, false)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
/**
 * Visits a {@code do} statement from the expression checker; only its
 * {@code on fail} clause (if any) needs expression-level checking here.
 */
@Override
public void visit(BLangDo doNode) {
    if (doNode.onFailClause != null) {
        doNode.onFailClause.accept(this);
    }
}
/**
 * Visits each statement in an {@code on fail} block with this expression checker.
 */
public void visit(BLangOnFailClause onFailClause) {
    onFailClause.body.stmts.forEach(stmt -> stmt.accept(this));
}
/**
 * Checks a filter expression (where / on-condition style) against boolean and
 * replaces the top query env with one narrowed by the condition being true.
 *
 * @return the narrowed env now on top of the query env stack
 */
private SymbolEnv handleFilterClauses (BLangExpression filterExpression) {
    checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
    BType actualType = filterExpression.getBType();
    // NOTE(review): only tuple types get an extra incompatibility error here —
    // presumably other non-boolean types are already reported by checkExpr; confirm.
    if (TypeTags.TUPLE == actualType.tag) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, actualType);
    }
    SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
    queryEnvs.push(filterEnv);
    return filterEnv;
}
/**
 * Defines the variable bound by an input clause (from/join) in the given block
 * env. For {@code var} declarations the inferred member type is used; for
 * explicitly typed declarations the member type must be assignable to the
 * declared type.
 */
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    if (bLangInputClause.variableDefinitionNode == null) {
        // Nothing to define, e.g. an underscore binding.
        return;
    }
    BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    // Check whether the foreach node's variables are declared with var.
    if (bLangInputClause.isDeclaredWithVar) {
        // If the foreach node's variables are declared with var, type is the member type.
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // If the type node is available, resolve and compare against the member type.
    BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // Log an error and define the variable with the declared type to allow recovery.
    if (typeNodeType != symTable.semanticError) {
        dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, typeNodeType);
    }
    semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
}
/**
 * Shared analysis for {@code check} and {@code checkpanic}. The wrapped
 * expression is checked against a candidate type that includes an error
 * component; error members are then stripped from the result and recorded on
 * the node, and the remaining members form the expression's type. Reports an
 * error if the wrapped expression can never produce an error.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    // A null type means this node has not been visited before (worker-receive resumption).
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    BType checkExprCandidateType;
    if (expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        // Probe whether the expression fits the expected type; if not, widen the
        // candidate with an explicit error component.
        BType exprType = getCandidateType(checkedExpr, expType);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
        }
    }
    // For lax field access on json-like values, rewrite to 'ensureType' so the
    // result is coerced to the expected simple type.
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer typing until the worker-receive has been resolved.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(exprType, symTable.errorType);
    if (exprType.tag != TypeTags.UNION && !isErrorType) {
        if (exprType.tag == TypeTags.READONLY) {
            // readonly = (any & readonly)|(error & readonly); peel off the error part.
            checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                add(symTable.errorType);
            }};
            resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            // 'check' on an expression that can never be an error is an error itself.
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Partition the union members into error and non-error components.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.READONLY) {
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    // Recorded for the desugar phase, which generates the error-propagation code.
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        // No error members — 'check' is pointless here.
        dlog.error(checkedExpr.expr.pos,
                DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        // Only errors remain: the expression never produces a value.
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * Rewrites {@code check <laxExpr>} into {@code check <laxExpr>.ensureType(T)}
 * when the checked expression is lax-typed (e.g. json field access), so the
 * value is converted to the expected type at runtime. No-op otherwise.
 */
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
    BType rhsType = getCandidateType(checkedExpr, type);
    if (rhsType == symTable.semanticError) {
        // Retry with no contextual type to discover the expression's own type.
        rhsType = getCandidateType(checkedExpr, rhsType);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        return;
    }
    // Build the typedesc argument carrying the expected type.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    // Replace the checked expression with <expr>.ensureType(<typedesc>).
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type,
            names.fromString(invocation.name.value));
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
/**
 * For a field-access expression, laxness is judged on the type with nil
 * removed (lax field access produces optional results); for any other
 * expression kind the type is used as-is.
 */
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    boolean isFieldAccess = expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR;
    return isFieldAccess ? types.getSafeType(rhsType, false, true) : rhsType;
}
/**
 * Speculatively type-checks a clone of the checked expression (with error
 * logging muted) to discover what type it would have against the given
 * candidate type, without mutating the real AST node or emitting diagnostics.
 *
 * @return the type the cloned expression checks to (may be the semantic-error type)
 */
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
    // Mute diagnostics and remember the prior state for restoration.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Check a clone so the original node's resolved state is untouched.
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, env);
    } else {
        rhsType = checkExpr(clone, env, checkExprCandidateType);
    }
    // Restore logging state; discard any errors produced during the probe.
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
/**
 * Ensures the given type has an error component: if any member type is already
 * assignable to {@code error}, the type is returned unchanged; otherwise a
 * union of the type with {@code error} is created.
 */
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    boolean hasErrorComponent = false;
    for (BType memberType : types.getAllTypes(type)) {
        if (types.isAssignable(memberType, symTable.errorType)) {
            hasErrorComponent = true;
            break;
        }
    }
    return hasErrorComponent ? type : BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
// The service constructor's type is exactly the declared service symbol's type.
resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
// Resolve the tested type node and check the tested expression with no expected type.
typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env));
checkExpr(typeTestExpr.expr, env);
// An `is` expression always produces a boolean, matched against the expected type.
resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
/**
 * Type-checks an annotation access expression ({@code expr.@Annot}). The
 * accessed expression must be a typedesc; the result type is the annotation's
 * attached type unioned with nil (absent annotation), or {@code true|()} when
 * the annotation has no attached type.
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
BType actualType = symTable.semanticError;
BSymbol symbol =
this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
names.fromString(annotAccessExpr.pkgAlias.getValue()),
names.fromString(annotAccessExpr.annotationName.getValue()));
if (symbol == this.symTable.notFoundSymbol) {
this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
annotAccessExpr.annotationName.getValue());
} else {
annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol;
// No attached type means the annotation is a flag: its presence yields `true`.
BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? symTable.trueType :
((BAnnotationSymbol) symbol).attachedType.type;
// Nil accounts for the annotation not being present at runtime.
actualType = BUnionType.create(null, annotType, symTable.nilType);
}
this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Reports whether the given expression is a kind of reference that may appear
 * in a binding position. Logs an invalid-record-binding-pattern error for any
 * other expression kind before returning {@code false}.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            break;
    }
    dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
    return false;
}
/**
 * Refines a target type containing {@code readonly} against the current
 * expected type. When the expected type is selectively immutable, the bare
 * {@code readonly} component is replaced with the immutable intersection of
 * the expected type; otherwise the original target type is returned as-is.
 *
 * @param pos            location used for any immutable-type construction
 * @param origTargetType the declared target type, possibly a union containing readonly
 * @return the effective target type
 */
private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) {
if (origTargetType == symTable.readonlyType) {
// Plain `readonly` target: substitute the immutable form of the expected type if possible.
if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
return origTargetType;
}
return ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
(SelectivelyImmutableReferenceType) expType,
env, symTable, anonymousModelHelper, names,
new HashSet<>());
}
if (origTargetType.tag != TypeTags.UNION) {
return origTargetType;
}
// For unions, split out the readonly member (if any) from the rest.
boolean hasReadOnlyType = false;
LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
if (memberType == symTable.readonlyType) {
hasReadOnlyType = true;
continue;
}
nonReadOnlyTypes.add(memberType);
}
if (!hasReadOnlyType) {
return origTargetType;
}
if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
return origTargetType;
}
// Rebuild the union with the readonly member replaced by the immutable intersection.
BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
(SelectivelyImmutableReferenceType)
expType,
env, symTable, anonymousModelHelper,
names, new HashSet<>()));
return nonReadOnlyUnion;
}
/**
 * Defines the arrow function's parameters in a dedicated scope and infers the
 * type of its body expression against the expected return type.
 *
 * @return the type inferred for the arrow function body
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowFunctionEnv);
    }
    return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType);
}
/**
 * Assigns the expected parameter types to the arrow function's parameters,
 * synthesizing explicit type nodes for them. On an arity mismatch an error is
 * logged, {@code resultType} is set to semanticError, and every parameter is
 * poisoned with the error type.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    List<BLangSimpleVariable> params = bLangArrowFunction.params;
    if (paramTypes.size() != params.size()) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                paramTypes.size(), params.size());
        resultType = symTable.semanticError;
        for (BLangSimpleVariable param : params) {
            param.setBType(symTable.semanticError);
        }
        return;
    }
    for (int index = 0; index < params.size(); index++) {
        BLangSimpleVariable param = params.get(index);
        BType expectedParamType = paramTypes.get(index);
        // Synthesize a value-type node so later phases see an explicit parameter type.
        BLangValueType syntheticTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        syntheticTypeNode.setTypeKind(expectedParamType.getKind());
        syntheticTypeNode.pos = symTable.builtinPos;
        param.setTypeNode(syntheticTypeNode);
        param.setBType(expectedParamType);
    }
}
/**
 * Logs a self-reference error when the variable being initialized is referenced
 * inside its own initializer (tracked via {@code env.enclVarSym}).
 */
public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
if (env.enclVarSym == varSymbol) {
dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
}
}
/**
 * Returns a mutable list containing {@code count} copies of the semantic-error
 * type, used to poison expected member types after a failure.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
/**
 * Resolves and type-checks a plain (non-method) function invocation.
 * Resolution proceeds through: module prefix, main symbol space (variables
 * holding function pointers), then the constructor space. Emits diagnostics
 * for unknown functions and for remote/resource functions called with plain
 * call syntax, then delegates argument/return checking.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
Name funcName = names.fromIdNode(iExpr.name);
Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
BSymbol funcSymbol = symTable.notFoundSymbol;
BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
if (pkgSymbol == symTable.notFoundSymbol) {
dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
} else {
if (funcSymbol == symTable.notFoundSymbol) {
// A variable symbol here may be a function pointer; accept it as the callee.
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
funcSymbol = symbol;
}
if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
(symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
funcSymbol = symbol;
}
}
if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
// Fall back to the constructor symbol space (e.g. error constructors).
BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
}
}
if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
// Suppress the error for recovery (missing) nodes, but still check args for diagnostics.
if (!missingNodesHelper.isMissingNode(funcName)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
}
iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
resultType = symTable.semanticError;
return;
}
if (isFunctionPointer(funcSymbol)) {
iExpr.functionPointerInvocation = true;
// Function-pointer callees may need closure capture in the enclosing lambda.
markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
}
boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
if (langLibPackageID) {
// Lang-lib calls get their own env for type-param handling.
this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
}
iExpr.symbol = funcSymbol;
checkInvocationParamAndReturnType(iExpr);
if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
// The receiver (first arg) of a lang-lib call must not mutate an immutable value.
checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
}
}
/**
 * Marks the given symbol as a closure variable and registers it on the
 * enclosing lambda / arrow function / record-type default when the reference
 * crosses a function boundary. Symbols already marked as closures, and
 * package-level symbols referenced outside lambdas, are skipped.
 *
 * @param symbol the referenced symbol
 * @param pos    reference location, recorded with the closure entry
 * @param env    the environment at the reference site
 */
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
BLangInvokableNode encInvokable = env.enclInvokable;
// Already a closure, or a module-level symbol referenced outside a lambda: nothing to do.
if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) {
return;
}
// Case 1: reference inside a lambda to a symbol that is not one of its own parameters.
if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
&& !isFunctionArgument(symbol, encInvokable.requiredParams)) {
SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
resolvedSymbol.closure = true;
((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
}
}
// Case 2: reference inside an arrow expression to a non-parameter symbol.
if (env.node.getKind() == NodeKind.ARROW_EXPR
&& !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) {
SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
if (resolvedSymbol != symTable.notFoundSymbol) {
resolvedSymbol.closure = true;
((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
}
}
// Case 3: reference from within a record type's default-value expressions.
if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
!encInvokable.flagSet.contains(Flag.ATTACHED)) {
resolvedSymbol.closure = true;
((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
}
}
}
/**
 * Reports whether the symbol is NOT callable: i.e. it is neither tagged as a
 * function, nor a constructor, nor a variable holding a function pointer.
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    boolean taggedAsFunction = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION;
    boolean taggedAsConstructor = (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR;
    return !(taggedAsFunction || taggedAsConstructor || isFunctionPointer(funcSymbol));
}
/**
 * Reports whether the symbol is a variable holding a (non-native) function
 * pointer, as opposed to a directly declared function.
 */
private boolean isFunctionPointer(BSymbol funcSymbol) {
    // A symbol fully tagged as FUNCTION is a direct function, never a pointer.
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        return false;
    }
    boolean variableTagged = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE;
    boolean functionKind = funcSymbol.kind == SymbolKind.FUNCTION;
    boolean nonNative = (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE;
    return variableTagged && functionKind && nonNative;
}
/**
 * Type-checks the named detail arguments of an error constructor against the
 * expected detail type. Each argument is first checked on a clone against its
 * target field type; if that fails, the real node is re-checked without an
 * expected type so diagnostics point at the actual expression.
 *
 * @return the (checked) named argument expressions, in source order
 */
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
BType expectedType) {
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(errorConstructorExpr.namedArgs.size());
for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
BType target = checkErrCtrTargetTypeAndSetSymbol(namedArgsExpression, expectedType);
// Check a clone first so a failed targeted check doesn't pollute the real node.
BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
BType type = checkExpr(clone, env, target);
if (type == symTable.semanticError) {
checkExpr(namedArgsExpression, env);
} else {
checkExpr(namedArgsExpression, env, target);
}
namedArgs.add(namedArgsExpression);
}
return namedArgs;
}
/**
 * Determines the expected type for a named detail argument of an error
 * constructor and, when it matches a record field, records the field symbol on
 * the argument. For open records an extra (rest) detail arg with other fields
 * present is reported as an error.
 *
 * @return the target type for the named argument, or semanticError/noType
 */
private BType checkErrCtrTargetTypeAndSetSymbol(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
    if (expectedType == symTable.semanticError) {
        return symTable.semanticError;
    }
    switch (expectedType.tag) {
        case TypeTags.MAP:
            // Map detail type: every named arg is checked against the constraint.
            return ((BMapType) expectedType).constraint;
        case TypeTags.RECORD:
            break;
        default:
            return symTable.semanticError;
    }
    BRecordType recordType = (BRecordType) expectedType;
    BField targetField = recordType.fields.get(namedArgsExpression.name.value);
    if (targetField != null) {
        namedArgsExpression.varSymbol = targetField.symbol;
        return targetField.type;
    }
    if (!recordType.sealed && !recordType.fields.isEmpty()) {
        dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                recordType);
    }
    return recordType.sealed ? symTable.noType : recordType.restFieldType;
}
/**
 * Type-checks a method invocation on an object-typed expression. Resolution
 * order: attached method, then invocable object field (function pointer),
 * then lang-lib method. Also enforces that service methods and user-defined
 * init are only invoked on {@code self}, and that remote/resource methods are
 * not called with plain method syntax.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
// Service methods may only be called through `self`.
if (objectType.getKind() == TypeKind.SERVICE &&
!(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
(Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
return;
}
Name funcName =
names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
BSymbol funcSymbol =
symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
if (funcSymbol == symTable.notFoundSymbol) {
// No attached method: a field of function type may serve as the callee.
BSymbol invocableField = symResolver.resolveInvocableObjectField(
iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol);
if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
funcSymbol = invocableField;
iExpr.functionPointerInvocation = true;
}
}
if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
// Last resort: lang-lib methods applicable to the object type.
if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
objectType);
resultType = symTable.semanticError;
return;
}
} else {
iExpr.symbol = funcSymbol;
}
// `init` may only be invoked explicitly on `self`.
if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
!(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
(Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
}
checkInvocationParamAndReturnType(iExpr);
}
/**
 * Type-checks a remote action invocation ({@code client->method()}) or an
 * async start on an object. Resolves the remote method (falling back to an
 * invocable field, then lang-lib), validates that action syntax is only used
 * for remote methods, and rejects client remote calls whose return type
 * requires a {@code never}-typed member.
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
if (checkInvalidActionInvocation(aInv)) {
dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType());
this.resultType = symTable.semanticError;
aInv.symbol = symTable.notFoundSymbol;
return;
}
Name remoteMethodQName = names
.fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
Name actionName = names.fromIdNode(aInv.name);
BSymbol remoteFuncSymbol = symResolver
.resolveObjectMethod(aInv.pos, env, remoteMethodQName, (BObjectTypeSymbol) expType.tsymbol);
if (remoteFuncSymbol == symTable.notFoundSymbol) {
// No attached method: an invocable object field may serve as the callee.
BSymbol invocableField = symResolver.resolveInvocableObjectField(
aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol);
if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
remoteFuncSymbol = invocableField;
aInv.functionPointerInvocation = true;
}
}
if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
resultType = symTable.semanticError;
return;
}
// `->` syntax is only valid for remote methods (or async starts).
if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
this.resultType = symTable.semanticError;
return;
}
if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
types.isNeverTypeOrStructureTypeWithARequiredNeverMember
((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
}
aInv.symbol = remoteFuncSymbol;
checkInvocationParamAndReturnType(aInv);
}
/**
 * Reports whether this is an invalid action invocation: a synchronous (non-start)
 * action call on a simple variable reference that is not an endpoint.
 */
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) {
    if (aInv.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF || aInv.async) {
        return false;
    }
    BSymbol exprSymbol = ((BLangSimpleVarRef) aInv.expr).symbol;
    return (exprSymbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT;
}
/**
 * Attempts to resolve (and check) the invocation as a lang-lib method call on
 * the given type; returns whether a matching lang-lib method was found.
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    BSymbol langLibMethod = getLangLibMethod(iExpr, bType);
    return langLibMethod != symTable.notFoundSymbol;
}
/**
 * Looks up a lang-lib method for the given type and, on success, rewrites the
 * invocation into lang-lib form: the receiver expression is prepended as the
 * first argument and the args are checked in a dedicated invocation env.
 * Returns {@code symTable.notFoundSymbol} (with no mutation) when no matching
 * method exists.
 */
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
Name funcName = names.fromString(iExpr.name.value);
BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);
if (funcSymbol == symTable.notFoundSymbol) {
return symTable.notFoundSymbol;
}
iExpr.symbol = funcSymbol;
iExpr.langLibInvocation = true;
// Swap in an invocation env for type-param resolution; restored below.
SymbolEnv enclEnv = this.env;
this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
// The receiver becomes the first positional argument of the lang-lib function.
iExpr.argExprs.add(0, iExpr.expr);
checkInvocationParamAndReturnType(iExpr);
this.env = enclEnv;
return funcSymbol;
}
/**
 * Checks the invocation's arguments, then matches the inferred return type
 * against the current expected type, storing the result in {@code resultType}.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    BType inferredType = checkInvocationParam(iExpr);
    resultType = types.checkType(iExpr, inferredType, this.expType);
}
/**
 * Picks the single open included-record parameter that may absorb additional
 * named arguments. Eligible only when exactly one open included-record param
 * exists and its record type declares every required parameter name as a
 * field; otherwise returns {@code null}.
 */
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> candidateFields = ((BRecordType) candidate.type).fields;
    boolean coversAllRequired = candidateFields.keySet().containsAll(requiredParamNames);
    return coversAllRequired ? candidate : null;
}
/**
 * Scans the callee's parameters, collecting into {@code incRecordParams} the
 * fields contributed by included-record parameters (excluding never-typed
 * fields), and returns the single open included-record parameter (if any) that
 * may accept additional named arguments via its rest field.
 *
 * @param invokableSymbol the callee
 * @param incRecordParams out-parameter: field symbols of included record params
 * @return the open included-record param allowing extra fields, or null
 */
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
List<BVarSymbol> incRecordParams) {
Set<String> requiredParamNames = new HashSet<>();
List<BVarSymbol> openIncRecordParams = new ArrayList<>();
for (BVarSymbol paramSymbol : invokableSymbol.params) {
if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) &&
paramSymbol.type.getKind() == TypeKind.RECORD) {
// Included record param: each non-never field acts like a named parameter.
boolean recordWithDisallowFieldsOnly = true;
LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields;
for (String fieldName : fields.keySet()) {
BField field = fields.get(fieldName);
if (field.symbol.type.tag != TypeTags.NEVER) {
recordWithDisallowFieldsOnly = false;
incRecordParams.add(field.symbol);
requiredParamNames.add(fieldName);
}
}
// Only never-typed fields + open rest field: candidate for extra named args.
if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) {
openIncRecordParams.add(paramSymbol);
}
} else {
requiredParamNames.add(paramSymbol.name.value);
}
}
return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
/**
 * Partitions the invocation's argument expressions into required (positional +
 * named) and rest args, validating argument ordering (no positional/rest args
 * after named args) and named-arg capacity. Delegates the actual type checking
 * to {@link #checkInvocationArgs}.
 *
 * @return the invocation's result type, or semanticError/noType on failure
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
// `function` (any-function) typed values cannot be called directly.
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
return symTable.semanticError;
}
if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
return symTable.noType;
}
BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes();
List<BVarSymbol> incRecordParams = new ArrayList<>();
BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
incRecordParams);
// Named args may also target fields of included record params, so their capacity differs.
int parameterCountForPositionalArgs = paramTypes.size();
int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
iExpr.requiredArgs = new ArrayList<>();
for (BVarSymbol symbol : invokableSymbol.params) {
if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
symbol.type.tag != TypeTags.RECORD) {
continue;
}
LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields;
if (fields.isEmpty()) {
continue;
}
// An included record param with at least one usable field is addressed via its
// fields, so it does not itself count towards the named-arg capacity.
for (String field : fields.keySet()) {
if (fields.get(field).type.tag != TypeTags.NEVER) {
parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
break;
}
}
}
// Partition args: positional -> requiredArgs/restArgs, named -> requiredArgs, rest (...x) -> vararg.
int i = 0;
BLangExpression vararg = null;
boolean foundNamedArg = false;
for (BLangExpression expr : iExpr.argExprs) {
switch (expr.getKind()) {
case NAMED_ARGS_EXPR:
foundNamedArg = true;
if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
iExpr.requiredArgs.add(expr);
} else {
dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
}
i++;
break;
case REST_ARGS_EXPR:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
continue;
}
vararg = expr;
break;
default:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
}
if (i < parameterCountForPositionalArgs) {
iExpr.requiredArgs.add(expr);
} else {
iExpr.restArgs.add(expr);
}
i++;
break;
}
}
return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
incRecordParamAllowAdditionalFields);
}
/**
 * Type-checks the partitioned invocation arguments against the callee's
 * signature: positional args against non-rest params, named args against
 * params / included-record fields, and rest args (including a spread vararg)
 * against the rest parameter. Reports missing required params and excess
 * args, builds the synthetic tuple/record type used to check a vararg that
 * also covers non-rest params, and finally computes the return type
 * (parameterized-type unification for native lang-lib functions, future type
 * for async starts).
 *
 * @return the invocation's return type, or semanticError on failure
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
List<BVarSymbol> incRecordParams,
BVarSymbol incRecordParamAllowAdditionalFields) {
BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type;
BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);
List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
List<BVarSymbol> valueProvidedParams = new ArrayList<>();
// Track which non-defaultable params / required included-record fields still need a value.
List<BVarSymbol> requiredParams = new ArrayList<>();
List<BVarSymbol> requiredIncRecordParams = new ArrayList<>();
for (BVarSymbol nonRestParam : nonRestParams) {
if (nonRestParam.isDefaultable) {
continue;
}
requiredParams.add(nonRestParam);
}
for (BVarSymbol incRecordParam : incRecordParams) {
if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
requiredIncRecordParams.add(incRecordParam);
}
}
// Phase 1: match the collected required args (positional, then named) to params.
int i = 0;
for (; i < nonRestArgs.size(); i++) {
BLangExpression arg = nonRestArgs.get(i);
// A lang-lib receiver was already type-checked; re-validate it against the param type
// (with a special silent re-check for char-string params) instead of checking again.
if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
BType expectedType = paramTypes.get(i);
BType actualType = arg.getBType();
if (expectedType == symTable.charStringType) {
arg.cloneAttempt++;
BLangExpression clonedArg = nodeCloner.cloneNode(arg);
BType argType = checkExprSilent(clonedArg, expectedType, env);
if (argType != symTable.semanticError) {
actualType = argType;
}
}
types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
}
if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
// Positional arg: consume the next non-rest param in order.
if (i < nonRestParams.size()) {
BVarSymbol param = nonRestParams.get(i);
checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
valueProvidedParams.add(param);
requiredParams.remove(param);
continue;
}
break;
}
if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
// Named arg: resolve the target param or included-record field by name.
BLangIdentifier argName = ((NamedArgNode) arg).getName();
BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
if (varSym == null) {
dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
break;
}
requiredParams.remove(varSym);
requiredIncRecordParams.remove(varSym);
if (valueProvidedParams.contains(varSym)) {
dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
continue;
}
checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
((BLangNamedArgsExpression) arg).varSymbol = varSym;
valueProvidedParams.add(varSym);
}
}
// Phase 2: report missing required params (a vararg may still cover them).
BVarSymbol restParam = invokableTypeSymbol.restParam;
boolean errored = false;
if (!requiredParams.isEmpty() && vararg == null) {
for (BVarSymbol requiredParam : requiredParams) {
if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
iExpr.name.value);
errored = true;
}
}
}
if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
for (BVarSymbol requiredParam : requiredParams) {
if (requiredParam.type == requiredIncRecordParam.owner.type) {
dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
requiredIncRecordParam.name, iExpr.name.value);
errored = true;
}
}
}
}
if (restParam == null &&
(!iExpr.restArgs.isEmpty() ||
(vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
errored = true;
}
if (errored) {
return symTable.semanticError;
}
// Phase 3: when a vararg also covers unfilled non-rest params, build a synthetic
// tuple (and record, for named delivery) type describing what the vararg must supply.
BType listTypeRestArg = restParam == null ? null : restParam.type;
BRecordType mappingTypeRestArg = null;
if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
PackageID pkgID = env.enclPkg.symbol.pkgID;
List<BType> tupleMemberTypes = new ArrayList<>();
BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
mappingTypeRestArg = new BRecordType(recordSymbol);
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
BType tupleRestType = null;
BVarSymbol fieldSymbol;
for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
BType paramType = paramTypes.get(j);
BVarSymbol nonRestParam = nonRestParams.get(j);
Name paramName = nonRestParam.name;
tupleMemberTypes.add(paramType);
boolean required = requiredParams.contains(nonRestParam);
fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol,
symTable.builtinPos, VIRTUAL);
fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
}
if (listTypeRestArg != null) {
// Append the rest param's element types so the tuple also covers rest values.
if (listTypeRestArg.tag == TypeTags.ARRAY) {
tupleRestType = ((BArrayType) listTypeRestArg).eType;
} else if (listTypeRestArg.tag == TypeTags.TUPLE) {
BTupleType restTupleType = (BTupleType) listTypeRestArg;
tupleMemberTypes.addAll(restTupleType.tupleTypes);
if (restTupleType.restType != null) {
tupleRestType = restTupleType.restType;
}
}
}
BTupleType tupleType = new BTupleType(tupleMemberTypes);
tupleType.restType = tupleRestType;
listTypeRestArg = tupleType;
mappingTypeRestArg.sealed = true;
mappingTypeRestArg.restFieldType = symTable.noType;
mappingTypeRestArg.fields = fields;
recordSymbol.type = mappingTypeRestArg;
mappingTypeRestArg.tsymbol = recordSymbol;
}
if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
return symTable.semanticError;
}
// Phase 4: check the rest arguments / vararg against the (possibly synthetic) rest type.
BType restType = null;
if (vararg != null && !iExpr.restArgs.isEmpty()) {
// Both individual rest args and a trailing vararg: elements first, then the spread.
BType elementType = ((BArrayType) listTypeRestArg).eType;
for (BLangExpression restArg : iExpr.restArgs) {
checkTypeParamExpr(restArg, this.env, elementType, true);
}
checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
iExpr.restArgs.add(vararg);
restType = this.resultType;
} else if (vararg != null) {
iExpr.restArgs.add(vararg);
if (mappingTypeRestArg != null) {
// The vararg may be list- or mapping-shaped; accept either via a union.
LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
restTypes.add(listTypeRestArg);
restTypes.add(mappingTypeRestArg);
BType actualType = BUnionType.create(null, restTypes);
checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
} else {
checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
}
restType = this.resultType;
} else if (!iExpr.restArgs.isEmpty()) {
if (listTypeRestArg.tag == TypeTags.ARRAY) {
BType elementType = ((BArrayType) listTypeRestArg).eType;
for (BLangExpression restArg : iExpr.restArgs) {
checkTypeParamExpr(restArg, this.env, elementType, true);
if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
restType = this.resultType;
}
}
} else {
// Tuple-typed rest: each arg checks against its positional member, then the rest type.
BTupleType tupleType = (BTupleType) listTypeRestArg;
List<BType> tupleMemberTypes = tupleType.tupleTypes;
BType tupleRestType = tupleType.restType;
int tupleMemCount = tupleMemberTypes.size();
for (int j = 0; j < iExpr.restArgs.size(); j++) {
BLangExpression restArg = iExpr.restArgs.get(j);
BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
checkTypeParamExpr(restArg, this.env, memType, true);
if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
restType = this.resultType;
}
}
}
}
// Phase 5: compute the return type; unify parameterized returns of native functions.
BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
if (restType != symTable.semanticError &&
Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) &&
Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
}
// lang.array:sort gets extra validation of its member/key-function types.
boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
String sortFuncName = "sort";
if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
checkArrayLibSortFuncArgs(iExpr);
}
if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
// `start f(...)` yields a future of the return type.
return this.generateFutureType(invokableSymbol, retType);
} else {
return retType;
}
}
/**
 * Validates a lang.array {@code sort} invocation: without a key function the
 * array's member type must be an ordered type; with a key function (variable
 * ref, arrow expr, or lambda) its return type must be ordered.
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
// No key function supplied: the member type itself must be ordered.
if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
if (iExpr.argExprs.size() != 3) {
return;
}
BLangExpression keyFunction = iExpr.argExprs.get(2);
BType keyFunctionType = keyFunction.getBType();
if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
if (keyFunctionType.tag == TypeTags.NIL) {
// Explicit nil key function behaves like no key function.
if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
return;
}
// Determine the key function's return type and a position for error reporting.
Location pos;
BType returnType;
if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
pos = keyFunction.pos;
returnType = keyFunction.getBType().getReturnType();
} else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
pos = arrowFunction.body.expr.pos;
returnType = arrowFunction.body.expr.getBType();
if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
} else {
BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
pos = keyLambdaFunction.function.pos;
returnType = keyLambdaFunction.function.getBType().getReturnType();
}
if (!types.isOrderedType(returnType, false)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
}
}
/**
 * Resolves the target of a named argument: first a declared non-rest
 * parameter, then a field contributed by an included record parameter, and
 * finally — when an open included-record param allows extra fields — a fresh
 * synthetic symbol checked against that record's rest field type.
 *
 * @return the matched (or synthesized) parameter symbol, or null if unresolved
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields) {
    for (BVarSymbol param : nonRestParams) {
        if (argName.value.equals(param.getName().value)) {
            return param;
        }
    }
    for (BVarSymbol includedRecordField : incRecordParams) {
        if (argName.value.equals(includedRecordField.getName().value)) {
            return includedRecordField;
        }
    }
    if (incRecordParamAllowAdditionalFields != null) {
        BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type;
        // The extra named arg's value must match the open record's rest field type.
        checkExpr(expr, env, incRecordType.restFieldType);
        if (!incRecordType.fields.containsKey(argName.value)) {
            return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName),
                    null, symTable.noType, null, argName.pos, VIRTUAL);
        }
    }
    return null;
}
/**
 * Wraps the given return type in a future type for an async ({@code start})
 * invocation, flagging futures produced by worker-start lambdas.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart);
}
private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
boolean inferTypeForNumericLiteral) {
checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral);
}
/**
 * Type-checks an argument expression against a possibly type-parameterized
 * expected type. Outside type-param scopes this is a plain check; otherwise,
 * expressions needing inference are checked against the matching bound type,
 * and the inferred/actual type is recorded for type-param resolution.
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
boolean inferTypeForNumericLiteral) {
if (typeParamAnalyzer.notRequireTypeParams(env)) {
checkExpr(arg, env, expectedType);
return;
}
if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
// Check against the bound type, then bind the inferred type to the type param.
BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
BType inferredType = checkExpr(arg, env, expType);
typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType);
return;
}
checkExpr(arg, env, expectedType);
typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType);
}
/**
 * Reports whether the expression's type should be inferred (rather than
 * checked directly) for type-param resolution. Grouping parentheses are looked
 * through; constructor-like expressions always require inference; elvis,
 * ternary and numeric literals do so only when requested.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
    }
    if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR) {
        return true;
    }
    if (kind == NodeKind.ELVIS_EXPR || kind == NodeKind.TERNARY_EXPR
            || kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return false;
}
/**
 * Type-checks a single field of a mapping constructor (record or map literal):
 * key-value fields, var-name shorthand fields, and spread-operator fields.
 * Determines the expected type of the field's value from the mapping type and
 * checks the value expression against it.
 *
 * @param field       the mapping-constructor field to check
 * @param mappingType the record or map type the constructor is checked against
 * @return the checked type of the field's value, or {@code semanticError}
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
    BType fieldType = symTable.semanticError;
    boolean keyValueField = field.isKeyValueField();
    boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
    boolean readOnlyConstructorField = false;
    String fieldName = null;
    Location pos = null;
    BLangExpression valueExpr = null;
    if (keyValueField) {
        valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
    } else if (!spreadOpField) {
        // Var-name shorthand field: the field node itself is the value expression.
        valueExpr = (BLangRecordVarNameField) field;
    }
    switch (mappingType.tag) {
        case TypeTags.RECORD:
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                                                                          (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                key.fieldSymbol = typeSymbolPair.fieldSymbol;
                readOnlyConstructorField = keyValField.readonly;
                pos = key.expr.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else if (spreadOpField) {
                // Spread into a record: the spread expression must be a map
                // (checked against the union of all field types) or a record
                // whose fields are assignable to the target's fields.
                BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                checkExpr(spreadExpr, this.env);
                BType spreadExprType = spreadExpr.getBType();
                if (spreadExprType.tag == TypeTags.MAP) {
                    return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                                           getAllFieldType((BRecordType) mappingType),
                                           DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }
                if (spreadExprType.tag != TypeTags.RECORD) {
                    dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                               spreadExprType);
                    return symTable.semanticError;
                }
                boolean errored = false;
                // Validate every field contributed by the spread record
                // against its expected type in the target record.
                for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
                    BType specFieldType = bField.type;
                    BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name,
                                                                         mappingType.tsymbol);
                    BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                                                                          (BRecordType) mappingType);
                    if (expectedFieldType != symTable.semanticError &&
                            !types.isAssignable(specFieldType, expectedFieldType)) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                   expectedFieldType, bField.name, specFieldType);
                        if (!errored) {
                            errored = true;
                        }
                    }
                }
                return errored ? symTable.semanticError : symTable.noType;
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                                                                          (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            break;
        case TypeTags.MAP:
            if (spreadOpField) {
                // Spread into a map: compute the broadest member type of the
                // spread expression and check it against the map constraint.
                BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                BType spreadOpType = checkExpr(spreadExp, this.env);
                BType spreadOpMemberType;
                switch (spreadOpType.tag) {
                    case TypeTags.RECORD:
                        List<BType> types = new ArrayList<>();
                        BRecordType recordType = (BRecordType) spreadOpType;
                        for (BField recField : recordType.fields.values()) {
                            types.add(recField.type);
                        }
                        if (!recordType.sealed) {
                            // Open record: the rest field type can also appear.
                            types.add(recordType.restFieldType);
                        }
                        spreadOpMemberType = getRepresentativeBroadType(types);
                        break;
                    case TypeTags.MAP:
                        spreadOpMemberType = ((BMapType) spreadOpType).constraint;
                        break;
                    default:
                        dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                                   spreadOpType);
                        return symTable.semanticError;
                }
                return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                                       DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            }
            boolean validMapKey;
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
                readOnlyConstructorField = keyValField.readonly;
                pos = key.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
            break;
    }
    // `readonly` constructor fields must have an immutable (or immutable-able) type.
    if (readOnlyConstructorField) {
        if (types.isSelectivelyImmutableType(fieldType)) {
            fieldType =
                    ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                            (SelectivelyImmutableReferenceType) fieldType,
                            env, symTable, anonymousModelHelper, names,
                            new HashSet<>());
        } else if (!types.isInherentlyImmutableType(fieldType)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
            fieldType = symTable.semanticError;
        }
    }
    if (spreadOpField) {
        valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
    }
    BLangExpression exprToCheck = valueExpr;
    if (this.nonErrorLoggingCheck) {
        // Speculative check: work on a clone so diagnostics/state don't stick.
        exprToCheck = nodeCloner.cloneNode(valueExpr);
    } else {
        ((BLangNode) field).setBType(fieldType);
    }
    return checkExpr(exprToCheck, this.env, fieldType);
}
/**
 * Determines the expected value type (and field symbol, when resolvable) for a
 * record-literal key. Computed keys are checked as strings and yield the union
 * of all possible field types; identifier and string-literal keys resolve to a
 * specific field.
 *
 * @param keyExpr     the key expression
 * @param computedKey whether the key is a computed ({@code [expr]}) key
 * @param recordType  the record type being constructed
 * @return the resolved field symbol (may be null) and the determined type
 */
private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey,
                                                 BRecordType recordType) {
    Name fieldName;
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.getBType() == symTable.semanticError) {
            return new TypeSymbolPair(null, symTable.semanticError);
        }
        // The key is only known at runtime, so the value may be any declared
        // field type (plus the rest field type for open records).
        LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream()
                .map(field -> field.type)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }
        return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes));
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return new TypeSymbolPair(null, symTable.semanticError);
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol);
    BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType);
    return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? (BVarSymbol) fieldSymbol : null, type);
}
/**
 * Resolves the type of a record-literal key from its resolved field symbol.
 * Falls back to the rest field type for open records; logs an error and
 * returns {@code semanticError} when the record is sealed.
 */
private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key,
                                          BRecordType recordType) {
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    if (!recordType.sealed) {
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
               recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of every declared field type of the record, including the
 * rest field type when one exists.
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    recordType.fields.values().forEach(field -> memberTypes.add(field.type));
    BType restType = recordType.restFieldType;
    if (restType != null && restType != symTable.noType) {
        memberTypes.add(restType);
    }
    return BUnionType.create(null, memberTypes);
}
/**
 * Validates a key expression of a JSON/map literal. Computed keys must
 * type-check as strings; otherwise the key must be an identifier or a string
 * literal. Logs an error for any other key form.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.getBType() != symTable.semanticError;
    }
    NodeKind kind = keyExpr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return true;
    }
    if (kind == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Widens the given type with nil unless it already includes nil.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable()
            ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves access to a required (non-optional) record field. Returns
 * {@code semanticError} when the field is undeclared or optional; otherwise
 * attaches the field symbol to the access expression and returns its type.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrOptional = fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol);
    if (missingOrOptional) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access to an optional record field. Returns {@code semanticError}
 * when the field is undeclared or not optional; otherwise attaches the field
 * symbol to the access expression and returns its type.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrRequired = fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol);
    if (missingOrRequired) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access through the rest field of an open record. Applies only when
 * the name is NOT a declared field and the record is not sealed; any other
 * case yields {@code semanticError}.
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol || recordType.sealed) {
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}
/**
 * Resolves a field-based access on an object type: first as a plain field,
 * then as an attached function. Logs an error when neither resolves.
 *
 * @param bLangFieldBasedAccess the access expression; its symbol is set on success
 * @param fieldName             the accessed field/method name
 * @param objectType            the object type being accessed
 * @return the resolved field/function type, or {@code semanticError}
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            this.env, fieldName, objectType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Not a field: try the mangled attached-function name instead.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos,
                   DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                   objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }
    // An isolated method accessed via a non-isolated object loses its
    // `isolated` qualifier; work on a duplicate so the original symbol is
    // left untouched.
    if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
            !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
        fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);
        fieldSymbol.flags &= ~Flags.ISOLATED;
        fieldSymbol.type.flags &= ~Flags.ISOLATED;
    }
    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type of a tuple at the given index. Indexes beyond the
 * fixed members fall back to the rest type when one exists; otherwise any
 * out-of-range index yields {@code semanticError}.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int memberCount = tuple.tupleTypes.size();
    if (indexValue >= memberCount && tuple.restType != null) {
        return tuple.restType;
    }
    if (indexValue < 0 || indexValue >= memberCount) {
        return symTable.semanticError;
    }
    return tuple.tupleTypes.get(indexValue);
}
/**
 * Type-checks the start and end tag names of an XML element literal and
 * reports a mismatch when both are qualified names that differ.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing further to validate.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    // Both tags are QNames and equal: valid.
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME &&
            startTagName.equals(endTagName)) {
        return;
    }
    // Neither tag is a QName (both are interpolated expressions): matching is
    // only checkable at runtime, so accept here.
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        return;
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks the interpolated expressions of a string template. Each must be
 * a non-nil simple basic type or a string; anything else is reported as an
 * incompatible type.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);
        BType exprType = expr.getBType();
        if (exprType == symTable.semanticError) {
            // Already reported while checking the expression itself.
            continue;
        }
        if (types.isNonNilSimpleBasicTypeOrString(exprType)) {
            continue;
        }
        dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                   BUnionType.create(null, symTable.intType, symTable.floatType,
                                     symTable.decimalType, symTable.stringType,
                                     symTable.booleanType), exprType);
    }
}
/**
 * Concatenates consecutive text-type child nodes and returns the reduced set of children.
 *
 * @param exprs         Child nodes
 * @param xmlElementEnv Symbol environment of the enclosing XML element
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    // Pending run of non-XML (text-producing) expressions to fold into a
    // single XML text literal.
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();
    for (BLangExpression expr : exprs) {
        BType exprType;
        if (expr.getKind() == NodeKind.QUERY_EXPR) {
            exprType = checkExpr(expr, xmlElementEnv, expType);
        } else {
            exprType = checkExpr(expr, xmlElementEnv);
        }
        if (TypeTags.isXMLTypeTag(exprType.tag)) {
            // An XML child ends the current text run: flush it first.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }
        BType type = expr.getBType();
        if (type.tag >= TypeTags.JSON &&
                !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) {
            // Not a value that can be rendered as XML text: report unless the
            // error was already raised or the type is itself XML.
            if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                           BUnionType.create(null, symTable.intType, symTable.floatType,
                                             symTable.decimalType, symTable.stringType,
                                             symTable.booleanType, symTable.xmlType), type);
            }
            continue;
        }
        tempConcatExpressions.add(expr);
    }
    // Flush a trailing text run, if any.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }
    return newChildren;
}
/**
 * Wraps the given text fragments into a single XML text literal node,
 * positioned at the first fragment.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.textFragments = exprs;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final type of an access expression, widening the actual type
 * with nil for nil-safe navigation and with error for error-safe navigation.
 *
 * @param accessExpr the access expression; its originalType is recorded
 * @param actualType the type resolved for the accessed member
 * @return the possibly widened result type
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType unionType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        unionType.add(symTable.nilType);
    }
    BType parentType = accessExpr.expr.getBType();
    // Error-safe navigation propagates a potential error from the parent.
    if (accessExpr.errorSafeNavigation
            && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
            && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
        unionType.add(symTable.errorType);
    }
    // Collapse a single-member union back to the member itself.
    if (unionType.getMemberTypes().size() == 1) {
        return unionType.getMemberTypes().toArray(new BType[0])[0];
    }
    return unionType;
}
/**
 * Whether the given access expression can evaluate to nil: true for nullable
 * non-JSON parents, and for index access on maps whose constraint is not
 * any/json (a missing key yields nil).
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    if (parentType.tag != TypeTags.MAP) {
        return false;
    }
    // Map member access: a missing key produces nil unless the constraint is
    // any/json (which already cover nil).
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            && accessExpr.expr.getBType().tag == TypeTags.MAP) {
        BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint;
        return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
    }
    return false;
}
/**
 * Resolves a field access on an object type or on a union of object types.
 * For a union, the access must resolve on every member; the result is the
 * union of the per-member field types.
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (fieldType == symTable.semanticError) {
            // Invalid for one member: invalid for the whole union.
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a required-field access on a record type or on a union of record
 * types. For a union, the field must resolve on every member; the result is
 * the union of the per-member field types.
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            // Invalid for one member: invalid for the whole union.
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a field access used as an assignment target (lvalue) on a record
 * type or a union of record types. On an lvalue both required and optional
 * fields may be assigned, so the optional lookup is tried as a fallback.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) varRefType;
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, recordType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            // Invalid for one member: invalid for the whole union.
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves an optional field access ({@code x?.f}) on a record type or a
 * union of record types. For a union the result is nillable when at least one
 * member does not provide the field.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                 Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        // Required field: no nil added, the field is always present.
        BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType != symTable.semanticError) {
            return fieldType;
        }
        // Optional field: may be absent, so nil is added to the result.
        fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        return addNilForNillableAccessType(fieldType);
    }
    Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
    BType fieldType;
    boolean nonMatchedRecordExists = false;
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
    for (BType memType : memberTypes) {
        BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
        if (individualFieldType == symTable.semanticError) {
            // Field missing on this member: access remains legal for the
            // union, but the result becomes nillable.
            nonMatchedRecordExists = true;
            continue;
        }
        fieldTypeMembers.add(individualFieldType);
    }
    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    if (fieldTypeMembers.size() == 1) {
        fieldType = fieldTypeMembers.iterator().next();
    } else {
        fieldType = BUnionType.create(null, fieldTypeMembers);
    }
    return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Classifies, for diagnostics, each record member of a union by whether the
 * accessed field is undeclared in it or declared but optional.
 *
 * @param fieldAccessExpr the field access being diagnosed
 * @param memberTypes     the union members (assumed record types)
 * @param fieldName       the accessed field name
 * @return buckets of records with undeclared/optional occurrences of the field
 */
private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes,
                                                Name fieldName) {
    RecordUnionDiagnostics recordUnionDiagnostics = new RecordUnionDiagnostics();
    for (BType memberType : memberTypes) {
        BRecordType recordMember = (BRecordType) memberType;
        if (recordMember.getFields().containsKey(fieldName.getValue())) {
            // Declared but the required-field lookup fails: the field is optional.
            BType individualFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordMember);
            if (individualFieldType == symTable.semanticError) {
                recordUnionDiagnostics.optionalInRecords.add(recordMember);
            }
        } else {
            recordUnionDiagnostics.undeclaredInRecords.add(recordMember);
        }
    }
    return recordUnionDiagnostics;
}
/**
 * Logs the most specific diagnostic for a failed field access used as a value
 * (rvalue): optional-field misuse, undeclared field on a sealed record,
 * invalid access on an open record, or the union-of-records variants.
 */
private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordVarRefType = (BRecordType) varRefType;
        boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue());
        if (isFieldDeclared) {
            // Field exists but is optional: plain `.` access is disallowed.
            dlog.error(fieldAccessExpr.pos,
                       DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS);
        } else if (recordVarRefType.sealed) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType);
        } else {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName,
                       varRefType);
        }
    } else {
        // Union of records: report per-bucket which members lack the field
        // and which only declare it optionally.
        LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
        RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName);
        if (recUnionInfo.hasUndeclaredAndOptional()) {
            dlog.error(fieldAccessExpr.pos,
                       DiagnosticErrorCode.UNDECLARED_AND_OPTIONAL_FIELDS_IN_UNION_OF_RECORDS, fieldName,
                       recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords),
                       recUnionInfo.recordsToString(recUnionInfo.optionalInRecords));
        } else if (recUnionInfo.hasUndeclared()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName,
                       recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords));
        } else if (recUnionInfo.hasOptional()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_IN_UNION_OF_RECORDS, fieldName,
                       recUnionInfo.recordsToString(recUnionInfo.optionalInRecords));
        }
    }
}
/**
 * Resolves the type of a plain field access ({@code x.f}) by the kind of the
 * accessed value: object, record, lax type (json/map-like), a chained lax
 * access, or XML. Logs diagnostics and returns {@code semanticError} when the
 * access is invalid.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.semanticError;
    if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
        actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
    } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
        actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        if (actualType != symTable.semanticError) {
            fieldAccessExpr.originalType = actualType;
            return actualType;
        }
        if (!fieldAccessExpr.isLValue) {
            // Rvalue access failed: emit the most specific diagnostic.
            logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName);
            return actualType;
        }
        // Lvalue access additionally permits optional fields.
        actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                       fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
        }
    } else if (types.isLax(varRefType)) {
        // Lax access (e.g. json): may produce an error at runtime; assignment
        // through plain field access is disallowed.
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos,
                       DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
                       varRefType);
            return symTable.semanticError;
        }
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access where the previous link was lax: propagate laxness
        // from the original (pre-error-union) type.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
        }
        actualType = symTable.xmlType;
        fieldAccessExpr.originalType = actualType;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                   varRefType);
    }
    return actualType;
}
/**
 * Resolves the XML namespace symbol for an ns-prefixed field access
 * (e.g. {@code x.ns:attr}) and attaches it to the access expression.
 * Logs an error when the prefix cannot be resolved in the current scope.
 *
 * <p>The parameter is already the ns-prefixed subtype, so the redundant local
 * re-alias present previously has been removed; behavior is unchanged.</p>
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));
    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                   fieldAccessExpr.nsPrefix);
    } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix refers to an imported module: resolve the namespace from a
        // constant declared in that module.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, fieldAccessExpr.nsPrefix.value,
                (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * Whether the access expression's recorded original type is a lax type.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Computes the member type produced by a lax field access on the given type:
 * json yields json, xml/xml-element yield string, a map yields its constraint,
 * and a union yields the (possibly collapsed) union of its members' results.
 * Returns {@code semanticError} for non-lax types.
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return symTable.stringType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) exprType;
            // A union equivalent to json behaves exactly like json.
            if (types.isSameType(symTable.jsonType, unionType)) {
                return symTable.jsonType;
            }
            LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
            unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType)));
            return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes);
    }
    return symTable.semanticError;
}
/**
 * Resolves the type of an optional field access ({@code x?.f}). Nil is first
 * stripped from a nillable parent to find the effective type, then the access
 * is resolved per kind (record, lax, chained lax), and nil is re-added to the
 * result when the navigation can short-circuit.
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                           Name fieldName) {
    BType actualType = symTable.semanticError;
    boolean nillableExprType = false;
    BType effectiveType = varRefType;
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
        if (memTypes.contains(symTable.nilType)) {
            // Strip nil: the access is checked against the non-nil remainder.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }
            effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                    BUnionType.create(null, nilRemovedSet);
        }
    }
    if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
        actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos,
                       DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                       varRefType, fieldName);
        }
        fieldAccessExpr.nilSafeNavigation = nillableExprType;
        fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isLax(effectiveType)) {
        // Lax access: result may include error when the access can fail.
        BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access where the previous link was lax: propagate laxness
        // from the original (pre-error-union) type.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos,
                   DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
    }
    // Re-add nil to the result when the parent could have been nil.
    if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }
    return actualType;
}
/**
 * Whether a lax access on the given type can produce an error value at
 * runtime: true for json and xml, false for maps, recursive over unions.
 */
private boolean accessCouldResultInError(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.MAP:
            return false;
        case TypeTags.UNION:
            return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError);
        default:
            return false;
    }
}
/**
 * Resolves the type of an index-based access ({@code x[i]}), dispatching on
 * the accessed value's kind: mapping, list, string, XML, or table. Nil is
 * first stripped from a nillable parent; the result is re-widened with nil at
 * the end when the navigation can short-circuit.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
    boolean nillableExprType = false;
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
        if (memTypes.contains(symTable.nilType)) {
            // Strip nil from the parent; nil-safe access is only permitted on
            // mappings and never as an assignment target.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }
            if (nillableExprType) {
                varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);
                if (!types.isSubTypeOfMapping(varRefType)) {
                    dlog.error(indexBasedAccessExpr.pos,
                               DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                               indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
                if (indexBasedAccessExpr.isLValue) {
                    dlog.error(indexBasedAccessExpr.pos,
                               DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                               indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
            }
        }
    }
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.semanticError;
    if (types.isSubTypeOfMapping(varRefType)) {
        // Mapping access: string index.
        checkExpr(indexExpr, this.env, symTable.stringType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);
        if (actualType == symTable.semanticError) {
            // A constant string key gives a more specific "undefined field" error.
            if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) {
                String fieldName = getConstFieldName(indexExpr);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                           fieldName, indexBasedAccessExpr.expr.getBType());
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
        indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isSubTypeOfList(varRefType)) {
        // List (array/tuple) access: int index.
        checkExpr(indexExpr, this.env, symTable.intType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
        indexBasedAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            // A constant int index gives a more specific out-of-range error.
            if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) {
                dlog.error(indexBasedAccessExpr.indexExpr.pos,
                           DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String access: yields a single character (char string); immutable.
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos,
                       DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                       indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }
        checkExpr(indexExpr, this.env, symTable.intType);
        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        indexBasedAccessExpr.originalType = symTable.charStringType;
        actualType = symTable.charStringType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        // XML access: yields an item of the same XML type; immutable.
        if (indexBasedAccessExpr.isLValue) {
            indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
            return actualType;
        }
        BType type = checkExpr(indexExpr, this.env, symTable.intType);
        if (type == symTable.semanticError) {
            return type;
        }
        indexBasedAccessExpr.originalType = varRefType;
        actualType = varRefType;
    } else if (varRefType.tag == TypeTags.TABLE) {
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                       varRefType);
            return symTable.semanticError;
        }
        BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType();
        BType keyTypeConstraint = tableType.keyTypeConstraint;
        if (tableType.keyTypeConstraint == null) {
            // No explicit key constraint: derive one from the key field names;
            // a keyless table cannot be member-accessed.
            keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()).
                    fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint);
            if (keyTypeConstraint == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos,
                           DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                           indexBasedAccessExpr.expr);
                return symTable.semanticError;
            }
        }
        if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
            checkExpr(indexExpr, this.env, keyTypeConstraint);
            if (indexExpr.getBType() == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                           keyTypeConstraint);
                return symTable.semanticError;
            }
        } else {
            // Multi-key access: each key expression must match the
            // corresponding member of the tuple key constraint.
            List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                    indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
            if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                           keyTypeConstraint);
                return symTable.semanticError;
            }
            for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                BLangExpression keyExpr = multiKeyExpressionList.get(i);
                checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                if (keyExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                               DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                               keyTypeConstraint);
                    return symTable.semanticError;
                }
            }
        }
        if (expType.tag != TypeTags.NONE) {
            BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
            if (resultType == symTable.semanticError) {
                return symTable.semanticError;
            }
        }
        BType constraint = tableType.constraint;
        // Table lookup may miss: the row type is widened with nil.
        actualType = addNilForNillableAccessType(constraint);
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (varRefType == symTable.semanticError) {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        return symTable.semanticError;
    } else {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                   indexBasedAccessExpr.expr.getBType());
        return symTable.semanticError;
    }
    // Re-add nil when the parent could have been nil.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }
    return actualType;
}
private Long getConstIndex(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value :
(Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
private String getConstFieldName(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value :
(String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
    /**
     * Type-checks a member access on an array, given the (already checked) type of the
     * index expression. Returns the element type when the access may be valid, or
     * {@code semanticError} when a constant index is provably out of bounds for a
     * fixed-length array.
     */
    private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                             BArrayType arrayType) {
        BType actualType = symTable.semanticError;
        switch (indexExprType.tag) {
            case TypeTags.INT:
                BLangExpression indexExpr = indexBasedAccess.indexExpr;
                // Non-constant index or open (unsized) array: no bounds check possible.
                if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) {
                    actualType = arrayType.eType;
                    break;
                }
                // Constant index on a fixed-length array: reject if out of range.
                actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
                break;
            case TypeTags.FINITE:
                // A finite index type is valid if at least one of its values is in range.
                BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
                boolean validIndexExists = false;
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    if (indexValue >= 0 &&
                            (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                        validIndexExists = true;
                        break;
                    }
                }
                if (!validIndexExists) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.UNION:
                // Merge all finite members of the union into one finite type and re-check.
                List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                        .filter(memType -> memType.tag == TypeTags.FINITE)
                        .map(matchedType -> (BFiniteType) matchedType)
                        .collect(Collectors.toList());
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
                if (elementType == symTable.semanticError) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
        }
        return actualType;
    }
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.ARRAY) {
return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
}
if (type.tag == TypeTags.TUPLE) {
return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
}
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Type-checks a member access on a tuple, dispatching on the index expression's
     * type: a constant int selects one member type, a non-constant int yields the
     * union of all member types, a finite index yields the union of the in-range
     * members, and a union index is handled member-by-member (merging its finite
     * constituents first).
     */
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.INT:
                if (isConst(indexExpr)) {
                    actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
                } else {
                    // Unknown index: any member of the tuple could be selected.
                    BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                    LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                    actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                            tupleTypes);
                }
                break;
            case TypeTags.FINITE:
                // Keep only the member types reachable by some value of the finite index.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Split the union index: finite members are merged and handled once;
                // other members are checked recursively.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // Any erroneous member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
tupleType.tupleTypes
.forEach(memberType -> {
if (memberType.tag == TypeTags.UNION) {
collectMemberTypes((BUnionType) memberType, memberTypes);
} else {
memberTypes.add(memberType);
}
});
return memberTypes;
}
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.MAP) {
BType constraint = ((BMapType) type).constraint;
return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
}
if (type.tag == TypeTags.RECORD) {
return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
}
BType fieldType;
boolean nonMatchedRecordExists = false;
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
nonMatchedRecordExists = true;
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
fieldType = fieldTypeMembers.iterator().next();
} else {
fieldType = BUnionType.create(null, fieldTypeMembers);
}
return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
    /**
     * Type-checks a member access on a record, dispatching on the index (key)
     * expression's type: a constant string resolves the specific field (required,
     * then optional, then rest); a non-constant string yields the union of every
     * field type; a finite key yields the union of the fields its values reach;
     * and a union key is handled member-by-member with its finite constituents
     * merged first.
     */
    private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.STRING:
                if (isConst(indexExpr)) {
                    String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                    // Required field: exact type, no nil added.
                    actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType != symTable.semanticError) {
                        return actualType;
                    }
                    actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        // Fall back to the rest-field type for open records.
                        actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (actualType == symTable.semanticError) {
                            return actualType;
                        }
                        if (actualType == symTable.neverType) {
                            return actualType;
                        }
                        return addNilForNillableAccessType(actualType);
                    }
                    // Optional field as lvalue keeps its declared type; otherwise add nil.
                    if (accessExpr.isLValue) {
                        return actualType;
                    }
                    return addNilForNillableAccessType(actualType);
                }
                // Unknown key: any field (or the rest type) may be accessed.
                LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                        .map(field -> field.type)
                        .collect(Collectors.toCollection(LinkedHashSet::new));
                if (record.restFieldType.tag != TypeTags.NONE) {
                    fieldTypes.add(record.restFieldType);
                }
                // The key may not match any field at runtime, so nil is possible.
                if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                    fieldTypes.add(symTable.nilType);
                }
                actualType = BUnionType.create(null, fieldTypes);
                break;
            case TypeTags.FINITE:
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    String fieldName = (String) ((BLangLiteral) finiteMember).value;
                    BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (fieldType == symTable.semanticError) {
                            fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        }
                        if (fieldType != symTable.semanticError) {
                            fieldType = addNilForNillableAccessType(fieldType);
                        }
                    }
                    if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                        continue;
                    }
                    possibleTypes.add(fieldType);
                }
                if (possibleTypes.isEmpty()) {
                    return symTable.semanticError;
                }
                if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                    possibleTypes.add(symTable.nilType);
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Split the union key: finite members are merged and handled once;
                // other members are checked recursively.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private List<BType> getTypesList(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
return new ArrayList<>(unionType.getMemberTypes());
} else {
return Lists.of(type);
}
}
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
BType patternExprType = pattern.expr.getBType();
matchExprTypes.addAll(getTypesList(patternExprType));
if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
return new LinkedHashSet<BType>() {
{
add(symTable.semanticError);
}
};
}
assignable = this.types.isAssignable(type, pattern.variable.getBType());
if (assignable) {
break;
}
}
if (!assignable) {
matchExprTypes.add(type);
}
}
return matchExprTypes;
}
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
if (encounteredTypes.contains(type)) {
return false;
}
encounteredTypes.add(type);
switch (type.tag) {
case TypeTags.UNION:
for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
if (couldHoldTableValues(bType1, encounteredTypes)) {
return true;
}
}
return false;
case TypeTags.MAP:
return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) type;
for (BField field : recordType.fields.values()) {
if (couldHoldTableValues(field.type, encounteredTypes)) {
return true;
}
}
return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
case TypeTags.ARRAY:
return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
case TypeTags.TUPLE:
for (BType bType : ((BTupleType) type).getTupleTypes()) {
if (couldHoldTableValues(bType, encounteredTypes)) {
return true;
}
}
return false;
}
return false;
}
private boolean isConst(BLangExpression expression) {
if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) {
return true;
}
if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
}
private Name getCurrentCompUnit(BLangNode node) {
return names.fromString(node.pos.lineRange().filePath());
}
    /**
     * Reduces a list of inferred types to a representative broad type: any type
     * assignable to another is dropped in favor of the broader one; a single
     * survivor is returned as-is, otherwise a union of the survivors. Mutates
     * {@code inferredTypeList} in place. A semantic error anywhere short-circuits.
     */
    private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
        for (int i = 0; i < inferredTypeList.size(); i++) {
            BType type = inferredTypeList.get(i);
            if (type.tag == TypeTags.SEMANTIC_ERROR) {
                return type;
            }
            for (int j = i + 1; j < inferredTypeList.size(); j++) {
                BType otherType = inferredTypeList.get(j);
                if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                    return otherType;
                }
                if (types.isAssignable(otherType, type)) {
                    // otherType is narrower: remove it and re-examine index j.
                    inferredTypeList.remove(j);
                    j -= 1;
                    continue;
                }
                if (types.isAssignable(type, otherType)) {
                    // type is narrower: remove it and restart from the same i.
                    inferredTypeList.remove(i);
                    i -= 1;
                    break;
                }
            }
        }
        if (inferredTypeList.size() == 1) {
            return inferredTypeList.get(0);
        }
        return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
    }
    /**
     * Infers and defines an anonymous record type for a record literal when the
     * expected type does not fully determine it. Walks the literal's fields
     * (key-value, spread, and var-name fields), accumulating per-key type info and
     * rest-field candidate types, then materializes a {@link BRecordType} with a
     * synthesized symbol, init function, and type definition. Returns
     * {@code semanticError} if any field or rest type is erroneous.
     */
    private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
        Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
        List<BType> restFieldTypes = new ArrayList<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            if (field.isKeyValueField()) {
                BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValue.key;
                BLangExpression expression = keyValue.valueExpr;
                BLangExpression keyExpr = key.expr;
                if (key.computedKey) {
                    // Computed keys ([expr]: value) contribute to the rest type,
                    // since the key is not known at compile time.
                    checkExpr(keyExpr, env, symTable.stringType);
                    BType exprType = checkExpr(expression, env, expType);
                    if (isUniqueType(restFieldTypes, exprType)) {
                        restFieldTypes.add(exprType);
                    }
                } else {
                    addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                           keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                                   checkExpr(expression, env, expType),
                                           true, keyValue.readonly);
                }
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType);
                int typeTag = type.tag;
                if (typeTag == TypeTags.MAP) {
                    // Spreading a map contributes its constraint to the rest type.
                    BType constraintType = ((BMapType) type).constraint;
                    if (isUniqueType(restFieldTypes, constraintType)) {
                        restFieldTypes.add(constraintType);
                    }
                }
                if (type.tag != TypeTags.RECORD) {
                    continue;
                }
                // Spreading a record contributes each of its fields individually.
                BRecordType recordType = (BRecordType) type;
                for (BField recField : recordType.fields.values()) {
                    addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                           !Symbols.isOptional(recField.symbol), false);
                }
                if (!recordType.sealed) {
                    BType restFieldType = recordType.restFieldType;
                    if (isUniqueType(restFieldTypes, restFieldType)) {
                        restFieldTypes.add(restFieldType);
                    }
                }
            } else {
                // Shorthand var-name field ({x} meaning {x: x}).
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                       varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                               checkExpr(varNameField, env, expType),
                                       true, varNameField.readonly);
            }
        }
        // Materialize the collected field info as record fields and symbols.
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        boolean allReadOnlyNonRestFields = true;
        for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
            FieldInfo fieldInfo = entry.getValue();
            List<BType> types = fieldInfo.types;
            if (types.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            String key = entry.getKey();
            Name fieldName = names.fromString(key);
            BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));
            Set<Flag> flags = new HashSet<>();
            if (fieldInfo.required) {
                flags.add(Flag.REQUIRED);
            } else {
                flags.add(Flag.OPTIONAL);
            }
            if (fieldInfo.readonly) {
                flags.add(Flag.READONLY);
            } else if (allReadOnlyNonRestFields) {
                allReadOnlyNonRestFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                    symTable.builtinPos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = fields;
        if (restFieldTypes.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }
        if (restFieldTypes.isEmpty()) {
            // No rest candidates: the inferred record is closed.
            recordType.sealed = true;
            recordType.restFieldType = symTable.noType;
        } else if (restFieldTypes.size() == 1) {
            recordType.restFieldType = restFieldTypes.get(0);
        } else {
            recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
        }
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        // A sealed record with only readonly fields (or a readonly context) is readonly.
        if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
            recordType.flags |= Flags.READONLY;
            recordSymbol.flags |= Flags.READONLY;
        }
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                       recordLiteral.pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                           names, symTable);
        TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
        return recordType;
    }
    /**
     * Creates an anonymous record type symbol with a synthesized nil-returning
     * init function, and defines that init function in the symbol's scope.
     */
    private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                     SymbolOrigin origin) {
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                           names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                           pkgID, null, env.scope.owner, location, origin);
        // Synthesized `init` takes no parameters and returns nil.
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
                false, symTable.builtinPos, VIRTUAL);
        initFuncSymbol.retType = symTable.nilType;
        recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                             bInvokableType, location);
        recordSymbol.scope = new Scope(recordSymbol);
        // The init function is registered under "<recordName>.<initName>".
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        return recordSymbol;
    }
private String getKeyName(BLangExpression key) {
return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value;
}
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
BType exprType, boolean required, boolean readonly) {
if (!nonRestFieldTypes.containsKey(keyString)) {
nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required,
readonly));
return;
}
FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
List<BType> typeList = fieldInfo.types;
if (isUniqueType(typeList, exprType)) {
typeList.add(exprType);
}
if (required && !fieldInfo.required) {
fieldInfo.required = true;
}
}
private boolean isUniqueType(List<BType> typeList, BType type) {
boolean isRecord = type.tag == TypeTags.RECORD;
for (BType bType : typeList) {
if (isRecord) {
if (type == bType) {
return false;
}
} else if (types.isSameType(type, bType)) {
return false;
}
}
return true;
}
    /**
     * Determines the effective type of an XML subtype literal against the expected
     * type: tries the mutable subtype first, then its immutable counterpart. For a
     * union expected type, collects all compatible members; exactly one compatible
     * candidate must remain, otherwise an incompatible-types or ambiguous-types
     * error is logged and {@code semanticError} returned.
     */
    private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                      BType expType) {
        if (expType == symTable.semanticError) {
            return expType;
        }
        boolean unionExpType = expType.tag == TypeTags.UNION;
        if (expType == mutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
            return mutableXmlSubType;
        }
        // Try the read-only (immutable) variant of the same XML subtype.
        BXMLSubType immutableXmlSubType = (BXMLSubType)
                ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                              anonymousModelHelper, names);
        if (expType == immutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
            return immutableXmlSubType;
        }
        if (!unionExpType) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        // Union expected type: gather every member the literal could satisfy.
        List<BType> compatibleTypes = new ArrayList<>();
        for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
            if (compatibleTypes.contains(memberType)) {
                continue;
            }
            if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
                compatibleTypes.add(memberType);
                continue;
            }
            if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
                compatibleTypes.add(mutableXmlSubType);
                continue;
            }
            if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
                compatibleTypes.add(immutableXmlSubType);
            }
        }
        if (compatibleTypes.isEmpty()) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        if (compatibleTypes.size() == 1) {
            return compatibleTypes.get(0);
        }
        // More than one candidate fits: the literal's type cannot be decided.
        dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
        return symTable.semanticError;
    }
private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
BType childType = modifiedChild.getBType();
if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
continue;
}
modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types,
(SelectivelyImmutableReferenceType) childType,
env, symTable, anonymousModelHelper, names));
if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
}
}
}
private void logUndefinedSymbolError(Location pos, String name) {
if (!missingNodesHelper.isMissingNode(name)) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
}
private void markTypeAsIsolated(BType actualType) {
actualType.flags |= Flags.ISOLATED;
actualType.tsymbol.flags |= Flags.ISOLATED;
}
private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) {
return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS);
}
private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) {
List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions;
BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env,
names.fromIdNode(userDefinedType.pkgAlias),
names.fromIdNode(userDefinedType.typeName));
for (BLangClassDefinition classDefinition : classDefinitions) {
if (classDefinition.symbol == symbol) {
return classDefinition;
}
}
return null;
}
    /**
     * Validates an object-constructor expression used in a readonly context. If
     * any field's type can never be immutable, the class is analyzed as-is and
     * (optionally) an error logged per offending field; otherwise the class and
     * its fields are marked readonly before analysis.
     */
    private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType,
                                                   BLangClassDefinition classDefForConstructor, SymbolEnv env,
                                                   boolean logErrors) {
        boolean hasNeverReadOnlyField = false;
        for (BField field : actualObjectType.fields.values()) {
            BType fieldType = field.type;
            if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
                // This field can never be readonly; analyze the class without
                // readonly marking. NOTE(review): analyzeObjectConstructor may run
                // once per offending field when logErrors is true — confirm intended.
                analyzeObjectConstructor(classDefForConstructor, env);
                hasNeverReadOnlyField = true;
                if (!logErrors) {
                    return;
                }
                dlog.error(field.pos,
                           DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                           fieldType);
            }
        }
        if (hasNeverReadOnlyField) {
            return;
        }
        // All fields can be immutable: mark the class, type and fields readonly,
        // then analyze the constructor with the updated flags.
        classDefForConstructor.flagSet.add(Flag.READONLY);
        actualObjectType.flags |= Flags.READONLY;
        actualObjectType.tsymbol.flags |= Flags.READONLY;
        ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                                  anonymousModelHelper, symTable, names, cIExpr.pos);
        analyzeObjectConstructor(classDefForConstructor, env);
    }
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
markTypeAsIsolated(actualObjectType);
return;
}
for (BField field : actualObjectType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
!types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
return;
}
}
markTypeAsIsolated(actualObjectType);
}
    /**
     * Marks {@code accessExpression} as a leaf access — the outermost link of a
     * field/index access chain. Group (parenthesized) parents are skipped; the
     * node is a leaf unless its effective parent is itself a field- or index-based
     * access expression.
     */
    private void markLeafNode(BLangAccessExpression accessExpression) {
        BLangNode parent = accessExpression.parent;
        if (parent == null) {
            accessExpression.leafNode = true;
            return;
        }
        NodeKind kind = parent.getKind();
        // Walk out of wrapping group expressions, e.g. ((a.b)).c .
        while (kind == NodeKind.GROUP_EXPR) {
            parent = parent.parent;
            if (parent == null) {
                // Ran out of parents inside groups: leaf. (kind stays GROUP_EXPR,
                // so the final check below is a harmless re-set of the flag.)
                accessExpression.leafNode = true;
                break;
            }
            kind = parent.getKind();
        }
        if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
            accessExpression.leafNode = true;
        }
    }
    /**
     * Mutable aggregate for a single record-literal key: the distinct types
     * observed for that key, plus whether the field is required and/or readonly.
     */
    private static class FieldInfo {
        List<BType> types;   // distinct types observed for this field
        boolean required;    // true once any occurrence makes the field mandatory
        boolean readonly;    // true when the field was declared readonly
        private FieldInfo(List<BType> types, boolean required, boolean readonly) {
            this.types = types;
            this.required = required;
            this.readonly = readonly;
        }
    }
    /**
     * Pairs a resolved field symbol with the type determined for an access,
     * allowing both results to be returned from a single lookup.
     */
    private static class TypeSymbolPair {
        private BVarSymbol fieldSymbol;   // symbol of the matched field, if any
        private BType determinedType;     // type resolved for the access
        public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) {
            this.fieldSymbol = fieldSymbol;
            this.determinedType = determinedType;
        }
    }
    /**
     * Collects, per record member of a union, whether an accessed field is
     * undeclared or optional, for composing a combined diagnostic message.
     */
    private static class RecordUnionDiagnostics {
        // Union members in which the accessed field is not declared at all.
        Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>();
        // Union members in which the accessed field is declared optional.
        Set<BRecordType> optionalInRecords = new LinkedHashSet<>();
        boolean hasUndeclaredAndOptional() {
            return undeclaredInRecords.size() > 0 && optionalInRecords.size() > 0;
        }
        boolean hasUndeclared() {
            return undeclaredInRecords.size() > 0;
        }
        boolean hasOptional() {
            return optionalInRecords.size() > 0;
        }
        // Joins record names with quote-aware separators; the fragments are meant
        // to sit between surrounding quotes in the diagnostic template.
        String recordsToString(Set<BRecordType> recordTypeSet) {
            StringBuilder recordNames = new StringBuilder();
            int recordSetSize = recordTypeSet.size();
            int index = 0;
            for (BRecordType recordType : recordTypeSet) {
                index++;
                recordNames.append(recordType.tsymbol.getName().getValue());
                if (recordSetSize > 1) {
                    if (index == recordSetSize - 1) {
                        // Before the final name: close quote, "and", reopen quote.
                        recordNames.append("', and '");
                    } else if (index < recordSetSize) {
                        recordNames.append("', '");
                    }
                }
            }
            return recordNames.toString();
        }
    }
}
|
https://stackoverflow.com/questions/35792590/how-to-check-number-of-digits-from-bigdecimal There is a trick to this as well: ```Java xInBigDecimal = xInBigDecimal.stripTrailingZeros(); int y = xInBigDecimal.precision() - xInBigDecimal.scale(); if (y > digitsTmp) { return 0; } ```
|
/**
 * Rounds {@code x} to {@code fractionDigits} decimal places using the
 * round-half-to-even (banker's) rounding rule.
 *
 * <p>Instead of the previous O(|fractionDigits|) BigDecimal multiply/divide
 * loops, this uses {@link java.math.BigDecimal#scale()} (count of fraction
 * digits) and {@link java.math.BigDecimal#precision()}: if the requested
 * fraction digits exceed the value's scale the value is already exact, and if
 * a negative digit count exceeds the number of integer digits the result is
 * necessarily zero.
 */
public static double round(double x, long fractionDigits) {
    // NaN, infinities and zero round to themselves.
    if (isSpecialValue(x)) {
        return x;
    }
    if (fractionDigits == 0) {
        // Round to an integer; Math.rint already applies half-even.
        return Math.rint(x);
    }
    if (fractionDigits > Integer.MAX_VALUE) {
        // More fraction digits than any double can carry: no-op.
        return x;
    }
    if (fractionDigits <= Integer.MIN_VALUE) {
        // Rounding position is left of any representable magnitude, so the
        // result is 0 (matches the old divide-loop, which drained the value
        // to zero). Using <= also avoids the -Integer.MIN_VALUE overflow below.
        return 0;
    }
    int fractionDigitsAsInt = (int) fractionDigits;
    BigDecimal xInBigDecimal = BigDecimal.valueOf(x);
    int scale = xInBigDecimal.scale();
    if (fractionDigitsAsInt > 0) {
        if (fractionDigitsAsInt > scale) {
            // x already has fewer fraction digits than requested.
            return x;
        }
    } else if (-fractionDigitsAsInt > (xInBigDecimal.precision() - scale)) {
        // |fractionDigits| exceeds the number of integer digits of x.
        return 0;
    }
    return xInBigDecimal.setScale(fractionDigitsAsInt, RoundingMode.HALF_EVEN).doubleValue();
}
|
while (digitsTmp++ < 0) {
|
/**
 * Rounds {@code x} to {@code fractionDigits} decimal places with the
 * round-half-to-even (banker's) rule. Non-finite values and zero are returned
 * unchanged; a digit count larger than the value's scale is a no-op; a
 * negative digit count exceeding the integer-digit count yields zero.
 */
public static double round(double x, long fractionDigits) {
    if (Double.isNaN(x) || Double.isInfinite(x) || x == 0.0d) {
        return x;
    }
    if (fractionDigits == 0) {
        // Math.rint rounds to the nearest integer with ties to even.
        return Math.rint(x);
    }
    if (fractionDigits > Integer.MAX_VALUE) {
        return x;
    }
    if (fractionDigits < Integer.MIN_VALUE) {
        return 0;
    }
    int digits = (int) fractionDigits;
    BigDecimal decimalValue = BigDecimal.valueOf(x);
    int scale = decimalValue.scale();
    if (digits > 0 && digits > scale) {
        // Requested more fraction digits than the value has: nothing to do.
        return x;
    }
    if (digits <= 0 && -digits > decimalValue.precision() - scale) {
        // Rounding position is left of all integer digits.
        return 0;
    }
    return decimalValue.setScale(digits, RoundingMode.HALF_EVEN).doubleValue();
}
|
/** Holder for the rounding helper's special-value check. */
class Round {
    /**
     * Returns true for inputs that trivially round to themselves:
     * positive/negative infinity, NaN, and exact zero.
     */
    private static boolean isSpecialValue(double x) {
        return Double.isInfinite(x) || Double.isNaN(x) || x == 0.0d;
    }
}
|
// Holder class with no members in this snippet; presumably the special-value
// check was inlined into round() after the change — TODO confirm against the full file.
class Round {
}
|
We should move these cases into the `types` list. At line 225, test cases are then created both with and without nullability.
|
private static List<LogicalType> generateTestData() {
List<LogicalType> types =
Arrays.asList(
new BooleanType(),
new TinyIntType(),
new SmallIntType(),
new IntType(),
new BigIntType(),
new FloatType(),
new DoubleType(),
new DateType(),
CharType.ofEmptyLiteral(),
new CharType(),
new CharType(5),
VarCharType.ofEmptyLiteral(),
new VarCharType(),
new VarCharType(5),
BinaryType.ofEmptyLiteral(),
new BinaryType(),
new BinaryType(100),
VarBinaryType.ofEmptyLiteral(),
new VarBinaryType(),
new VarBinaryType(100),
new DecimalType(10),
new DecimalType(15, 5),
new TimeType(),
new TimeType(3),
new TimestampType(),
new TimestampType(3),
new TimestampType(false, 3),
new ZonedTimestampType(),
new ZonedTimestampType(3),
new LocalZonedTimestampType(),
new LocalZonedTimestampType(3),
new LocalZonedTimestampType(false, 3),
new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
new MapType(new BigIntType(), new IntType(false)),
new MapType(CharType.ofEmptyLiteral(), CharType.ofEmptyLiteral()),
new MapType(VarCharType.ofEmptyLiteral(), VarCharType.ofEmptyLiteral()),
new MapType(BinaryType.ofEmptyLiteral(), BinaryType.ofEmptyLiteral()),
new MapType(VarBinaryType.ofEmptyLiteral(), VarBinaryType.ofEmptyLiteral()),
new MapType(new TimestampType(false, 3), new LocalZonedTimestampType()),
new ArrayType(new IntType(false)),
new ArrayType(new TimestampType()),
new ArrayType(new LocalZonedTimestampType(false, 3)),
new ArrayType(CharType.ofEmptyLiteral()),
new ArrayType(VarCharType.ofEmptyLiteral()),
new ArrayType(BinaryType.ofEmptyLiteral()),
new ArrayType(VarBinaryType.ofEmptyLiteral()),
new MultisetType(new IntType(false)),
new MultisetType(new TimestampType()),
new MultisetType(new TimestampType(true, 3)),
new MultisetType(CharType.ofEmptyLiteral()),
new MultisetType(VarCharType.ofEmptyLiteral()),
new MultisetType(BinaryType.ofEmptyLiteral()),
new MultisetType(VarBinaryType.ofEmptyLiteral()),
RowType.of(new BigIntType(), new IntType(false), new VarCharType(200)),
RowType.of(
new LogicalType[] {
new BigIntType(), new IntType(false), new VarCharType(200)
},
new String[] {"f1", "f2", "f3"}),
RowType.of(
new TimestampType(false, 3), new LocalZonedTimestampType(false, 3)),
RowType.of(
CharType.ofEmptyLiteral(),
VarCharType.ofEmptyLiteral(),
BinaryType.ofEmptyLiteral(),
VarBinaryType.ofEmptyLiteral()),
new RowType(
Arrays.asList(
new RowType.RowField("ID", new BigIntType(), "ID desc"),
new RowType.RowField(
"Name", new VarCharType(20), "Name desc"))),
new RawType<>(LocalDateTime.class, LocalDateTimeSerializer.INSTANCE),
new RawType<>(
Row.class,
ExternalSerializer.of(
DataTypes.ROW(DataTypes.INT(), DataTypes.STRING()))));
List<LogicalType> testTypes =
Stream.concat(
types.stream().map(type -> type.copy(true)),
types.stream().map(type -> type.copy(false)))
.collect(Collectors.toList());
testTypes.add(new NullType());
testTypes.add(new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.MONTH));
testTypes.add(
new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH));
testTypes.add(new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_MINUTE));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.HOUR));
testTypes.add(
new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.HOUR_TO_MINUTE));
testTypes.add(
new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.HOUR_TO_SECOND));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.MINUTE));
testTypes.add(
new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.MINUTE_TO_SECOND));
testTypes.add(new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.SECOND));
return testTypes;
}
|
testTypes.add(new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.MONTH));
|
/**
 * Produces the parameterized test data for the serde round-trip test: one representative
 * instance of every supported {@link LogicalType}, each contributed twice (an explicitly
 * nullable and an explicitly non-nullable copy), plus a trailing {@link NullType}, which only
 * exists in its nullable form.
 */
private static List<LogicalType> generateTestData() {
    List<LogicalType> types =
            Arrays.asList(
                    // atomic / numeric types
                    new BooleanType(),
                    new TinyIntType(),
                    new SmallIntType(),
                    new IntType(),
                    new BigIntType(),
                    new FloatType(),
                    new DoubleType(),
                    new DateType(),
                    // character strings: zero-length literal, default length, explicit length
                    CharType.ofEmptyLiteral(),
                    new CharType(),
                    new CharType(5),
                    VarCharType.ofEmptyLiteral(),
                    new VarCharType(),
                    new VarCharType(5),
                    // binary strings
                    BinaryType.ofEmptyLiteral(),
                    new BinaryType(),
                    new BinaryType(100),
                    VarBinaryType.ofEmptyLiteral(),
                    new VarBinaryType(),
                    new VarBinaryType(100),
                    // exact numerics with precision/scale
                    new DecimalType(10),
                    new DecimalType(15, 5),
                    // time/timestamp variants with precision, nullability and timestamp kind
                    new TimeType(),
                    new TimeType(3),
                    new TimestampType(),
                    new TimestampType(3),
                    new TimestampType(false, 3),
                    new ZonedTimestampType(),
                    new ZonedTimestampType(3),
                    new LocalZonedTimestampType(),
                    new LocalZonedTimestampType(3),
                    new LocalZonedTimestampType(false, 3),
                    new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
                    // collection types: maps, arrays, multisets
                    new MapType(new BigIntType(), new IntType(false)),
                    new MapType(CharType.ofEmptyLiteral(), CharType.ofEmptyLiteral()),
                    new MapType(VarCharType.ofEmptyLiteral(), VarCharType.ofEmptyLiteral()),
                    new MapType(BinaryType.ofEmptyLiteral(), BinaryType.ofEmptyLiteral()),
                    new MapType(VarBinaryType.ofEmptyLiteral(), VarBinaryType.ofEmptyLiteral()),
                    new MapType(new TimestampType(false, 3), new LocalZonedTimestampType()),
                    new ArrayType(new IntType(false)),
                    new ArrayType(new TimestampType()),
                    new ArrayType(new LocalZonedTimestampType(false, 3)),
                    new ArrayType(CharType.ofEmptyLiteral()),
                    new ArrayType(VarCharType.ofEmptyLiteral()),
                    new ArrayType(BinaryType.ofEmptyLiteral()),
                    new ArrayType(VarBinaryType.ofEmptyLiteral()),
                    new MultisetType(new IntType(false)),
                    new MultisetType(new TimestampType()),
                    new MultisetType(new TimestampType(true, 3)),
                    new MultisetType(CharType.ofEmptyLiteral()),
                    new MultisetType(VarCharType.ofEmptyLiteral()),
                    new MultisetType(BinaryType.ofEmptyLiteral()),
                    new MultisetType(VarBinaryType.ofEmptyLiteral()),
                    // row types: unnamed fields, named fields, fields with descriptions
                    RowType.of(new BigIntType(), new IntType(false), new VarCharType(200)),
                    RowType.of(
                            new LogicalType[] {
                                new BigIntType(), new IntType(false), new VarCharType(200)
                            },
                            new String[] {"f1", "f2", "f3"}),
                    RowType.of(
                            new TimestampType(false, 3), new LocalZonedTimestampType(false, 3)),
                    RowType.of(
                            CharType.ofEmptyLiteral(),
                            VarCharType.ofEmptyLiteral(),
                            BinaryType.ofEmptyLiteral(),
                            VarBinaryType.ofEmptyLiteral()),
                    new RowType(
                            Arrays.asList(
                                    new RowType.RowField("ID", new BigIntType(), "ID desc"),
                                    new RowType.RowField(
                                            "Name", new VarCharType(20), "Name desc"))),
                    // raw types backed by explicit serializers
                    new RawType<>(LocalDateTime.class, LocalDateTimeSerializer.INSTANCE),
                    new RawType<>(
                            Row.class,
                            ExternalSerializer.of(
                                    DataTypes.ROW(DataTypes.INT(), DataTypes.STRING()))),
                    // interval types, one per supported resolution
                    new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.MONTH),
                    new YearMonthIntervalType(
                            YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH),
                    new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR),
                    new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY),
                    new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR),
                    new DayTimeIntervalType(
                            DayTimeIntervalType.DayTimeResolution.DAY_TO_MINUTE),
                    new DayTimeIntervalType(
                            DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND),
                    new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.HOUR),
                    new DayTimeIntervalType(
                            DayTimeIntervalType.DayTimeResolution.HOUR_TO_MINUTE),
                    new DayTimeIntervalType(
                            DayTimeIntervalType.DayTimeResolution.HOUR_TO_SECOND),
                    new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.MINUTE),
                    new DayTimeIntervalType(
                            DayTimeIntervalType.DayTimeResolution.MINUTE_TO_SECOND),
                    new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.SECOND));
    // Duplicate every type as an explicitly nullable and an explicitly non-nullable copy.
    List<LogicalType> testTypes =
            Stream.concat(
                            types.stream().map(type -> type.copy(true)),
                            types.stream().map(type -> type.copy(false)))
                    .collect(Collectors.toList());
    // NullType has no non-nullable variant, so it is appended once at the end.
    testTypes.add(new NullType());
    return testTypes;
}
|
/** Round-trip and failure-path tests for the JSON (de)serializers of {@link LogicalType}. */
class LogicalTypeJsonSerDeTest {
    // ObjectMapper pre-configured with the LogicalType ser/de module under test.
    private final ObjectMapper mapper = buildObjectMapper();

    /** Serializes each generated type to JSON and expects deserialization to restore it. */
    @ParameterizedTest
    @MethodSource("generateTestData")
    void testLogicalTypeJsonSerDe(LogicalType logicalType) throws IOException {
        String json = mapper.writeValueAsString(logicalType);
        LogicalType actualType = mapper.readValue(json, LogicalType.class);
        assertThat(actualType).isEqualTo(logicalType);
    }

    /** Structured types are unsupported by the serializer and must fail with a clear error. */
    @Test
    void testSerializeUnsupportedType() {
        LogicalType unsupportedType =
                StructuredType.newBuilder(DataTypeJsonSerdeTest.PojoClass.class)
                        .attributes(
                                Arrays.asList(
                                        new StructuredType.StructuredAttribute(
                                                "f0", new IntType(true)),
                                        new StructuredType.StructuredAttribute(
                                                "f1", new BigIntType(true)),
                                        new StructuredType.StructuredAttribute(
                                                "f2", new VarCharType(200), "desc")))
                        .build();
        assertThatThrownBy(() -> mapper.writeValueAsString(unsupportedType))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                String.format(
                                        "Unable to serialize logical type '%s'. Please check the documentation for supported types.",
                                        unsupportedType.asSummaryString())));
    }

    /** JSON naming an unsupported type root must fail deserialization with a clear error. */
    @Test
    void testDeserializeUnsupportedType() {
        String unsupportedTypeString = "STRUCTURED_TYPE";
        String json =
                String.format(
                        "{\"%s\": \"%s\", \"%s\": %s}",
                        "type", unsupportedTypeString, "nullable", "true");
        assertThatThrownBy(() -> mapper.readValue(json, LogicalType.class))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                String.format(
                                        "Unable to deserialize a logical type of type root '%s'. Please check the documentation for supported types.",
                                        unsupportedTypeString)));
    }

    /** JSON that lacks the expected structure entirely must be rejected. */
    @Test
    void testDeserializeUnsupportedJson() {
        String json = String.format("{\"%s\": \"%s\"}", "unknown", "whatever");
        assertThatThrownBy(() -> mapper.readValue(json, LogicalType.class))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                "Cannot parse this Json String"));
    }

    // Registers the serializer/deserializer pair under test on a fresh ObjectMapper.
    private ObjectMapper buildObjectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        module.addSerializer(new LogicalTypeJsonSerializer());
        module.addDeserializer(LogicalType.class, new LogicalTypeJsonDeserializer());
        mapper.registerModule(module);
        return mapper;
    }

    /** Testing class. */
    public static class PojoClass {
        public int f0;
        public long f1;
        public String f2;
    }
}
|
/** Round-trip and failure-path tests for the JSON (de)serializers of {@link LogicalType}. */
class LogicalTypeJsonSerDeTest {
    // ObjectMapper pre-configured with the LogicalType ser/de module under test.
    private final ObjectMapper mapper = buildObjectMapper();

    /** Serializes each generated type to JSON and expects deserialization to restore it. */
    @ParameterizedTest
    @MethodSource("generateTestData")
    void testLogicalTypeJsonSerDe(LogicalType logicalType) throws IOException {
        String json = mapper.writeValueAsString(logicalType);
        LogicalType actualType = mapper.readValue(json, LogicalType.class);
        assertThat(actualType).isEqualTo(logicalType);
    }

    /** Structured types are unsupported by the serializer and must fail with a clear error. */
    @Test
    void testSerializeUnsupportedType() {
        LogicalType unsupportedType =
                StructuredType.newBuilder(DataTypeJsonSerdeTest.PojoClass.class)
                        .attributes(
                                Arrays.asList(
                                        new StructuredType.StructuredAttribute(
                                                "f0", new IntType(true)),
                                        new StructuredType.StructuredAttribute(
                                                "f1", new BigIntType(true)),
                                        new StructuredType.StructuredAttribute(
                                                "f2", new VarCharType(200), "desc")))
                        .build();
        assertThatThrownBy(() -> mapper.writeValueAsString(unsupportedType))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                String.format(
                                        "Unable to serialize logical type '%s'. Please check the documentation for supported types.",
                                        unsupportedType.asSummaryString())));
    }

    /** JSON naming an unsupported type root must fail deserialization with a clear error. */
    @Test
    void testDeserializeUnsupportedType() {
        String unsupportedTypeString = "STRUCTURED_TYPE";
        String json =
                String.format(
                        "{\"%s\": \"%s\", \"%s\": %s}",
                        "type", unsupportedTypeString, "nullable", "true");
        assertThatThrownBy(() -> mapper.readValue(json, LogicalType.class))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                String.format(
                                        "Unable to deserialize a logical type of type root '%s'. Please check the documentation for supported types.",
                                        unsupportedTypeString)));
    }

    /** JSON that lacks the expected structure entirely must be rejected. */
    @Test
    void testDeserializeUnsupportedJson() {
        String json = String.format("{\"%s\": \"%s\"}", "unknown", "whatever");
        assertThatThrownBy(() -> mapper.readValue(json, LogicalType.class))
                .satisfies(
                        FlinkAssertions.anyCauseMatches(
                                UnsupportedOperationException.class,
                                "Cannot parse this Json String"));
    }

    // Registers the serializer/deserializer pair under test on a fresh ObjectMapper.
    private ObjectMapper buildObjectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        module.addSerializer(new LogicalTypeJsonSerializer());
        module.addDeserializer(LogicalType.class, new LogicalTypeJsonDeserializer());
        mapper.registerModule(module);
        return mapper;
    }

    /** Testing class. */
    public static class PojoClass {
        public int f0;
        public long f1;
        public String f2;
    }
}
```suggestion throw new RpcLoaderException( ```
|
/**
 * Loads the Akka-based {@link RpcSystem} in IDE/test setups where the flink-rpc-akka jar has
 * not been built: locates the flink-rpc-akka module in the surrounding source tree, invokes
 * Maven to materialize the module's runtime dependencies (cached across runs), and loads the
 * classes through a dedicated child classloader.
 *
 * @param config Flink configuration (not used by this loader)
 * @return the loaded RPC system, wrapped so the classloader is cleaned up on close
 * @throws RpcLoaderException if the RPC system could not be initialized
 */
public RpcSystem loadRpcSystem(Configuration config) {
    try {
        LOG.debug(
                "Using Fallback AkkaRpcSystemLoader; this loader will invoke maven to retrieve the dependencies of flink-rpc-akka.");

        final ClassLoader flinkClassLoader = RpcSystem.class.getClassLoader();

        final Path akkaRpcModuleDirectory =
                findAkkaRpcModuleDirectory(getCurrentWorkingDirectory());

        final Path akkaRpcModuleClassesDirectory =
                akkaRpcModuleDirectory.resolve(Paths.get("target", "classes"));
        final Path akkaRpcModuleDependenciesDirectory =
                akkaRpcModuleDirectory.resolve(Paths.get("target", "dependencies"));

        // Only invoke Maven when the dependencies have not been downloaded before.
        if (!Files.exists(akkaRpcModuleDependenciesDirectory)) {
            int exitCode =
                    downloadDependencies(
                            akkaRpcModuleDirectory, akkaRpcModuleDependenciesDirectory);
            if (exitCode != 0) {
                // Use the loader's dedicated exception type (not a generic RuntimeException)
                // so callers get the standard hint from the catch block below.
                throw new RpcLoaderException(
                        "Could not download dependencies of flink-rpc-akka, please see the log output for details.");
            }
        } else {
            LOG.debug(
                    "Re-using previously downloaded flink-rpc-akka dependencies. If you are experiencing strange issues, try clearing '{}'.",
                    akkaRpcModuleDependenciesDirectory);
        }

        // Classpath of the child classloader: module classes + all downloaded dependency jars.
        final List<URL> urls = new ArrayList<>();
        urls.add(akkaRpcModuleClassesDirectory.toUri().toURL());
        try (final Stream<Path> files = Files.list(akkaRpcModuleDependenciesDirectory)) {
            final List<Path> collect =
                    files.filter(path -> path.getFileName().toString().endsWith(".jar"))
                            .collect(Collectors.toList());
            for (Path path : collect) {
                urls.add(path.toUri().toURL());
            }
        }

        final SubmoduleClassLoader submoduleClassLoader =
                new SubmoduleClassLoader(urls.toArray(new URL[0]), flinkClassLoader);

        return new CleanupOnCloseRpcSystem(
                ServiceLoader.load(RpcSystem.class, submoduleClassLoader).iterator().next(),
                submoduleClassLoader,
                null);
    } catch (Exception e) {
        throw new RpcLoaderException(
                String.format(
                        "Could not initialize RPC system. Run '%s' on the command-line instead.",
                        AkkaRpcSystemLoader.HINT_USAGE),
                e);
    }
}
|
throw new RuntimeException(
|
/**
 * Loads the Akka-based {@link RpcSystem} by locating the flink-rpc-akka module in the source
 * tree, materializing its runtime dependencies via Maven (cached across runs), and loading the
 * classes through a dedicated child classloader that is cleaned up when the system is closed.
 *
 * @param config Flink configuration (not used by this loader)
 * @return the loaded RPC system
 * @throws RpcLoaderException if the RPC system could not be initialized
 */
public RpcSystem loadRpcSystem(Configuration config) {
    try {
        LOG.debug(
                "Using Fallback AkkaRpcSystemLoader; this loader will invoke maven to retrieve the dependencies of flink-rpc-akka.");

        final ClassLoader flinkClassLoader = RpcSystem.class.getClassLoader();

        final Path akkaRpcModuleDirectory =
                findAkkaRpcModuleDirectory(getCurrentWorkingDirectory());

        final Path akkaRpcModuleClassesDirectory =
                akkaRpcModuleDirectory.resolve(Paths.get("target", "classes"));
        final Path akkaRpcModuleDependenciesDirectory =
                akkaRpcModuleDirectory.resolve(Paths.get("target", "dependencies"));

        // Only invoke Maven when the dependencies have not been downloaded before.
        if (!Files.exists(akkaRpcModuleDependenciesDirectory)) {
            int exitCode =
                    downloadDependencies(
                            akkaRpcModuleDirectory, akkaRpcModuleDependenciesDirectory);
            if (exitCode != 0) {
                throw new RpcLoaderException(
                        "Could not download dependencies of flink-rpc-akka, please see the log output for details.");
            }
        } else {
            LOG.debug(
                    "Re-using previously downloaded flink-rpc-akka dependencies. If you are experiencing strange issues, try clearing '{}'.",
                    akkaRpcModuleDependenciesDirectory);
        }

        // Classpath of the child classloader: module classes + all downloaded dependency jars.
        final List<URL> urls = new ArrayList<>();
        urls.add(akkaRpcModuleClassesDirectory.toUri().toURL());
        try (final Stream<Path> files = Files.list(akkaRpcModuleDependenciesDirectory)) {
            final List<Path> collect =
                    files.filter(path -> path.getFileName().toString().endsWith(".jar"))
                            .collect(Collectors.toList());
            for (Path path : collect) {
                urls.add(path.toUri().toURL());
            }
        }

        final SubmoduleClassLoader submoduleClassLoader =
                new SubmoduleClassLoader(urls.toArray(new URL[0]), flinkClassLoader);

        return new CleanupOnCloseRpcSystem(
                ServiceLoader.load(RpcSystem.class, submoduleClassLoader).iterator().next(),
                submoduleClassLoader,
                null);
    } catch (Exception e) {
        throw new RpcLoaderException(
                String.format(
                        "Could not initialize RPC system. Run '%s' on the command-line instead.",
                        AkkaRpcSystemLoader.HINT_USAGE),
                e);
    }
}
|
/**
 * Fallback {@link RpcSystemLoader} for development setups: locates the flink-rpc-akka module in
 * the surrounding source tree and uses Maven to materialize its runtime dependencies.
 */
class FallbackAkkaRpcSystemLoader implements RpcSystemLoader {
    private static final Logger LOG = LoggerFactory.getLogger(FallbackAkkaRpcSystemLoader.class);

    // Module names used to locate the flink-rpc-akka sources in the directory tree.
    private static final String MODULE_FLINK_RPC = "flink-rpc";
    private static final String MODULE_FLINK_RPC_AKKA = "flink-rpc-akka";

    // BUGFIX: removed a stray @Override that annotated this private static method; @Override is
    // illegal there (nothing can be overridden) and caused a compile error.
    /** Returns the JVM's current working directory as an absolute path. */
    private static Path getCurrentWorkingDirectory() {
        return Paths.get("").toAbsolutePath();
    }

    /**
     * Walks up the directory hierarchy from the given candidate until a "flink-rpc" directory is
     * found and returns the flink-rpc-akka module directory contained in it.
     */
    private static Path findAkkaRpcModuleDirectory(Path currentParentCandidate) throws IOException {
        try (Stream<Path> directoryContents = Files.list(currentParentCandidate)) {
            final Optional<Path> flinkRpcModuleDirectory =
                    directoryContents
                            .filter(path -> path.getFileName().toString().equals(MODULE_FLINK_RPC))
                            .findFirst();
            if (flinkRpcModuleDirectory.isPresent()) {
                return flinkRpcModuleDirectory
                        .map(path -> path.resolve(Paths.get(MODULE_FLINK_RPC_AKKA)))
                        .get();
            }
        }
        // Not found at this level; recurse into the parent directory.
        return findAkkaRpcModuleDirectory(currentParentCandidate.getParent());
    }

    /**
     * Runs "mvn dependency:copy-dependencies" in the given module directory to copy the module's
     * runtime dependencies into the target directory.
     *
     * @return the Maven process exit code (0 on success)
     */
    private static int downloadDependencies(Path workingDirectory, Path targetDirectory)
            throws IOException, InterruptedException {
        final String mvnExecutable = OperatingSystem.isWindows() ? "mvn.bat" : "mvn";
        final ProcessBuilder mvn =
                new ProcessBuilder()
                        .directory(workingDirectory.toFile())
                        .command(
                                mvnExecutable,
                                "dependency:copy-dependencies",
                                "-DincludeScope=runtime",
                                "-DoutputDirectory=" + targetDirectory)
                        .redirectOutput(ProcessBuilder.Redirect.INHERIT);
        return mvn.start().waitFor();
    }
}
|
/**
 * Fallback {@link RpcSystemLoader} for development setups: locates the flink-rpc-akka module in
 * the surrounding source tree and uses Maven to materialize its runtime dependencies.
 */
class FallbackAkkaRpcSystemLoader implements RpcSystemLoader {
    private static final Logger LOG = LoggerFactory.getLogger(FallbackAkkaRpcSystemLoader.class);

    // Module names used to locate the flink-rpc-akka sources in the directory tree.
    private static final String MODULE_FLINK_RPC = "flink-rpc";
    private static final String MODULE_FLINK_RPC_AKKA = "flink-rpc-akka";

    // NOTE(review): @Override on a private static method does not compile — it looks like a
    // leftover from a method that was removed above; confirm against the original file.
    @Override
    private static Path getCurrentWorkingDirectory() {
        return Paths.get("").toAbsolutePath();
    }

    /**
     * Walks up the directory hierarchy from the given candidate until a "flink-rpc" directory is
     * found and returns the flink-rpc-akka module directory contained in it.
     */
    private static Path findAkkaRpcModuleDirectory(Path currentParentCandidate) throws IOException {
        try (Stream<Path> directoryContents = Files.list(currentParentCandidate)) {
            final Optional<Path> flinkRpcModuleDirectory =
                    directoryContents
                            .filter(path -> path.getFileName().toString().equals(MODULE_FLINK_RPC))
                            .findFirst();
            if (flinkRpcModuleDirectory.isPresent()) {
                return flinkRpcModuleDirectory
                        .map(path -> path.resolve(Paths.get(MODULE_FLINK_RPC_AKKA)))
                        .get();
            }
        }
        // Not found at this level; recurse into the parent directory.
        return findAkkaRpcModuleDirectory(currentParentCandidate.getParent());
    }

    /**
     * Runs "mvn dependency:copy-dependencies" in the given module directory to copy the module's
     * runtime dependencies into the target directory.
     *
     * @return the Maven process exit code (0 on success)
     */
    private static int downloadDependencies(Path workingDirectory, Path targetDirectory)
            throws IOException, InterruptedException {
        final String mvnExecutable = OperatingSystem.isWindows() ? "mvn.bat" : "mvn";
        final ProcessBuilder mvn =
                new ProcessBuilder()
                        .directory(workingDirectory.toFile())
                        .command(
                                mvnExecutable,
                                "dependency:copy-dependencies",
                                "-DincludeScope=runtime",
                                "-DoutputDirectory=" + targetDirectory)
                        .redirectOutput(ProcessBuilder.Redirect.INHERIT);
        return mvn.start().waitFor();
    }
}
|
Is the non-http branch below actually reachable? The default `from` is always an https:// GitHub URL, so don't the checks above preclude this condition — or can --prismLocation be a local file path? If so, a comment here would make that explicit.
|
/**
 * Resolves the Prism executable, downloading and caching it under the user's home directory if
 * needed. The default source is the Apache Beam GitHub release matching the SDK version;
 * --prismLocation may override it with a custom URL or a local file path (which is why the
 * non-"http" branch below is reachable).
 *
 * @return the absolute path of the cached Prism executable
 * @throws IOException if the artifact cannot be downloaded or copied
 */
String resolve() throws IOException {
  String from =
      String.format("%s/v%s/%s.zip", GITHUB_DOWNLOAD_PREFIX, getSDKVersion(), buildFileName());

  if (!Strings.isNullOrEmpty(options.getPrismLocation())) {
    // Release *tag* pages are human-readable HTML, not downloadable artifacts.
    // BUGFIX: the message template previously had no %s placeholder and interpolated 'from'
    // (the default GitHub URL) instead of the offending user-supplied location.
    checkArgument(
        !options.getPrismLocation().startsWith(GITHUB_TAG_PREFIX),
        "Provided --prismLocation URL is not an Apache Beam Github "
            + "Release page URL or download URL: %s",
        options.getPrismLocation());

    from = options.getPrismLocation();
  }

  String fromFileName = getNameWithoutExtension(from);
  Path to = Paths.get(userHome(), PRISM_BIN_PATH, fromFileName);

  // Reuse a previously resolved binary if it is already cached.
  if (Files.exists(to)) {
    return to.toString();
  }

  createDirectoryIfNeeded(to);

  // An http(s) URL is downloaded; anything else is treated as a local file path.
  String result =
      from.startsWith("http") ? resolve(new URL(from), to) : resolve(Paths.get(from), to);
  checkState(Files.exists(to), "Resolved location does not exist: %s", result);
  return result;
}
|
if (from.startsWith("http")) {
|
/**
 * Resolves the Prism executable, downloading and caching it under the user's home directory if
 * needed. The default source is the Apache Beam GitHub release matching the SDK version;
 * --prismLocation may override it with a custom URL or a local file path (which is why the
 * non-"http" branch below is reachable).
 *
 * @return the absolute path of the cached Prism executable
 * @throws IOException if the artifact cannot be downloaded or copied
 */
String resolve() throws IOException {
  String from =
      String.format("%s/v%s/%s.zip", GITHUB_DOWNLOAD_PREFIX, getSDKVersion(), buildFileName());

  if (!Strings.isNullOrEmpty(options.getPrismLocation())) {
    // NOTE(review): the message template has no %s placeholder and passes 'from' (the default
    // GitHub URL) instead of the offending options.getPrismLocation() — confirm and fix.
    checkArgument(
        !options.getPrismLocation().startsWith(GITHUB_TAG_PREFIX),
        "Provided --prismLocation URL is not an Apache Beam Github "
            + "Release page URL or download URL: ",
        from);

    from = options.getPrismLocation();
  }

  String fromFileName = getNameWithoutExtension(from);
  Path to = Paths.get(userHome(), PRISM_BIN_PATH, fromFileName);

  // Reuse a previously resolved binary if it is already cached.
  if (Files.exists(to)) {
    return to.toString();
  }

  createDirectoryIfNeeded(to);

  // An http(s) URL is downloaded; anything else is treated as a local file path.
  if (from.startsWith("http")) {
    String result = resolve(new URL(from), to);
    checkState(Files.exists(to), "Resolved location does not exist: %s", result);
    return result;
  }

  String result = resolve(Paths.get(from), to);
  checkState(Files.exists(to), "Resolved location does not exist: %s", result);
  return result;
}
|
class PrismLocator {
static final String OS_NAME_PROPERTY = "os.name";
static final String ARCH_PROPERTY = "os.arch";
static final String USER_HOME_PROPERTY = "user.home";
private static final String ZIP_EXT = "zip";
private static final String SHA512_EXT = "sha512";
private static final ReleaseInfo RELEASE_INFO = ReleaseInfo.getReleaseInfo();
private static final String PRISM_BIN_PATH = ".apache_beam/cache/prism/bin";
private static final Set<PosixFilePermission> PERMS =
PosixFilePermissions.fromString("rwxr-xr-x");
private static final String GITHUB_DOWNLOAD_PREFIX =
"https:
private static final String GITHUB_TAG_PREFIX = "https:
private final PrismPipelineOptions options;
PrismLocator(PrismPipelineOptions options) {
this.options = options;
}
/**
* Downloads and prepares a Prism executable for use with the {@link PrismRunner}, executed by the
* {@link PrismExecutor}. The returned {@link String} is the absolute path to the Prism
* executable.
*/
private String resolve(URL from, Path to) throws IOException {
if (from.toString().startsWith(GITHUB_DOWNLOAD_PREFIX)) {
URL shaSumReference = new URL(from + "." + SHA512_EXT);
validateShaSum512(shaSumReference, from);
}
BiConsumer<URL, Path> downloadFn = PrismLocator::download;
if (from.getPath().endsWith(ZIP_EXT)) {
downloadFn = PrismLocator::unzip;
}
downloadFn.accept(from, to);
Files.setPosixFilePermissions(to, PERMS);
return to.toString();
}
private String resolve(Path from, Path to) throws IOException {
BiConsumer<InputStream, Path> copyFn = PrismLocator::copy;
if (from.endsWith(ZIP_EXT)) {
copyFn = PrismLocator::unzip;
}
copyFn.accept(from.toUri().toURL().openStream(), to);
ByteStreams.copy(from.toUri().toURL().openStream(), Files.newOutputStream(to));
Files.setPosixFilePermissions(to, PERMS);
return to.toString();
}
String buildFileName() {
String version = getSDKVersion();
return String.format("apache_beam-v%s-prism-%s-%s", version, os(), arch());
}
private static void unzip(URL from, Path to) {
try {
unzip(from.openStream(), to);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void unzip(InputStream from, Path to) {
try (OutputStream out = Files.newOutputStream(to)) {
ZipInputStream zis = new ZipInputStream(from);
for (ZipEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
InputStream in = ByteStreams.limit(zis, entry.getSize());
ByteStreams.copy(in, out);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void copy(InputStream from, Path to) {
try {
ByteStreams.copy(from, Files.newOutputStream(to));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void download(URL from, Path to) {
try {
ByteStreams.copy(from.openStream(), Files.newOutputStream(to));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void validateShaSum512(URL shaSumReference, URL source) throws IOException {
try (InputStream in = shaSumReference.openStream()) {
String rawContent = new String(ByteStreams.toByteArray(in), StandardCharsets.UTF_8);
checkState(!Strings.isNullOrEmpty(rawContent));
String reference = "";
Iterator<String> split = Splitter.onPattern("\\s+").split(rawContent).iterator();
if (split.hasNext()) {
reference = split.next();
}
checkState(!Strings.isNullOrEmpty(reference));
HashCode toVerify = Hashing.sha512().hashBytes(ByteStreams.toByteArray(source.openStream()));
checkState(
reference.equals(toVerify.toString()),
"Expected sha512 derived from: %s does not equal expected: %s, got: %s",
source,
reference,
toVerify.toString());
}
}
private static String getNameWithoutExtension(String path) {
return org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.Files
.getNameWithoutExtension(path);
}
private String getSDKVersion() {
if (Strings.isNullOrEmpty(options.getPrismVersionOverride())) {
return RELEASE_INFO.getSdkVersion();
}
return options.getPrismVersionOverride();
}
private static String os() {
String result = mustGetPropertyAsLowerCase(OS_NAME_PROPERTY);
if (result.contains("mac")) {
return "darwin";
}
return result;
}
private static String arch() {
String result = mustGetPropertyAsLowerCase(ARCH_PROPERTY);
if (result.contains("aarch")) {
return "arm64";
}
return result;
}
private static String userHome() {
return mustGetPropertyAsLowerCase(USER_HOME_PROPERTY);
}
private static String mustGetPropertyAsLowerCase(String name) {
return checkStateNotNull(System.getProperty(name), "System property: " + name + " not set")
.toLowerCase();
}
private static void createDirectoryIfNeeded(Path path) throws IOException {
Path parent = path.getParent();
if (parent == null) {
return;
}
if (parent.toFile().exists()) {
return;
}
Files.createDirectories(parent);
}
}
|
class PrismLocator {
static final String OS_NAME_PROPERTY = "os.name";
static final String ARCH_PROPERTY = "os.arch";
static final String USER_HOME_PROPERTY = "user.home";
private static final String ZIP_EXT = "zip";
private static final ReleaseInfo RELEASE_INFO = ReleaseInfo.getReleaseInfo();
private static final String PRISM_BIN_PATH = ".apache_beam/cache/prism/bin";
private static final Set<PosixFilePermission> PERMS =
PosixFilePermissions.fromString("rwxr-xr-x");
private static final String GITHUB_DOWNLOAD_PREFIX =
"https:
private static final String GITHUB_TAG_PREFIX = "https:
private final PrismPipelineOptions options;
PrismLocator(PrismPipelineOptions options) {
this.options = options;
}
/**
* Downloads and prepares a Prism executable for use with the {@link PrismRunner}. The returned
* {@link String} is the absolute path to the Prism executable.
*/
static Path prismBinDirectory() {
return Paths.get(userHome(), PRISM_BIN_PATH);
}
private String resolve(URL from, Path to) throws IOException {
BiConsumer<URL, Path> downloadFn = PrismLocator::download;
if (from.getPath().endsWith(ZIP_EXT)) {
downloadFn = PrismLocator::unzip;
}
downloadFn.accept(from, to);
Files.setPosixFilePermissions(to, PERMS);
return to.toString();
}
private String resolve(Path from, Path to) throws IOException {
BiConsumer<InputStream, Path> copyFn = PrismLocator::copy;
if (from.endsWith(ZIP_EXT)) {
copyFn = PrismLocator::unzip;
}
copyFn.accept(from.toUri().toURL().openStream(), to);
ByteStreams.copy(from.toUri().toURL().openStream(), Files.newOutputStream(to));
Files.setPosixFilePermissions(to, PERMS);
return to.toString();
}
String buildFileName() {
String version = getSDKVersion();
return String.format("apache_beam-v%s-prism-%s-%s", version, os(), arch());
}
private static void unzip(URL from, Path to) {
try {
unzip(from.openStream(), to);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void unzip(InputStream from, Path to) {
try (OutputStream out = Files.newOutputStream(to)) {
ZipInputStream zis = new ZipInputStream(from);
for (ZipEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
InputStream in = ByteStreams.limit(zis, entry.getSize());
ByteStreams.copy(in, out);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void copy(InputStream from, Path to) {
try {
ByteStreams.copy(from, Files.newOutputStream(to));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void download(URL from, Path to) {
try {
ByteStreams.copy(from.openStream(), Files.newOutputStream(to));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static String getNameWithoutExtension(String path) {
return org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.Files
.getNameWithoutExtension(path);
}
private String getSDKVersion() {
if (Strings.isNullOrEmpty(options.getPrismVersionOverride())) {
return RELEASE_INFO.getSdkVersion();
}
return options.getPrismVersionOverride();
}
private static String os() {
String result = mustGetPropertyAsLowerCase(OS_NAME_PROPERTY);
if (result.contains("mac")) {
return "darwin";
}
return result;
}
private static String arch() {
String result = mustGetPropertyAsLowerCase(ARCH_PROPERTY);
if (result.contains("aarch")) {
return "arm64";
}
return result;
}
private static String userHome() {
return mustGetPropertyAsLowerCase(USER_HOME_PROPERTY);
}
private static String mustGetPropertyAsLowerCase(String name) {
return checkStateNotNull(System.getProperty(name), "System property: " + name + " not set")
.toLowerCase();
}
private static void createDirectoryIfNeeded(Path path) throws IOException {
Path parent = path.getParent();
if (parent == null) {
return;
}
Files.createDirectories(parent);
}
}
|
This should also collect the trivial aggregate functions nested inside a WindowExpression.
|
/**
 * Normalizes a not-yet-normalized LogicalAggregate into the shape
 * upperProject -&gt; aggregate -&gt; bottomProject: group-by expressions, subqueries used by
 * non-distinct aggregate functions, and non-trivial distinct-aggregate arguments are computed
 * in the bottom project; the aggregate then only references slots, and the upper project
 * restores the original output expression ids and names.
 */
public Rule build() {
    return logicalAggregate().whenNot(LogicalAggregate::isNormalized).then(aggregate -> {
        Set<Expression> groupingByExprs =
                ImmutableSet.copyOf(aggregate.getGroupByExpressions());
        List<NamedExpression> aggregateOutput = aggregate.getOutputExpressions();
        List<AggregateFunction> aggFuncs = Lists.newArrayList();
        aggregateOutput.forEach(o -> o.accept(CollectNonWindowedAggFuncs.INSTANCE, aggFuncs));
        // Subqueries inside non-distinct aggregate arguments must be evaluated below the
        // aggregate node, so collect them for push-down.
        Set<SubqueryExpr> subqueryExprs = ExpressionUtils.mutableCollect(aggFuncs.stream()
                .filter(aggFunc -> !aggFunc.isDistinct()).collect(Collectors.toList()),
                SubqueryExpr.class::isInstance);
        Set<Slot> allAggFuncInputSlots = aggFuncs.stream()
                .flatMap(agg -> agg.getInputSlots().stream()).collect(Collectors.toSet());
        // BUGFIX: exclude children that are a plain SlotReference OR a Literal. The previous
        // '&&' predicate could never be true (no expression is both at once), so trivial
        // children were wrongly pushed down as well.
        Set<Expression> distinctAggChildExprs = aggFuncs.stream()
                .filter(agg -> agg.isDistinct()).flatMap(agg -> agg.children().stream())
                .filter(child -> !(child instanceof SlotReference || child instanceof Literal))
                .collect(Collectors.toSet());
        Set<Alias> existsAlias =
                ExpressionUtils.mutableCollect(aggregateOutput, Alias.class::isInstance);
        // Everything the bottom project must compute.
        Set<Expression> allPushDownExprs =
                Sets.union(Sets.union(groupingByExprs, subqueryExprs), distinctAggChildExprs);
        NormalizeToSlotContext bottomSlotContext =
                NormalizeToSlotContext.buildContext(existsAlias, allPushDownExprs);
        Set<NamedExpression> bottomProjects =
                bottomSlotContext.pushDownToNamedExpression(allPushDownExprs);
        Plan bottomPlan;
        if (!bottomProjects.isEmpty()) {
            bottomPlan = new LogicalProject<>(
                    ImmutableList.copyOf(Sets.union(bottomProjects, allAggFuncInputSlots)),
                    aggregate.child());
        } else {
            bottomPlan = aggregate.child();
        }
        // Rewrite group-by expressions and aggregate functions to reference the pushed slots.
        List<Expression> normalizedGroupExprs =
                bottomSlotContext.normalizeToUseSlotRef(groupingByExprs);
        List<AggregateFunction> normalizedAggFuncs =
                bottomSlotContext.normalizeToUseSlotRef(aggFuncs);
        NormalizeToSlotContext normalizedAggFuncsToSlotContext =
                NormalizeToSlotContext.buildContext(existsAlias, normalizedAggFuncs);
        // The aggregate outputs the pushed-down slots plus the normalized aggregate functions.
        List<NamedExpression> normalizedAggOutput = ImmutableList.<NamedExpression>builder()
                .addAll(bottomProjects.stream().map(NamedExpression::toSlot).iterator())
                .addAll(normalizedAggFuncsToSlotContext
                        .pushDownToNamedExpression(normalizedAggFuncs))
                .build();
        LogicalAggregate newAggregate =
                aggregate.withNormalized(normalizedGroupExprs, normalizedAggOutput, bottomPlan);
        // The upper project restores the original output names and expression ids.
        List<NamedExpression> upperProjects = normalizeOutput(aggregateOutput,
                bottomSlotContext, normalizedAggFuncsToSlotContext);
        return new LogicalProject<>(upperProjects, newAggregate);
    }).toRule(RuleType.NORMALIZE_AGGREGATE);
}
|
/**
 * Normalizes a not-yet-normalized LogicalAggregate into the shape
 * upperProject -&gt; aggregate -&gt; bottomProject. Aggregate-function children that contain a
 * subquery or a window expression are pushed down as whole expressions; for all other children
 * only their input slots are pushed down. The upper project restores the original output
 * expression ids and names.
 */
public Rule build() {
    return logicalAggregate().whenNot(LogicalAggregate::isNormalized).then(aggregate -> {
        Set<Expression> groupingByExprs =
                ImmutableSet.copyOf(aggregate.getGroupByExpressions());
        List<NamedExpression> aggregateOutput = aggregate.getOutputExpressions();
        List<AggregateFunction> aggFuncs = Lists.newArrayList();
        aggregateOutput.forEach(o -> o.accept(CollectNonWindowedAggFuncs.INSTANCE, aggFuncs));
        // Children of non-distinct aggregates, keyed by whether they contain a subquery or a
        // window expression (true -> push the child itself, false -> push only its slots).
        Map<Boolean, Set<Expression>> categorizedNoDistinctAggsChildren = aggFuncs.stream()
                .filter(aggFunc -> !aggFunc.isDistinct())
                .flatMap(agg -> agg.children().stream())
                .collect(Collectors.groupingBy(
                        child -> child.containsType(SubqueryExpr.class, WindowExpression.class),
                        Collectors.toSet()));
        // Children of distinct aggregates, keyed by whether they are non-trivial, i.e. not a
        // bare slot reference or literal (true -> push the child itself).
        Map<Boolean, Set<Expression>> categorizedDistinctAggsChildren = aggFuncs.stream()
                .filter(aggFunc -> aggFunc.isDistinct()).flatMap(agg -> agg.children().stream())
                .collect(Collectors.groupingBy(
                        child -> !(child instanceof SlotReference || child instanceof Literal),
                        Collectors.toSet()));
        Set<Expression> needPushSelf = Sets.union(
                categorizedNoDistinctAggsChildren.getOrDefault(true, new HashSet<>()),
                categorizedDistinctAggsChildren.getOrDefault(true, new HashSet<>()));
        Set<Slot> needPushInputSlots = ExpressionUtils.getInputSlotSet(Sets.union(
                categorizedNoDistinctAggsChildren.getOrDefault(false, new HashSet<>()),
                categorizedDistinctAggsChildren.getOrDefault(false, new HashSet<>())));
        Set<Alias> existsAlias =
                ExpressionUtils.mutableCollect(aggregateOutput, Alias.class::isInstance);
        // Everything the bottom project must compute.
        Set<Expression> allPushDownExprs =
                Sets.union(groupingByExprs, Sets.union(needPushSelf, needPushInputSlots));
        NormalizeToSlotContext bottomSlotContext =
                NormalizeToSlotContext.buildContext(existsAlias, allPushDownExprs);
        Set<NamedExpression> pushedGroupByExprs =
                bottomSlotContext.pushDownToNamedExpression(groupingByExprs);
        Set<NamedExpression> pushedTrivalAggChildren =
                bottomSlotContext.pushDownToNamedExpression(needPushSelf);
        Set<NamedExpression> pushedTrivalAggInputSlots =
                bottomSlotContext.pushDownToNamedExpression(needPushInputSlots);
        Set<NamedExpression> bottomProjects = Sets.union(pushedGroupByExprs,
                Sets.union(pushedTrivalAggChildren, pushedTrivalAggInputSlots));
        Plan bottomPlan;
        if (!bottomProjects.isEmpty()) {
            bottomPlan = new LogicalProject<>(ImmutableList.copyOf(bottomProjects),
                    aggregate.child());
        } else {
            bottomPlan = aggregate.child();
        }
        // Rewrite group-by expressions and aggregate functions to reference the pushed slots.
        List<Expression> normalizedGroupExprs =
                bottomSlotContext.normalizeToUseSlotRef(groupingByExprs);
        List<AggregateFunction> normalizedAggFuncs =
                bottomSlotContext.normalizeToUseSlotRef(aggFuncs);
        NormalizeToSlotContext normalizedAggFuncsToSlotContext =
                NormalizeToSlotContext.buildContext(existsAlias, normalizedAggFuncs);
        // The aggregate outputs the pushed group-by slots plus the normalized agg functions.
        List<NamedExpression> normalizedAggOutput = ImmutableList.<NamedExpression>builder()
                .addAll(pushedGroupByExprs.stream().map(NamedExpression::toSlot).iterator())
                .addAll(normalizedAggFuncsToSlotContext
                        .pushDownToNamedExpression(normalizedAggFuncs))
                .build();
        LogicalAggregate newAggregate =
                aggregate.withNormalized(normalizedGroupExprs, normalizedAggOutput, bottomPlan);
        // The upper project restores the original output names and expression ids.
        List<NamedExpression> upperProjects = normalizeOutput(aggregateOutput,
                bottomSlotContext, normalizedAggFuncsToSlotContext);
        return new LogicalProject<>(upperProjects, newAggregate);
    }).toRule(RuleType.NORMALIZE_AGGREGATE);
}
|
/**
 * Rewrite rule that normalizes LogicalAggregate nodes into the
 * upperProject -&gt; aggregate -&gt; bottomProject shape; implements {@link NormalizeToSlot} for
 * the shared slot-normalization helpers.
 */
class NormalizeAggregate extends OneRewriteRuleFactory implements NormalizeToSlot {
    // BUGFIX: removed a stray @Override from this private method; private methods cannot
    // override anything, so the annotation was a compile error.
    /**
     * Rewrites the original aggregate output into the expression list of the upper project,
     * keeping each output's original ExprId and name so parent plans are unaffected.
     */
    private List<NamedExpression> normalizeOutput(List<NamedExpression> aggregateOutput,
            NormalizeToSlotContext groupByToSlotContext, NormalizeToSlotContext normalizedAggFuncsToSlotContext) {
        // Replace pushed-down expressions with their slots (window functions are left intact).
        List<NamedExpression> upperProjects = groupByToSlotContext
                .normalizeToUseSlotRefWithoutWindowFunction(aggregateOutput);
        upperProjects = normalizedAggFuncsToSlotContext.normalizeToUseSlotRefWithoutWindowFunction(upperProjects);
        Builder<NamedExpression> builder = new ImmutableList.Builder<>();
        for (int i = 0; i < aggregateOutput.size(); i++) {
            NamedExpression e = upperProjects.get(i);
            // Collapse an Alias that merely wraps a slot carrying the same ExprId.
            if (e instanceof Alias && e.child(0) instanceof SlotReference) {
                SlotReference slotReference = (SlotReference) e.child(0);
                if (slotReference.getExprId().equals(e.getExprId())) {
                    e = slotReference;
                }
            }
            // Re-alias when needed so the original ExprId and name stay visible to parents.
            if (!e.getExprId().equals(aggregateOutput.get(i).getExprId())) {
                e = new Alias(aggregateOutput.get(i).getExprId(), e, aggregateOutput.get(i).getName());
            }
            builder.add(e);
        }
        return builder.build();
    }

    /**
     * Collects aggregate functions from an output expression, descending into window specs so
     * that aggregate functions nested inside a WindowExpression are collected too.
     */
    private static class CollectNonWindowedAggFuncs extends DefaultExpressionVisitor<Void, List<AggregateFunction>> {
        private static final CollectNonWindowedAggFuncs INSTANCE = new CollectNonWindowedAggFuncs();

        @Override
        public Void visitWindow(WindowExpression windowExpression, List<AggregateFunction> context) {
            // Visit only the expressions inside the window spec; the window function itself is
            // not treated as a normal aggregate.
            for (Expression child : windowExpression.getExpressionsInWindowSpec()) {
                child.accept(this, context);
            }
            return null;
        }

        @Override
        public Void visitAggregateFunction(AggregateFunction aggregateFunction, List<AggregateFunction> context) {
            context.add(aggregateFunction);
            return null;
        }
    }
}
|
class NormalizeAggregate extends OneRewriteRuleFactory implements NormalizeToSlot {
// NOTE(review): removed the invalid @Override that was on normalizeOutput - a private
// method cannot override or implement anything, and javac rejects @Override on it.
/**
 * Rewrites the aggregate's original output expressions to reference the slots produced by
 * the normalized (pushed-down) group-by expressions and aggregate functions, while keeping
 * the original expression ids and names so upstream plans still resolve.
 *
 * @param aggregateOutput original output expressions of the aggregate
 * @param groupByToSlotContext slot context built from the pushed-down group-by expressions
 * @param normalizedAggFuncsToSlotContext slot context built from the normalized aggregate functions
 * @return upper projections exposing the aggregate's original output via slot references
 */
private List<NamedExpression> normalizeOutput(List<NamedExpression> aggregateOutput,
NormalizeToSlotContext groupByToSlotContext, NormalizeToSlotContext normalizedAggFuncsToSlotContext) {
List<NamedExpression> upperProjects = groupByToSlotContext
.normalizeToUseSlotRefWithoutWindowFunction(aggregateOutput);
upperProjects = normalizedAggFuncsToSlotContext.normalizeToUseSlotRefWithoutWindowFunction(upperProjects);
Builder<NamedExpression> builder = new ImmutableList.Builder<>();
for (int i = 0; i < aggregateOutput.size(); i++) {
NamedExpression e = upperProjects.get(i);
// An alias that merely wraps a slot carrying the same expr id collapses to the slot itself.
if (e instanceof Alias && e.child(0) instanceof SlotReference) {
SlotReference slotReference = (SlotReference) e.child(0);
if (slotReference.getExprId().equals(e.getExprId())) {
e = slotReference;
}
}
// Re-alias with the original expr id and name so consumers of the old output keep resolving.
if (!e.getExprId().equals(aggregateOutput.get(i).getExprId())) {
e = new Alias(aggregateOutput.get(i).getExprId(), e, aggregateOutput.get(i).getName());
}
builder.add(e);
}
return builder.build();
}
/** Collects aggregate functions, skipping any that appear as the function of a window expression. */
private static class CollectNonWindowedAggFuncs extends DefaultExpressionVisitor<Void, List<AggregateFunction>> {
private static final CollectNonWindowedAggFuncs INSTANCE = new CollectNonWindowedAggFuncs();
@Override
public Void visitWindow(WindowExpression windowExpression, List<AggregateFunction> context) {
// Only descend into the window-spec expressions; the windowed function itself is not collected.
for (Expression child : windowExpression.getExpressionsInWindowSpec()) {
child.accept(this, context);
}
return null;
}
@Override
public Void visitAggregateFunction(AggregateFunction aggregateFunction, List<AggregateFunction> context) {
context.add(aggregateFunction);
return null;
}
}
}
|
|
Same as above: the async client should be built with auto-complete enabled for the processor. We may need to find a different mechanism for disabling it on the `receive()` method of the sync client.
|
/**
 * Creates a <b>synchronous</b> Service Bus receiver wrapping the asynchronous client.
 * The async client is built with {@code isAutoCompleteAllowed = false}, so auto-complete is
 * forced off for the sync path; the retry options' try-timeout bounds each blocking receive.
 */
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(false), retryOptions.getTryTimeout());
}
|
return new ServiceBusReceiverClient(buildAsyncClient(false), retryOptions.getTryTimeout());
|
/**
 * Creates a <b>synchronous</b> Service Bus receiver wrapping the asynchronous client.
 * The async client is built with {@code isAutoCompleteAllowed = false}, so auto-complete is
 * forced off for the sync path; the retry options' try-timeout bounds each blocking receive.
 */
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(false), retryOptions.getTryTimeout());
}
|
/**
 * Builder for <b>session-aware</b> Service Bus receiver clients (async and sync).
 * Thread-safety: not thread-safe; configure and build from a single thread.
 */
class ServiceBusSessionReceiverClientBuilder {
private boolean enableAutoComplete = true;
private Integer maxConcurrentSessions = null;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private String sessionId;
private String subscriptionName;
private String topicName;
private ServiceBusSessionReceiverClientBuilder() {
}
/**
 * Disables auto-complete and auto-abandon of received messages. By default a successfully
 * processed message is completed and a message whose processing throws is abandoned.
 *
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder disableAutoComplete() {
this.enableAutoComplete = false;
return this;
}
/**
 * Enables session processing roll-over by processing at most {@code maxConcurrentSessions}.
 *
 * @param maxConcurrentSessions Maximum number of concurrent sessions to process at any given time.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 * @throws IllegalArgumentException if {@code maxConcurrentSessions} is less than 1.
 */
public ServiceBusSessionReceiverClientBuilder maxConcurrentSessions(int maxConcurrentSessions) {
if (maxConcurrentSessions < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"maxConcurrentSessions cannot be less than 1."));
}
this.maxConcurrentSessions = maxConcurrentSessions;
return this;
}
/**
 * Sets the prefetch count of the receiver. Prefetch keeps messages locally available before
 * the application asks for them; zero turns prefetch off. Validated in buildAsyncClient.
 *
 * @param prefetchCount The prefetch count.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder prefetchCount(int prefetchCount) {
this.prefetchCount = prefetchCount;
return this;
}
/**
 * Sets the name of the queue to create a receiver for.
 *
 * @param queueName Name of the queue.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
 * Sets the receive mode for the receiver.
 *
 * @param receiveMode Mode for receiving messages.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
 * Sets the session id. When unset, an unnamed (rolling) session receiver is created.
 *
 * @param sessionId session id.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
 * Sets the name of the subscription in the topic to listen to. Requires a topic name as well.
 *
 * @param subscriptionName Name of the subscription.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
 * Sets the name of the topic. Mutually exclusive with a queue name.
 *
 * @param topicName Name of the topic.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
 * Creates an <b>asynchronous</b>, <b>session-aware</b> Service Bus receiver responsible for
 * reading {@link ServiceBusMessage messages} from a specific queue or topic.
 *
 * @return A new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
 * @throws IllegalStateException if neither or both of queue name and topic name are set, or the
 *     connection configuration is invalid.
 */
public ServiceBusReceiverAsyncClient buildAsyncClient() {
return buildAsyncClient(true);
}
/**
 * Builds the async client. {@code isAutoCompleteAllowed} is {@code false} when building on
 * behalf of the synchronous client, where auto-complete is forced off (with a warning).
 */
private ServiceBusReceiverAsyncClient buildAsyncClient(boolean isAutoCompleteAllowed) {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath = getEntityPath(logger, entityType, queueName, topicName, subscriptionName,
SubQueue.NONE);
validateAndThrow(prefetchCount);
if (!isAutoCompleteAllowed && enableAutoComplete) {
logger.warning(
"'enableAutoComplete' is not supported in synchronous client except through callback receive.");
enableAutoComplete = false;
} else if (enableAutoComplete && receiveMode == ReceiveMode.RECEIVE_AND_DELETE) {
// BUGFIX(review): previously this guard tested isAutoCompleteAllowed, so an async client
// in RECEIVE_AND_DELETE mode was rejected even when the caller had called
// disableAutoComplete(). Only throw when auto-complete is actually enabled.
throw logger.logExceptionAsError(new IllegalStateException(
"'enableAutoComplete' is not valid for RECEIVE_AND_DELETE mode."));
}
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount, enableAutoComplete,
sessionId, isRollingSessionReceiver(), maxConcurrentSessions, true);
if (CoreUtils.isNullOrEmpty(sessionId)) {
// No session id: manage a rotating set of unnamed sessions via UnnamedSessionManager.
final UnnamedSessionManager sessionManager = new UnnamedSessionManager(entityPath, entityType,
connectionProcessor, connectionProcessor.getRetryOptions().getTryTimeout(), tracerProvider,
messageSerializer, receiverOptions);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor, ServiceBusConstants.OPERATION_TIMEOUT,
tracerProvider, messageSerializer, ServiceBusClientBuilder.this::onClientClose, sessionManager);
} else {
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor, ServiceBusConstants.OPERATION_TIMEOUT,
tracerProvider, messageSerializer, ServiceBusClientBuilder.this::onClientClose);
}
}
/**
 * This is a rolling session receiver only if maxConcurrentSessions is > 0 AND sessionId is null or empty. If
 * there is a sessionId, this is going to be a single, named session receiver.
 *
 * @return {@code true} if this is an unnamed rolling session receiver; {@code false} otherwise.
 */
private boolean isRollingSessionReceiver() {
if (maxConcurrentSessions == null) {
return false;
}
if (maxConcurrentSessions < 1) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Maximum number of concurrent sessions must be positive."));
}
return CoreUtils.isNullOrEmpty(sessionId);
}
}
|
/**
 * Builder for <b>session-aware</b> Service Bus receiver clients (async and sync), including
 * configurable session-lock auto-renewal.
 * Thread-safety: not thread-safe; configure and build from a single thread.
 */
class ServiceBusSessionReceiverClientBuilder {
private boolean enableAutoComplete = true;
private Integer maxConcurrentSessions = null;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private String sessionId;
private String subscriptionName;
private String topicName;
private Duration maxAutoLockRenewDuration = MAX_LOCK_RENEW_DEFAULT_DURATION;
private ServiceBusSessionReceiverClientBuilder() {
}
/**
 * Disables auto-complete and auto-abandon of received messages. By default a successfully
 * processed message is completed and a message whose processing throws is abandoned.
 *
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder disableAutoComplete() {
this.enableAutoComplete = false;
return this;
}
/**
 * Sets the amount of time to continue auto-renewing the session lock. {@code Duration.ZERO}
 * or {@code null} disables auto-renewal; it is forced off for RECEIVE_AND_DELETE mode at
 * build time.
 *
 * @param maxAutoLockRenewDuration the amount of time to continue auto-renewing the session lock.
 * @return The updated {@link ServiceBusSessionReceiverClientBuilder} object.
 * @throws IllegalArgumentException If {@code maxAutoLockRenewDuration} is negative.
 */
public ServiceBusSessionReceiverClientBuilder maxAutoLockRenewDuration(Duration maxAutoLockRenewDuration) {
validateAndThrow(maxAutoLockRenewDuration);
this.maxAutoLockRenewDuration = maxAutoLockRenewDuration;
return this;
}
/**
 * Enables session processing roll-over by processing at most {@code maxConcurrentSessions}.
 *
 * @param maxConcurrentSessions Maximum number of concurrent sessions to process at any given time.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 * @throws IllegalArgumentException if {@code maxConcurrentSessions} is less than 1.
 */
public ServiceBusSessionReceiverClientBuilder maxConcurrentSessions(int maxConcurrentSessions) {
if (maxConcurrentSessions < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"maxConcurrentSessions cannot be less than 1."));
}
this.maxConcurrentSessions = maxConcurrentSessions;
return this;
}
/**
 * Sets the prefetch count of the receiver. Prefetch keeps messages locally available before
 * the application asks for them; zero turns prefetch off.
 *
 * @param prefetchCount The prefetch count.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 * @throws IllegalArgumentException If {@code prefetchCount} is negative.
 */
public ServiceBusSessionReceiverClientBuilder prefetchCount(int prefetchCount) {
validateAndThrow(prefetchCount);
this.prefetchCount = prefetchCount;
return this;
}
/**
 * Sets the name of the queue to create a receiver for.
 *
 * @param queueName Name of the queue.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
 * Sets the receive mode for the receiver.
 *
 * @param receiveMode Mode for receiving messages.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
 * Sets the session id. When unset, an unnamed (rolling) session receiver is created.
 *
 * @param sessionId session id.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
 * Sets the name of the subscription in the topic to listen to. Requires a topic name as well.
 *
 * @param subscriptionName Name of the subscription.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
 * Sets the name of the topic. Mutually exclusive with a queue name.
 *
 * @param topicName Name of the topic.
 * @return The modified {@link ServiceBusSessionReceiverClientBuilder} object.
 */
public ServiceBusSessionReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
 * Creates an <b>asynchronous</b>, <b>session-aware</b> Service Bus receiver responsible for
 * reading {@link ServiceBusMessage messages} from a specific queue or topic.
 *
 * @return A new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
 * @throws IllegalStateException if neither or both of queue name and topic name are set, or the
 *     connection configuration is invalid.
 */
public ServiceBusReceiverAsyncClient buildAsyncClient() {
return buildAsyncClient(true);
}
/**
 * Builds the async client. {@code isAutoCompleteAllowed} is {@code false} when building on
 * behalf of the synchronous client, where auto-complete is forced off (with a warning).
 */
private ServiceBusReceiverAsyncClient buildAsyncClient(boolean isAutoCompleteAllowed) {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath = getEntityPath(logger, entityType, queueName, topicName, subscriptionName,
SubQueue.NONE);
if (!isAutoCompleteAllowed && enableAutoComplete) {
logger.warning(
"'enableAutoComplete' is not supported in synchronous client except through callback receive.");
enableAutoComplete = false;
} else if (enableAutoComplete && receiveMode == ReceiveMode.RECEIVE_AND_DELETE) {
throw logger.logExceptionAsError(new IllegalStateException(
"'enableAutoComplete' is not valid for RECEIVE_AND_DELETE mode."));
}
// RECEIVE_AND_DELETE has no locks to renew, so lock auto-renewal is disabled outright.
if (receiveMode == ReceiveMode.RECEIVE_AND_DELETE) {
maxAutoLockRenewDuration = Duration.ZERO;
}
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount,
maxAutoLockRenewDuration, enableAutoComplete, sessionId, isRollingSessionReceiver(),
maxConcurrentSessions);
final ServiceBusSessionManager sessionManager = new ServiceBusSessionManager(entityPath, entityType,
connectionProcessor, tracerProvider, messageSerializer, receiverOptions);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor, ServiceBusConstants.OPERATION_TIMEOUT,
tracerProvider, messageSerializer, ServiceBusClientBuilder.this::onClientClose, sessionManager);
}
/**
 * This is a rolling session receiver only if maxConcurrentSessions is > 0 AND sessionId is null or empty. If
 * there is a sessionId, this is going to be a single, named session receiver.
 *
 * @return {@code true} if this is an unnamed rolling session receiver; {@code false} otherwise.
 */
private boolean isRollingSessionReceiver() {
if (maxConcurrentSessions == null) {
return false;
}
if (maxConcurrentSessions < 1) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Maximum number of concurrent sessions must be positive."));
}
return CoreUtils.isNullOrEmpty(sessionId);
}
}
|
**1. For a partitioned table:** `create table testsparkstats (id int, name string) partitioned by(dt string) tored as orc;` **2. insert two partition dt=2024 and dt=2025** `insert into testsparkstats values(1243,'ttetw','2024'),(8945,'erw','2024'),(456,'df','2025'),(568,'tom','2025');` **3. analyze column statistics against the partitioned table** **For `column `level statistics, spark will store all the column statistics in the table properties whether the table is partitioned or not, that is to say, the two statements have same effect (spark will treat partitioned table as unpartitioned table when doing `column `statistics)** 1) analyze the partition column statistics dt=2024, and this will get all table's statistics and stored in the table properties `analyze table testsparkstats partition (dt='2024') compute statistics for all columns;` 2) analyze partitoned table's level statistics `analyze table testsparkstats compute statistics for all columns;` **4. Check the table properties from hms metadata** mysql> select * from TABLE_PARAMS where TBL_ID=1082; +--------+--------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | TBL_ID | PARAM_KEY | PARAM_VALUE | +--------+--------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | 1082 | spark.sql.create.version | 3.3.1 | | 1082 | spark.sql.sources.schema | {"type":"struct","fields":[{"name":"id","type":"integer","nullable":true,"metadata":{}},{"name":"name","type":"string","nullable":true,"metadata":{}},{"name":"dt","type":"string","nullable":true,"metadata":{}}]} | | 1082 | 
spark.sql.sources.schema.numPartCols | 1 | | 1082 | spark.sql.sources.schema.partCol.0 | dt | | 1082 | spark.sql.statistics.colStats.dt.avgLen | 4 | | 1082 | spark.sql.statistics.colStats.dt.distinctCount | 2 | | 1082 | spark.sql.statistics.colStats.dt.maxLen | 4 | | 1082 | spark.sql.statistics.colStats.dt.nullCount | 0 | | 1082 | spark.sql.statistics.colStats.dt.version | 2 | | 1082 | spark.sql.statistics.colStats.id.avgLen | 4 | | 1082 | spark.sql.statistics.colStats.id.distinctCount | 4 | | 1082 | spark.sql.statistics.colStats.id.max | 8945 | | 1082 | spark.sql.statistics.colStats.id.maxLen | 4 | | 1082 | spark.sql.statistics.colStats.id.min | 456 | | 1082 | spark.sql.statistics.colStats.id.nullCount | 0 | | 1082 | spark.sql.statistics.colStats.id.version | 2 | | 1082 | spark.sql.statistics.colStats.name.avgLen | 4 | | 1082 | spark.sql.statistics.colStats.name.distinctCount | 4 | | 1082 | spark.sql.statistics.colStats.name.maxLen | 5 | | 1082 | spark.sql.statistics.colStats.name.nullCount | 0 | | 1082 | spark.sql.statistics.colStats.name.version | 2 | | 1082 | spark.sql.statistics.numRows | 4 | | 1082 | spark.sql.statistics.totalSize | 762 | | 1082 | transient_lastDdlTime | 1706411426 | +--------+--------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 24 rows in set (0.00 sec) `
|
/**
 * Returns table-level statistics (row count plus per-column stats) for a Hive table.
 * Returns {@link HivePartitionStats#empty()} when the metastore carries no row count.
 * When the metastore has no column statistics, falls back to statistics Spark wrote into
 * the table parameters (spark.sql.statistics.colStats.*), if present.
 */
public HivePartitionStats getTableStatistics(String dbName, String tblName) {
org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);
HiveCommonStats commonStats = toHiveCommonStats(table.getParameters());
long totalRowNums = commonStats.getRowNums();
// -1 means the metastore has no row count; no per-column stats are meaningful then.
if (totalRowNums == -1) {
return HivePartitionStats.empty();
}
List<String> dataColumns = table.getSd().getCols().stream()
.map(FieldSchema::getName)
.collect(toImmutableList());
List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns);
if (statisticsObjs.isEmpty()) {
// BUGFIX(review): the Spark-parameter fallback was unguarded - a table without Spark
// stats, or with malformed parameters, could make the whole stats fetch fail. Only
// attempt the fallback when Spark column-stat keys exist, and degrade to "no column
// stats" on any parsing error instead of propagating it.
try {
if (table.getParameters().keySet().stream()
.anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) {
statisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table);
}
} catch (Exception e) {
LOG.warn("Failed to get column stats from table [{}.{}]", dbName, tblName);
}
}
Map<String, HiveColumnStats> columnStatistics =
HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums);
return new HivePartitionStats(commonStats, columnStatistics);
}
|
if (statisticsObjs.isEmpty()) {
|
/**
 * Returns table-level statistics (row count plus per-column stats) for a Hive table.
 * Returns {@link HivePartitionStats#empty()} when the metastore carries no row count.
 * When the metastore has no column statistics and reuse is enabled, falls back to the
 * statistics Spark wrote into the table parameters (spark.sql.statistics.colStats.*).
 */
public HivePartitionStats getTableStatistics(String dbName, String tblName) {
org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);
HiveCommonStats commonStats = toHiveCommonStats(table.getParameters());
long totalRowNums = commonStats.getRowNums();
// -1 means the metastore has no row count; no per-column stats are meaningful then.
if (totalRowNums == -1) {
return HivePartitionStats.empty();
}
List<String> dataColumns = table.getSd().getCols().stream()
.map(FieldSchema::getName)
.collect(toImmutableList());
List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns);
if (statisticsObjs.isEmpty() && Config.enable_reuse_spark_column_statistics) {
// Best-effort fallback: any parse failure degrades to "no column stats" rather than
// aborting the whole statistics fetch.
try {
if (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) {
statisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table);
}
} catch (Exception e) {
LOG.warn("Failed to get column stats from table [{}.{}]", dbName, tblName);
}
}
Map<String, HiveColumnStats> columnStatistics =
HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums);
return new HivePartitionStats(commonStats, columnStatistics);
}
|
class HiveMetastore implements IHiveMetastore {
// BUGFIX(review): the logger was created with CachingHiveMetastore.class, mislabeling this
// class's log output as coming from the caching wrapper. Use the declaring class.
private static final Logger LOG = LogManager.getLogger(HiveMetastore.class);
private final HiveMetaClient client;
private final String catalogName;
private final MetastoreType metastoreType;
/**
 * @param client underlying thrift/Glue metastore client used for all operations
 * @param catalogName name of the external catalog this metastore serves
 * @param metastoreType metastore flavor (e.g. HMS or GLUE); drives Glue-specific workarounds
 */
public HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) {
this.client = client;
this.catalogName = catalogName;
this.metastoreType = metastoreType;
}
/** Lists the names of all databases known to the metastore. */
@Override
public List<String> getAllDatabaseNames() {
return client.getAllDatabaseNames();
}
/**
 * Creates a database in the metastore. Only the "location" property is honored; a new
 * connector-scoped database id is generated for the internal representation.
 */
@Override
public void createDb(String dbName, Map<String, String> properties) {
String location = properties.getOrDefault(LOCATION_PROPERTY, "");
long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt();
Database database = new Database(dbId, dbName, location);
client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database));
}
/** Drops a database; {@code deleteData} also removes the underlying data. */
@Override
public void dropDb(String dbName, boolean deleteData) {
client.dropDatabase(dbName, deleteData);
}
/** Lists the names of all tables in the given database. */
@Override
public List<String> getAllTableNames(String dbName) {
return client.getAllTableNames(dbName);
}
/** Fetches a database from the metastore and converts it to the internal representation. */
@Override
public Database getDb(String dbName) {
org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName);
return HiveMetastoreApiConverter.toDatabase(db);
}
/** Creates a Hive table in the metastore from the internal table representation. */
@Override
public void createTable(String dbName, Table table) {
org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table);
client.createTable(hiveTable);
}
/** Drops the given table from the metastore. */
@Override
public void dropTable(String dbName, String tableName) {
client.dropTable(dbName, tableName);
}
/**
 * Fetches a table from the metastore and converts it to the internal representation:
 * a Hudi table when the input format marks it as such, otherwise a Hive view or Hive table.
 *
 * @throws StarRocksConnectorException when the storage descriptor is missing or the table
 *     is a full-ACID transactional table (not supported).
 */
public Table getTable(String dbName, String tableName) {
    org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName);
    StorageDescriptor sd = table.getSd();
    if (sd == null) {
        throw new StarRocksConnectorException("Table is missing storage descriptor");
    }
    // Hudi tables are identified purely by their input format and bypass the Hive checks.
    if (HiveMetastoreApiConverter.isHudiTable(sd.getInputFormat())) {
        return HiveMetastoreApiConverter.toHudiTable(table, catalogName);
    }
    validateHiveTableType(table.getTableType());
    if (AcidUtils.isFullAcidTable(table)) {
        throw new StarRocksConnectorException(
                String.format("%s.%s is a hive transactional table(full acid), sr didn't support it yet", dbName,
                        tableName));
    }
    if (table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) {
        return HiveMetastoreApiConverter.toHiveView(table, catalogName);
    }
    return HiveMetastoreApiConverter.toHiveTable(table, catalogName);
}
/** Returns whether the given table exists in the metastore. */
@Override
public boolean tableExists(String dbName, String tableName) {
return client.tableExists(dbName, tableName);
}
/**
 * Lists partition names, optionally filtered by partial partition values.
 * An empty filter list returns every partition name; an absent value in the list
 * ({@code Optional.empty()}) acts as a wildcard (rendered as an empty string).
 */
@Override
public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) {
    if (partitionValues.isEmpty()) {
        return client.getPartitionKeys(dbName, tableName);
    }
    List<String> partitionValuesStr = new ArrayList<>();
    for (Optional<String> value : partitionValues) {
        partitionValuesStr.add(value.orElse(""));
    }
    return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr);
}
/**
 * Returns whether the given partition values identify an existing partition of the table.
 * For Glue-backed tables with a boolean partition column, all partition names are listed and
 * matched against the rendered partition name instead of filtering by value
 * (presumably a Glue filtering limitation - NOTE(review): confirm).
 */
@Override
public boolean partitionExists(Table table, List<String> partitionValues) {
HiveTable hiveTable = (HiveTable) table;
String dbName = hiveTable.getDbName();
String tableName = hiveTable.getTableName();
if (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) {
List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName);
String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues);
return allPartitionNames.contains(hivePartitionName);
} else {
return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty();
}
}
/**
 * Fetches one partition of a table. An empty value list denotes an unpartitioned table,
 * whose storage descriptor and parameters are read from the table object itself.
 */
@Override
public Partition getPartition(String dbName, String tblName, List<String> partitionValues) {
    if (partitionValues.isEmpty()) {
        org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);
        return HiveMetastoreApiConverter.toPartition(table.getSd(), table.getParameters());
    }
    org.apache.hadoop.hive.metastore.api.Partition partition =
            client.getPartition(dbName, tblName, partitionValues);
    return HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters());
}
/**
 * Fetches the given partitions by name, batching metastore RPCs in chunks of
 * Config.max_hive_partitions_per_rpc, and returns them keyed by partition name.
 * The name-to-partition join goes through the partitions' value lists.
 * NOTE(review): if the metastore does not return one of the requested partitions, the
 * lookup below yields null and ImmutableMap.Builder.put will throw a NullPointerException -
 * confirm whether callers guarantee all names exist.
 */
public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) {
List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>();
// Chunk the RPCs so a huge partition list cannot overload a single metastore call.
for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) {
int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size());
List<String> namesPerRPC = partitionNames.subList(start, end);
List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC =
client.getPartitionsByNames(dbName, tblName, namesPerRPC);
partitions.addAll(partsPerRPC);
}
Map<String, List<String>> partitionNameToPartitionValues = partitionNames.stream()
.collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues));
Map<List<String>, Partition> partitionValuesToPartition = partitions.stream()
.collect(Collectors.toMap(
org.apache.hadoop.hive.metastore.api.Partition::getValues,
partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters())));
ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder();
for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) {
Partition partition = partitionValuesToPartition.get(entry.getValue());
resultBuilder.put(entry.getKey(), partition);
}
return resultBuilder.build();
}
/** Adds the given partitions (with their statistics payloads) to the table in one call. */
@Override
public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) {
List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream()
.map(HiveMetastoreApiConverter::toMetastoreApiPartition)
.collect(Collectors.toList());
client.addPartitions(dbName, tableName, hivePartitions);
}
/** Drops one partition by its values; {@code deleteData} also removes the partition data. */
@Override
public void dropPartition(String dbName, String tableName, List<String> partValues, boolean deleteData) {
client.dropPartition(dbName, tableName, partValues, deleteData);
}
/**
 * Reads the table's current common statistics, applies {@code update} to them, and writes the
 * result (plus a refreshed transient_lastDdlTime) back via alterTable.
 * NOTE(review): read-modify-write with no locking; concurrent updaters can lose updates -
 * confirm callers serialize this.
 */
public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) {
org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName);
if (originTable == null) {
throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName);
}
org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy();
HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters());
// Only common (table-level) stats participate here; column stats map is intentionally empty.
HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());
HivePartitionStats updatedStats = update.apply(curPartitionStats);
HiveCommonStats commonStats = updatedStats.getCommonStats();
Map<String, String> originParams = newTable.getParameters();
originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));
newTable.setParameters(updateStatisticsParameters(originParams, commonStats));
client.alterTable(dbName, tableName, newTable);
}
/**
 * Applies {@code update} to the statistics of one partition of
 * {@code dbName.tableName} and writes the result back via an ALTER PARTITION call.
 * As with the table-level variant, only common stats are read; the update function
 * receives an empty column-stats map.
 *
 * @throws StarRocksConnectorException if the metastore does not return exactly one
 *         partition for {@code partitionName}
 */
public void updatePartitionStatistics(String dbName, String tableName, String partitionName,
                                      Function<HivePartitionStats, HivePartitionStats> update) {
    List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames(
            dbName, tableName, ImmutableList.of(partitionName));
    if (partitions.size() != 1) {
        // FIX: the old message claimed "multiple partitions" even when zero were
        // returned (e.g. the partition was dropped concurrently); report the actual
        // count so the failure is not misdiagnosed.
        throw new StarRocksConnectorException("Metastore returned " + partitions.size() +
                " partitions for name: " + partitionName);
    }

    org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions);
    HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters());
    HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());
    HivePartitionStats updatedStats = update.apply(curPartitionStats);

    // Mutate a deep copy so the fetched Thrift object stays untouched on failure.
    org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy();
    HiveCommonStats commonStats = updatedStats.getCommonStats();
    Map<String, String> originParams = modifiedPartition.getParameters();
    // Last-DDL time is recorded in seconds, matching the table-level update path.
    originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats));
    client.alterPartition(dbName, tableName, modifiedPartition);
}
/**
 * Fetches per-partition statistics for the named partitions of {@code table}:
 * common stats (e.g. row counts) are derived from each partition's parameter map,
 * column stats come from the metastore column-statistics API, and the two are
 * combined per partition into a {@link HivePartitionStats}. Partitions without
 * column statistics get an empty column-stats map.
 */
public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) {
    HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table;
    String dbName = hmsTbl.getDbName();
    String tblName = hmsTbl.getTableName();
    List<String> dataColumns = hmsTbl.getDataColumnNames();
    Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames);

    // Common stats keyed by partition name, extracted from partition parameters.
    Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters())));
    // Row counts are fed into the column-statistics conversion below — presumably
    // to derive per-column ratios; confirm in HiveMetastoreApiConverter.
    Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums()));
    ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder();
    Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj =
            client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns);
    Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter
            .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums);
    for (String partitionName : partitionCommonStats.keySet()) {
        HiveCommonStats commonStats = partitionCommonStats.get(partitionName);
        Map<String, HiveColumnStats> columnStatistics = partitionColumnStats
                .getOrDefault(partitionName, ImmutableMap.of());
        resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics));
    }
    return resultBuilder.build();
}
/**
 * Returns the metastore's latest notification event id; used as the watermark for
 * incremental event pulling (see {@code getNextEventResponse}).
 */
public long getCurrentEventId() {
    return client.getCurrentNotificationEventId().getEventId();
}
/**
 * Pulls the next batch of metastore notification events after
 * {@code lastSyncedEventId}.
 *
 * @param getAllEvents when true, requests an unbounded batch (-1); otherwise the
 *                     batch size is capped by {@code Config.hms_events_batch_size_per_rpc}
 * @return the event batch, or {@code null} when the metastore has no new events
 * @throws MetastoreNotificationFetchException if the metastore RPC fails
 */
public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName,
                                                      final boolean getAllEvents)
        throws MetastoreNotificationFetchException {
    try {
        int batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc;
        NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null);
        if (response.getEvents().isEmpty()) {
            LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName);
            return null;
        }
        // Parameterized logging instead of String.format: the message is only built
        // when the INFO level is actually enabled.
        LOG.info("Received {} events. Start event id : {}. Last synced id : {} on catalog : {}",
                response.getEvents().size(), response.getEvents().get(0).getEventId(),
                lastSyncedEventId, catalogName);
        return response;
    } catch (MetastoreNotificationFetchException e) {
        LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e);
        // Preserve the original cause while surfacing the watermark to the caller.
        throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " +
                "Last synced event id is " + lastSyncedEventId, e);
    }
}
}
|
/**
 * Direct (uncached) Hive metastore accessor. Talks to HMS through
 * {@link HiveMetaClient} and converts Thrift API objects into StarRocks connector
 * objects via {@code HiveMetastoreApiConverter}. Stateless apart from the injected
 * client, catalog name and metastore flavor.
 */
class HiveMetastore implements IHiveMetastore {
    // FIX: the logger was previously created for CachingHiveMetastore.class, which
    // mislabeled every log line emitted by this class; own the logger here.
    private static final Logger LOG = LogManager.getLogger(HiveMetastore.class);

    private final HiveMetaClient client;
    private final String catalogName;
    private final MetastoreType metastoreType;

    public HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) {
        this.client = client;
        this.catalogName = catalogName;
        this.metastoreType = metastoreType;
    }

    @Override
    public List<String> getAllDatabaseNames() {
        return client.getAllDatabaseNames();
    }

    /** Creates a database; only the optional "location" property is honored. */
    @Override
    public void createDb(String dbName, Map<String, String> properties) {
        String location = properties.getOrDefault(LOCATION_PROPERTY, "");
        long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt();
        Database database = new Database(dbId, dbName, location);
        client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database));
    }

    @Override
    public void dropDb(String dbName, boolean deleteData) {
        client.dropDatabase(dbName, deleteData);
    }

    @Override
    public List<String> getAllTableNames(String dbName) {
        return client.getAllTableNames(dbName);
    }

    @Override
    public Database getDb(String dbName) {
        org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName);
        return HiveMetastoreApiConverter.toDatabase(db);
    }

    @Override
    public void createTable(String dbName, Table table) {
        org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table);
        client.createTable(hiveTable);
    }

    @Override
    public void dropTable(String dbName, String tableName) {
        client.dropTable(dbName, tableName);
    }

    /**
     * Loads a table and converts it to the matching connector type: Hudi tables
     * (detected from the input format), Hive views (VIRTUAL_VIEW) or plain Hive
     * tables. Full-ACID transactional tables are rejected as unsupported.
     */
    public Table getTable(String dbName, String tableName) {
        org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName);
        StorageDescriptor sd = table.getSd();
        if (sd == null) {
            throw new StarRocksConnectorException("Table is missing storage descriptor");
        }

        if (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) {
            validateHiveTableType(table.getTableType());
            if (AcidUtils.isFullAcidTable(table)) {
                throw new StarRocksConnectorException(
                        String.format("%s.%s is a hive transactional table(full acid), sr didn't support it yet", dbName,
                                tableName));
            }
            if (table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) {
                return HiveMetastoreApiConverter.toHiveView(table, catalogName);
            } else {
                return HiveMetastoreApiConverter.toHiveTable(table, catalogName);
            }
        } else {
            return HiveMetastoreApiConverter.toHudiTable(table, catalogName);
        }
    }

    @Override
    public boolean tableExists(String dbName, String tableName) {
        return client.tableExists(dbName, tableName);
    }

    /**
     * Lists partition names, optionally filtered by partition values; an absent
     * value is sent as "" — presumably treated as a wildcard for that column by the
     * metastore (TODO confirm against HiveMetaClient).
     */
    @Override
    public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) {
        if (partitionValues.isEmpty()) {
            return client.getPartitionKeys(dbName, tableName);
        } else {
            List<String> partitionValuesStr = partitionValues.stream()
                    .map(v -> v.orElse("")).collect(Collectors.toList());
            return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr);
        }
    }

    @Override
    public boolean partitionExists(Table table, List<String> partitionValues) {
        HiveTable hiveTable = (HiveTable) table;
        String dbName = hiveTable.getDbName();
        String tableName = hiveTable.getTableName();
        if (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) {
            // Glue workaround for tables with a boolean partition column: skip the
            // server-side value filter and match the partition name locally instead.
            List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName);
            String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues);
            return allPartitionNames.contains(hivePartitionName);
        } else {
            return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty();
        }
    }

    /**
     * Returns partition metadata for the given partition values; with no values,
     * the table itself is treated as a single unpartitioned "partition".
     */
    @Override
    public Partition getPartition(String dbName, String tblName, List<String> partitionValues) {
        StorageDescriptor sd;
        Map<String, String> params;
        if (partitionValues.size() > 0) {
            org.apache.hadoop.hive.metastore.api.Partition partition =
                    client.getPartition(dbName, tblName, partitionValues);
            sd = partition.getSd();
            params = partition.getParameters();
        } else {
            org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);
            sd = table.getSd();
            params = table.getParameters();
        }
        return HiveMetastoreApiConverter.toPartition(sd, params);
    }

    /**
     * Batch-fetches partitions by name, chunking RPCs by
     * {@code Config.max_hive_partitions_per_rpc}, and keys the result by the
     * original partition name (matched through the partition values).
     */
    public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) {
        List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>();
        for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) {
            int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size());
            List<String> namesPerRPC = partitionNames.subList(start, end);
            List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC =
                    client.getPartitionsByNames(dbName, tblName, namesPerRPC);
            partitions.addAll(partsPerRPC);
        }

        Map<String, List<String>> partitionNameToPartitionValues = partitionNames.stream()
                .collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues));

        Map<List<String>, Partition> partitionValuesToPartition = partitions.stream()
                .collect(Collectors.toMap(
                        org.apache.hadoop.hive.metastore.api.Partition::getValues,
                        partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters())));

        ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder();
        for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) {
            Partition partition = partitionValuesToPartition.get(entry.getValue());
            // NOTE(review): if the metastore returned no partition for a requested
            // name, `partition` is null here and ImmutableMap.Builder.put throws an
            // NPE — confirm whether missing partitions should be skipped instead.
            resultBuilder.put(entry.getKey(), partition);
        }
        return resultBuilder.build();
    }

    /** Converts and adds a batch of partitions in a single metastore call. */
    @Override
    public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) {
        List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream()
                .map(HiveMetastoreApiConverter::toMetastoreApiPartition)
                .collect(Collectors.toList());
        client.addPartitions(dbName, tableName, hivePartitions);
    }

    @Override
    public void dropPartition(String dbName, String tableName, List<String> partValues, boolean deleteData) {
        client.dropPartition(dbName, tableName, partValues, deleteData);
    }

    /**
     * Applies {@code update} to the table-level (common) statistics and writes the
     * result back via ALTER TABLE. Column statistics are not read here.
     */
    public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) {
        org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName);
        if (originTable == null) {
            throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName);
        }
        // Mutate a deep copy so the fetched Thrift object stays untouched on failure.
        org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy();
        HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters());
        HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());
        HivePartitionStats updatedStats = update.apply(curPartitionStats);

        HiveCommonStats commonStats = updatedStats.getCommonStats();
        Map<String, String> originParams = newTable.getParameters();
        // Last-DDL time is recorded in seconds (millis / 1000).
        originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));
        newTable.setParameters(updateStatisticsParameters(originParams, commonStats));
        client.alterTable(dbName, tableName, newTable);
    }

    /**
     * Applies {@code update} to one partition's statistics and writes the result
     * back via ALTER PARTITION.
     */
    public void updatePartitionStatistics(String dbName, String tableName, String partitionName,
                                          Function<HivePartitionStats, HivePartitionStats> update) {
        List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames(
                dbName, tableName, ImmutableList.of(partitionName));
        if (partitions.size() != 1) {
            // FIX: report the actual count; the old message claimed "multiple
            // partitions" even when zero were returned.
            throw new StarRocksConnectorException("Metastore returned " + partitions.size() +
                    " partitions for name: " + partitionName);
        }

        org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions);
        HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters());
        HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());
        HivePartitionStats updatedStats = update.apply(curPartitionStats);

        org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy();
        HiveCommonStats commonStats = updatedStats.getCommonStats();
        Map<String, String> originParams = modifiedPartition.getParameters();
        originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));
        modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats));
        client.alterPartition(dbName, tableName, modifiedPartition);
    }

    /**
     * Fetches per-partition statistics: common stats from partition parameters,
     * column stats from the metastore column-statistics API, combined per partition.
     * Partitions without column statistics get an empty column-stats map.
     */
    public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) {
        HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table;
        String dbName = hmsTbl.getDbName();
        String tblName = hmsTbl.getTableName();
        List<String> dataColumns = hmsTbl.getDataColumnNames();
        Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames);

        Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream()
                .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters())));
        Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream()
                .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums()));
        ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder();
        Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj =
                client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns);
        Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter
                .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums);
        for (String partitionName : partitionCommonStats.keySet()) {
            HiveCommonStats commonStats = partitionCommonStats.get(partitionName);
            Map<String, HiveColumnStats> columnStatistics = partitionColumnStats
                    .getOrDefault(partitionName, ImmutableMap.of());
            resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics));
        }
        return resultBuilder.build();
    }

    /** Returns the metastore's latest notification event id (sync watermark). */
    public long getCurrentEventId() {
        return client.getCurrentNotificationEventId().getEventId();
    }

    /**
     * Pulls the next batch of metastore notification events after
     * {@code lastSyncedEventId}; returns {@code null} when there are no new events.
     */
    public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName,
                                                          final boolean getAllEvents)
            throws MetastoreNotificationFetchException {
        try {
            int batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc;
            NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null);
            if (response.getEvents().isEmpty()) {
                LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName);
                return null;
            }
            // Parameterized logging: the message is only built when INFO is enabled.
            LOG.info("Received {} events. Start event id : {}. Last synced id : {} on catalog : {}",
                    response.getEvents().size(), response.getEvents().get(0).getEventId(),
                    lastSyncedEventId, catalogName);
            return response;
        } catch (MetastoreNotificationFetchException e) {
            LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e);
            throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " +
                    "Last synced event id is " + lastSyncedEventId, e);
        }
    }
}
|
Shall we change this to report a diagnostic?
|
/**
 * Registers one code analyzer, one code generator and one code modifier with the
 * compiler plugin context. Each component prints a marker line when its init hook
 * fires — presumably a placeholder used by tests to observe that all three hooks
 * run. NOTE(review): consider reporting a diagnostic (or writing to a file)
 * instead of printing to stdout.
 */
public void init(CompilerPluginContext pluginContext) {
    pluginContext.addCodeAnalyzer(new CodeAnalyzer() {
        @Override
        public void init(CodeAnalysisContext codeAnalysisContext) {
            System.out.println("Hello from the analyzer");
        }
    });

    pluginContext.addCodeGenerator(new CodeGenerator() {
        @Override
        public void init(CodeGeneratorContext codeGeneratorContext) {
            System.out.println("Hello from the generator");
        }
    });

    pluginContext.addCodeModifier(new CodeModifier() {
        @Override
        public void init(CodeModifierContext codeModifierContext) {
            System.out.println("Hello from the modifier");
        }
    });
}
|
System.out.println("Hello from the generator");
|
/**
 * Registers one code analyzer, one code generator and one code modifier with the
 * compiler plugin context. Each component records its initialization by appending
 * a marker line to a shared output file via {@code appendToOutputFile}, so tests
 * can verify that all three hooks fired without relying on stdout.
 */
public void init(CompilerPluginContext pluginContext) {
    pluginContext.addCodeAnalyzer(new CodeAnalyzer() {
        @Override
        public void init(CodeAnalysisContext codeAnalysisContext) {
            appendToOutputFile(CodeType.ANALYZER);
        }
    });

    pluginContext.addCodeGenerator(new CodeGenerator() {
        @Override
        public void init(CodeGeneratorContext codeGeneratorContext) {
            appendToOutputFile(CodeType.GENERATOR);
        }
    });

    pluginContext.addCodeModifier(new CodeModifier() {
        @Override
        public void init(CodeModifierContext codeModifierContext) {
            appendToOutputFile(CodeType.MODIFIER);
        }
    });
}
|
class CombinedCompilerPlugin extends CompilerPlugin {
@Override
}
|
class CombinedCompilerPlugin extends CompilerPlugin {
String lockFilePath = "./src/test/resources/compiler_plugin_tests/" +
"package_comp_plugin_with_analyzer_generator_modifier/target/combined_plugin_output.lock";
@Override
private void appendToOutputFile(CodeType codeType) {
String filePath = "./src/test/resources/compiler_plugin_tests/" +
"package_comp_plugin_with_analyzer_generator_modifier/target/combined_plugin_output.txt";
File outputFile = new File(filePath);
acquireLock();
try (FileOutputStream fileStream = new FileOutputStream(outputFile, true);
Writer writer = new OutputStreamWriter(fileStream, StandardCharsets.UTF_8)) {
writer.write("Initialized " + codeType.getName() + "\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
releaseLock();
}
private void acquireLock() {
try {
File lockFile = new File(lockFilePath);
while (lockFile.exists()) {
Thread.sleep(100);
}
if (!lockFile.createNewFile()) {
throw new RuntimeException("Error while creating the lock file: " + lockFilePath);
}
} catch (IOException | InterruptedException e) {
throw new RuntimeException(
"Error while creating the lock file: " + lockFilePath + " " + e.getMessage());
}
}
private void releaseLock() {
try {
Files.delete(Paths.get(lockFilePath));
} catch (IOException e) {
throw new RuntimeException(
"Error while deleting the lock file: " + lockFilePath + " " + e.getMessage());
}
}
}
|
Can we wrap using ZstdOutputStream instead of first encoding the entire value to bytes? Ditto for decoding.
|
/**
 * Encodes {@code value} with the inner coder, zstd-compresses the entire encoded
 * byte array in one shot, and writes the compressed bytes (length-prefixed via
 * ByteArrayCoder) to {@code os}. Frames are magicless and carry no dictionary id,
 * so decoding must be configured symmetrically. Pledging the exact source size
 * records it in the frame, which the decode path reads back via
 * {@code Zstd.decompressedSize}. NOTE(review): the value is fully materialized in
 * memory before compression; a streaming ZstdOutputStream could avoid that —
 * TODO confirm the frame formats stay compatible before switching.
 */
public void encode(T value, OutputStream os) throws IOException {
    ZstdCompressCtx ctx = new ZstdCompressCtx();
    try {
        ctx.setLevel(level);
        ctx.setMagicless(true);
        ctx.setDictID(false);
        // A null dict is accepted here — presumably treated as "no dictionary";
        // verify against the zstd-jni loadDict contract.
        ctx.loadDict(dict);
        byte[] encoded = CoderUtils.encodeToByteArray(innerCoder, value);
        ctx.setPledgedSrcSize(encoded.length);
        byte[] compressed = ctx.compress(encoded);
        ByteArrayCoder.of().encode(compressed, os);
    } finally {
        // ZstdCompressCtx holds native resources; always release it.
        ctx.close();
    }
}
|
byte[] encoded = CoderUtils.encodeToByteArray(innerCoder, value);
|
/**
 * Encodes {@code value} with the inner coder, zstd-compresses the entire encoded
 * byte array in one shot, and writes the compressed bytes (length-prefixed via
 * ByteArrayCoder) to {@code os}. Frames are magicless and carry no dictionary id,
 * so decoding must be configured symmetrically. NOTE(review): the value is fully
 * materialized in memory before compression; a streaming ZstdOutputStream could
 * avoid that — TODO confirm the frame formats stay compatible before switching.
 */
public void encode(T value, OutputStream os) throws IOException {
    ZstdCompressCtx ctx = new ZstdCompressCtx();
    try {
        ctx.setLevel(level);
        ctx.setMagicless(true);
        ctx.setDictID(false);
        // A null dict is accepted here — presumably treated as "no dictionary";
        // verify against the zstd-jni loadDict contract.
        ctx.loadDict(dict);
        byte[] encoded = CoderUtils.encodeToByteArray(innerCoder, value);
        byte[] compressed = ctx.compress(encoded);
        ByteArrayCoder.of().encode(compressed, os);
    } finally {
        // ZstdCompressCtx holds native resources; always release it.
        ctx.close();
    }
}
|
class ZstdCoder<T> extends StructuredCoder<T> {
private final Coder<T> innerCoder;
private final @Nullable byte[] dict;
private final int level;
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, byte[] dict, int level) {
return new ZstdCoder<>(innerCoder, dict, level);
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, byte[] dict) {
return new ZstdCoder<>(innerCoder, dict, Zstd.defaultCompressionLevel());
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, int level) {
return new ZstdCoder<>(innerCoder, null, level);
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder) {
return new ZstdCoder<>(innerCoder, null, Zstd.defaultCompressionLevel());
}
private ZstdCoder(Coder<T> innerCoder, @Nullable byte[] dict, int level) {
this.innerCoder = innerCoder;
this.dict = dict;
this.level = level;
}
@Override
@Override
public T decode(InputStream is) throws IOException {
ZstdDecompressCtx ctx = new ZstdDecompressCtx();
try {
ctx.setMagicless(true);
ctx.loadDict(dict);
byte[] compressed = ByteArrayCoder.of().decode(is);
int decompressedSize = (int) Zstd.decompressedSize(compressed, 0, compressed.length, true);
byte[] encoded = ctx.decompress(compressed, decompressedSize);
return CoderUtils.decodeFromByteArray(innerCoder, encoded);
} finally {
ctx.close();
}
}
@Override
public List<? extends Coder<?>> getCoderArguments() {
return ImmutableList.of(innerCoder);
}
@Override
public void verifyDeterministic() throws NonDeterministicException {
innerCoder.verifyDeterministic();
}
}
|
class ZstdCoder<T> extends Coder<T> {
private final Coder<T> innerCoder;
private final @Nullable byte[] dict;
private final int level;
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, byte[] dict, int level) {
return new ZstdCoder<>(innerCoder, dict, level);
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, byte[] dict) {
return new ZstdCoder<>(innerCoder, dict, Zstd.defaultCompressionLevel());
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder, int level) {
return new ZstdCoder<>(innerCoder, null, level);
}
/** Wraps the given coder into a {@link ZstdCoder}. */
public static <T> ZstdCoder<T> of(Coder<T> innerCoder) {
return new ZstdCoder<>(innerCoder, null, Zstd.defaultCompressionLevel());
}
private ZstdCoder(Coder<T> innerCoder, @Nullable byte[] dict, int level) {
this.innerCoder = innerCoder;
this.dict = dict;
this.level = level;
}
@Override
@Override
public T decode(InputStream is) throws IOException {
ZstdDecompressCtx ctx = new ZstdDecompressCtx();
try {
ctx.setMagicless(true);
ctx.loadDict(dict);
byte[] compressed = ByteArrayCoder.of().decode(is);
int decompressedSize = (int) Zstd.decompressedSize(compressed, 0, compressed.length, true);
byte[] encoded = ctx.decompress(compressed, decompressedSize);
return CoderUtils.decodeFromByteArray(innerCoder, encoded);
} finally {
ctx.close();
}
}
@Override
public List<? extends Coder<?>> getCoderArguments() {
return ImmutableList.of(innerCoder);
}
/**
* {@inheritDoc}
*
* <p>{@link ZstdCoder} is deterministic if the inner coder is deterministic.
*/
@Override
public void verifyDeterministic() throws NonDeterministicException {
innerCoder.verifyDeterministic();
}
/**
* {@inheritDoc}
*
* <p>{@link ZstdCoder} is consistent with equals if the inner coder is consistent with equals.
*
* @return The same value as the inner coder.
*/
@Override
public boolean consistentWithEquals() {
return innerCoder.consistentWithEquals();
}
/**
* {@inheritDoc}
*
* <p>{@link ZstdCoder} uses the structural value of the inner coder.
*
* @return The structural value of the inner coder.
*/
@Override
public Object structuralValue(T value) {
return innerCoder.structuralValue(value);
}
/**
* {@inheritDoc}
*
* @return {@code true} if the two {@link ZstdCoder} instances have the same class, inner coder,
* dictionary and compression level.
*/
@Override
public boolean equals(@Nullable Object o) {
if (o == this) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ZstdCoder<?> that = (ZstdCoder<?>) o;
return innerCoder.equals(that.innerCoder)
&& Arrays.equals(dict, that.dict)
&& level == that.level;
}
@Override
public int hashCode() {
return Objects.hash(innerCoder, Arrays.hashCode(dict), level);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("innerCoder", innerCoder)
.add("dict", dict == null ? null : "base64:" + BaseEncoding.base64().encode(dict))
.add("level", level)
.toString();
}
}
|
The WARNING log may mislead the user. I think this behavior is normal when some tables are dropped before publishing.
|
/**
 * Updates the in-memory versions of this backend's replicas for the given tablets
 * after a publish, under the database write lock. Tablets whose replica no longer
 * exists on this backend are skipped. Emits tracing events on {@code span} when
 * tracing is enabled.
 */
public void updateReplicaVersions(List<TTabletVersionPair> tabletVersions) {
    if (span != null) {
        span.addEvent("update_replica_version_start");
        span.setAttribute("num_replicas", tabletVersions.size());
    }
    TabletInvertedIndex tablets = GlobalStateMgr.getCurrentInvertedIndex();
    List<Long> tabletIds = tabletVersions.stream().map(tv -> tv.tablet_id).collect(Collectors.toList());
    // Result is index-aligned with tabletIds; a null entry means the tablet has no
    // replica on this backend anymore.
    List<Replica> replicas = tablets.getReplicasOnBackendByTabletIds(tabletIds, backendId);
    if (replicas == null) {
        LOG.warn("backend not found backendid={}", backendId);
        return;
    }
    Database db = GlobalStateMgr.getCurrentState().getDb(dbId);
    if (db == null) {
        LOG.warn("db not found dbid={}", dbId);
        return;
    }
    List<Long> droppedTablets = new ArrayList<>();
    for (int i = 0; i < tabletVersions.size(); i++) {
        if (replicas.get(i) == null) {
            droppedTablets.add(tabletVersions.get(i).tablet_id);
        }
    }
    if (!droppedTablets.isEmpty()) {
        // FIX: log at INFO, not WARN. Tablets legitimately disappear mid-publish
        // (table dropped, or replaced by an alter), so this is expected behavior
        // and should not alarm operators.
        LOG.info("during publish version some tablets were dropped(maybe by alter), tabletIds={}", droppedTablets);
    }
    db.writeLock();
    try {
        for (int i = 0; i < tabletVersions.size(); i++) {
            TTabletVersionPair tabletVersion = tabletVersions.get(i);
            Replica replica = replicas.get(i);
            if (replica == null) {
                continue;
            }
            replica.updateVersion(tabletVersion.version);
        }
    } finally {
        db.writeUnlock();
        if (span != null) {
            span.addEvent("update_replica_version_finish");
        }
    }
}
|
if (!droppedTablets.isEmpty()) {
|
/**
 * Updates the in-memory versions of this backend's replicas for the given tablets
 * after a publish, under the database write lock. Tablets whose replica no longer
 * exists on this backend are skipped (logged at INFO: dropping tables/partitions
 * mid-publish is normal, not an error). Emits tracing events on {@code span} when
 * tracing is enabled.
 */
public void updateReplicaVersions(List<TTabletVersionPair> tabletVersions) {
    if (span != null) {
        span.addEvent("update_replica_version_start");
        span.setAttribute("num_replicas", tabletVersions.size());
    }
    TabletInvertedIndex tablets = GlobalStateMgr.getCurrentInvertedIndex();
    List<Long> tabletIds = tabletVersions.stream().map(tv -> tv.tablet_id).collect(Collectors.toList());
    // Result is index-aligned with tabletIds; a null entry means the tablet has no
    // replica on this backend anymore.
    List<Replica> replicas = tablets.getReplicasOnBackendByTabletIds(tabletIds, backendId);
    if (replicas == null) {
        LOG.warn("backend not found backendid={}", backendId);
        return;
    }
    Database db = GlobalStateMgr.getCurrentState().getDb(dbId);
    if (db == null) {
        LOG.warn("db not found dbid={}", dbId);
        return;
    }
    List<Long> droppedTablets = new ArrayList<>();
    for (int i = 0; i < tabletVersions.size(); i++) {
        if (replicas.get(i) == null) {
            droppedTablets.add(tabletVersions.get(i).tablet_id);
        }
    }
    if (!droppedTablets.isEmpty()) {
        LOG.info("during publish version some tablets were dropped(maybe by alter), tabletIds={}", droppedTablets);
    }
    db.writeLock();
    try {
        for (int i = 0; i < tabletVersions.size(); i++) {
            TTabletVersionPair tabletVersion = tabletVersions.get(i);
            Replica replica = replicas.get(i);
            if (replica == null) {
                continue;
            }
            replica.updateVersion(tabletVersion.version);
        }
    } finally {
        db.writeUnlock();
        if (span != null) {
            span.addEvent("update_replica_version_finish");
        }
    }
}
|
/**
 * Agent task asking one backend to publish a committed transaction's version on a
 * set of partitions, and recording the per-tablet errors the backend reports back.
 *
 * <p>Error state ({@code errorTablets}/{@code errorReplicas}) is guarded by
 * {@code synchronized} accessors; the task is written from an RPC-completion path
 * while other threads poll it — TODO confirm {@code setIsFinished} is only called
 * after {@code setErrorTablets}, since it reads the error state unsynchronized.
 */
class PublishVersionTask extends AgentTask {
    private static final Logger LOG = LogManager.getLogger(PublishVersionTask.class);

    // Set once in the constructor; marked final to make immutability explicit.
    private final long transactionId;
    private final List<TPartitionVersionInfo> partitionVersionInfos;
    // Tablets the backend failed to publish; replaced wholesale via setErrorTablets().
    private final List<Long> errorTablets;
    // Derived from errorTablets; recomputed whenever errorTablets is replaced.
    private Set<Long> errorReplicas;
    private final long commitTimestamp;
    private final TransactionState txnState;
    private Span span;

    public PublishVersionTask(long backendId, long transactionId, long dbId, long commitTimestamp,
                              List<TPartitionVersionInfo> partitionVersionInfos, String traceParent, Span txnSpan,
                              long createTime, TransactionState state) {
        super(null, backendId, TTaskType.PUBLISH_VERSION, dbId, -1L, -1L, -1L, -1L, transactionId, createTime, traceParent);
        this.transactionId = transactionId;
        this.partitionVersionInfos = partitionVersionInfos;
        this.errorTablets = new ArrayList<Long>();
        this.isFinished = false;
        this.commitTimestamp = commitTimestamp;
        this.txnState = state;
        if (txnSpan != null) {
            span = TraceManager.startSpan("publish_version_task", txnSpan);
            span.setAttribute("backend_id", backendId);
            span.setAttribute("num_partition", partitionVersionInfos.size());
        }
    }

    /** Builds the Thrift publish-version request sent to the backend. */
    public TPublishVersionRequest toThrift() {
        if (span != null) {
            span.addEvent("send_to_be");
        }
        TPublishVersionRequest publishVersionRequest = new TPublishVersionRequest(transactionId, partitionVersionInfos);
        publishVersionRequest.setCommit_timestamp(commitTimestamp);
        publishVersionRequest.setTxn_trace_parent(traceParent);
        return publishVersionRequest;
    }

    public long getTransactionId() {
        return transactionId;
    }

    public TransactionState getTxnState() {
        return txnState;
    }

    public List<TPartitionVersionInfo> getPartitionVersionInfos() {
        return partitionVersionInfos;
    }

    // NOTE(review): exposes the internal mutable list; callers must not modify it.
    public synchronized List<Long> getErrorTablets() {
        return errorTablets;
    }

    public synchronized Set<Long> getErrorReplicas() {
        return errorReplicas;
    }

    /** Replaces the error-tablet list and recomputes the derived error-replica set. */
    public synchronized void setErrorTablets(List<Long> errorTablets) {
        this.errorTablets.clear();
        if (errorTablets != null) {
            this.errorTablets.addAll(errorTablets);
        }
        this.errorReplicas = collectErrorReplicas();
    }

    /** Marks completion and closes the tracing span with the final error counts. */
    public void setIsFinished(boolean isFinished) {
        this.isFinished = isFinished;
        if (span != null) {
            span.setAttribute("num_error_replicas", errorReplicas.size());
            span.setAttribute("num_error_tablets", errorTablets.size());
            span.end();
        }
    }

    public boolean isFinished() {
        return isFinished;
    }

    /**
     * Maps each error tablet to this backend's replica id, quietly skipping tablets
     * whose metadata is already gone and logging replicas that cannot be found.
     */
    private Set<Long> collectErrorReplicas() {
        TabletInvertedIndex tablets = GlobalStateMgr.getCurrentInvertedIndex();
        Set<Long> errorReplicas = Sets.newHashSet();
        List<Long> errorTablets = this.getErrorTablets();
        if (errorTablets != null && !errorTablets.isEmpty()) {
            for (long tabletId : errorTablets) {
                // Tablet meta may have been removed concurrently (e.g. dropped).
                if (tablets.getTabletMeta(tabletId) == null) {
                    continue;
                }
                Replica replica = tablets.getReplica(tabletId, this.getBackendId());
                if (replica != null) {
                    errorReplicas.add(replica.getId());
                } else {
                    LOG.info("could not find related replica with tabletid={}, backendid={}", tabletId, this.getBackendId());
                }
            }
        }
        return errorReplicas;
    }
}
|
class PublishVersionTask extends AgentTask {
private static final Logger LOG = LogManager.getLogger(PublishVersionTask.class);
private long transactionId;
private List<TPartitionVersionInfo> partitionVersionInfos;
private List<Long> errorTablets;
private Set<Long> errorReplicas;
private long commitTimestamp;
private TransactionState txnState = null;
private Span span;
public PublishVersionTask(long backendId, long transactionId, long dbId, long commitTimestamp,
List<TPartitionVersionInfo> partitionVersionInfos, String traceParent, Span txnSpan,
long createTime, TransactionState state) {
super(null, backendId, TTaskType.PUBLISH_VERSION, dbId, -1L, -1L, -1L, -1L, transactionId, createTime, traceParent);
this.transactionId = transactionId;
this.partitionVersionInfos = partitionVersionInfos;
this.errorTablets = new ArrayList<Long>();
this.isFinished = false;
this.commitTimestamp = commitTimestamp;
this.txnState = state;
if (txnSpan != null) {
span = TraceManager.startSpan("publish_version_task", txnSpan);
span.setAttribute("backend_id", backendId);
span.setAttribute("num_partition", partitionVersionInfos.size());
}
}
public TPublishVersionRequest toThrift() {
if (span != null) {
span.addEvent("send_to_be");
}
TPublishVersionRequest publishVersionRequest = new TPublishVersionRequest(transactionId, partitionVersionInfos);
publishVersionRequest.setCommit_timestamp(commitTimestamp);
publishVersionRequest.setTxn_trace_parent(traceParent);
return publishVersionRequest;
}
public long getTransactionId() {
return transactionId;
}
public TransactionState getTxnState() {
return txnState;
}
public List<TPartitionVersionInfo> getPartitionVersionInfos() {
return partitionVersionInfos;
}
public synchronized List<Long> getErrorTablets() {
return errorTablets;
}
public synchronized Set<Long> getErrorReplicas() {
return errorReplicas;
}
public synchronized void setErrorTablets(List<Long> errorTablets) {
this.errorTablets.clear();
if (errorTablets != null) {
this.errorTablets.addAll(errorTablets);
}
this.errorReplicas = collectErrorReplicas();
}
public void setIsFinished(boolean isFinished) {
this.isFinished = isFinished;
if (span != null) {
span.setAttribute("num_error_replicas", errorReplicas.size());
span.setAttribute("num_error_tablets", errorTablets.size());
span.end();
}
}
public boolean isFinished() {
return isFinished;
}
/**
 * Maps the current error tablets to the replica ids hosted on this task's backend.
 * Tablets with no metadata in the inverted index are skipped; tablets without a
 * replica on this backend are logged and skipped.
 *
 * @return the set of replica ids that failed on this backend (possibly empty)
 */
private Set<Long> collectErrorReplicas() {
    TabletInvertedIndex tablets = GlobalStateMgr.getCurrentInvertedIndex();
    Set<Long> errorReplicas = Sets.newHashSet();
    List<Long> errorTablets = this.getErrorTablets();
    if (errorTablets != null && !errorTablets.isEmpty()) {
        for (long tabletId : errorTablets) {
            // Tablet meta may already be gone (e.g. tablet dropped); ignore it.
            if (tablets.getTabletMeta(tabletId) == null) {
                continue;
            }
            Replica replica = tablets.getReplica(tabletId, this.getBackendId());
            if (replica != null) {
                errorReplicas.add(replica.getId());
            } else {
                LOG.info("could not find related replica with tabletid={}, backendid={}", tabletId, this.getBackendId());
            }
        }
    }
    return errorReplicas;
}
}
|
Yes, you are correct; I will update it.
|
/**
 * Verifies that this list value may be resized to {@code length}.
 * <p>
 * Fixed-length tuples (no rest type) and closed/sealed arrays reject any resize
 * with an inherent-type-violation error; a tuple with a rest type only rejects
 * lengths smaller than its mandatory member count.
 *
 * @param length the requested new length
 */
private void checkFixedLength(long length) {
    // Nothing to validate when no list type is attached; also avoids the NPE the
    // old code hit when falling into the array branch with a null arrayType.
    if (arrayType == null) {
        return;
    }
    if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
        BTupleType tupleType = (BTupleType) this.arrayType;
        // The second operand of the old condition redundantly re-checked
        // "getRestType() != null"; short-circuit evaluation already guarantees it.
        if (tupleType.getRestType() == null || tupleType.getTupleTypes().size() > length) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INHERENT_TYPE_VIOLATION_ERROR,
                    RuntimeErrors.ILLEGAL_TUPLE_SIZE, length);
        }
    } else if (((BArrayType) this.arrayType).getState() == ArrayState.CLOSED_SEALED) {
        throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INHERENT_TYPE_VIOLATION_ERROR,
                RuntimeErrors.ILLEGAL_ARRAY_SIZE, length);
    }
}
|
if (tupleType.getRestType() == null
|
/**
 * Verifies that this list value may be resized to {@code length}.
 * Fixed-length tuples (no rest type) reject any resize; tuples with a rest type
 * reject lengths below the mandatory member count; closed/sealed arrays reject
 * any resize. A null {@code arrayType} means there is nothing to validate.
 *
 * @param length the requested new length
 */
private void checkFixedLength(long length) {
    if (arrayType == null) {
        return;
    }
    if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
        BTupleType tupleType = (BTupleType) this.arrayType;
        if (tupleType.getRestType() == null) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INHERENT_TYPE_VIOLATION_ERROR,
                    RuntimeErrors.ILLEGAL_TUPLE_SIZE, size, length);
        } else if (tupleType.getTupleTypes().size() > length) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INHERENT_TYPE_VIOLATION_ERROR,
                    RuntimeErrors.ILLEGAL_TUPLE_WITH_REST_TYPE_SIZE, tupleType.getTupleTypes().size(), length);
        }
    } else if (((BArrayType) this.arrayType).getState() == ArrayState.CLOSED_SEALED) {
        throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INHERENT_TYPE_VIOLATION_ERROR,
                RuntimeErrors.ILLEGAL_ARRAY_SIZE, size, length);
    }
}
|
/**
 * Runtime representation of a Ballerina list value (array or tuple).
 * <p>
 * Primitive element types (int/boolean/byte/float/string) are stored in dedicated
 * primitive backing arrays to avoid boxing; everything else lives in {@code refValues}.
 * Exactly one backing array is non-null at a time; {@code stamp} may migrate values
 * between the primitive and reference representations.
 */
class ArrayValue implements RefValue, CollectionValue {
    // Largest array this implementation will ever allocate (mirrors the JDK limit).
    static final int SYSTEM_ARRAY_MAX = Integer.MAX_VALUE - 8;
    // The declared list type (BArrayType or BTupleType); may be rewritten by stamp().
    protected BType arrayType;
    // Freeze (immutability) state shared across a frozen value graph.
    private volatile Status freezeStatus = new Status(State.UNFROZEN);
    /**
     * The maximum size of arrays to allocate.
     * <p>
     * This is same as Java
     */
    protected int maxArraySize = SYSTEM_ARRAY_MAX;
    // Initial capacity used when no explicit size is given.
    private static final int DEFAULT_ARRAY_SIZE = 100;
    // Logical length of the list (may be smaller than the backing array length).
    protected int size = 0;
    Object[] refValues;
    private long[] intValues;
    private boolean[] booleanValues;
    private byte[] byteValues;
    private double[] floatValues;
    private String[] stringValues;
    // Element type for array-typed values; null once values move to refValues via stamp().
    public BType elementType;
    // Rest type of a tuple-typed value; null for fixed-length tuples and plain arrays.
    private BType tupleRestType;

    /** Wraps an existing Object[] as a list of the given type (no copy is made). */
    public ArrayValue(Object[] values, BType type) {
        this.refValues = values;
        this.arrayType = type;
        this.size = values.length;
        if (type.getTag() == TypeTags.ARRAY_TAG) {
            this.elementType = ((BArrayType) type).getElementType();
        }
    }

    /** Wraps an existing long[] as an int array (no copy is made). */
    public ArrayValue(long[] values) {
        this.intValues = values;
        this.size = values.length;
        setArrayElementType(BTypes.typeInt);
    }

    /** Wraps an existing boolean[] as a boolean array (no copy is made). */
    public ArrayValue(boolean[] values) {
        this.booleanValues = values;
        this.size = values.length;
        setArrayElementType(BTypes.typeBoolean);
    }

    /** Wraps an existing byte[] as a byte array (no copy is made). */
    public ArrayValue(byte[] values) {
        this.byteValues = values;
        this.size = values.length;
        setArrayElementType(BTypes.typeByte);
    }

    /** Wraps an existing double[] as a float array (no copy is made). */
    public ArrayValue(double[] values) {
        this.floatValues = values;
        this.size = values.length;
        setArrayElementType(BTypes.typeFloat);
    }

    /** Wraps an existing String[] as a string array (no copy is made). */
    public ArrayValue(String[] values) {
        this.stringValues = values;
        this.size = values.length;
        setArrayElementType(BTypes.typeString);
    }

    /**
     * Creates an empty list of the given type. For closed/sealed arrays and
     * fixed-length tuples the logical size and the size cap are set up front;
     * tuple members are pre-filled with their empty values.
     */
    public ArrayValue(BType type) {
        if (type.getTag() == TypeTags.INT_TAG) {
            intValues = (long[]) newArrayInstance(Long.TYPE);
            setArrayElementType(type);
        } else if (type.getTag() == TypeTags.BOOLEAN_TAG) {
            booleanValues = (boolean[]) newArrayInstance(Boolean.TYPE);
            setArrayElementType(type);
        } else if (type.getTag() == TypeTags.BYTE_TAG) {
            byteValues = (byte[]) newArrayInstance(Byte.TYPE);
            setArrayElementType(type);
        } else if (type.getTag() == TypeTags.FLOAT_TAG) {
            floatValues = (double[]) newArrayInstance(Double.TYPE);
            setArrayElementType(type);
        } else if (type.getTag() == TypeTags.STRING_TAG) {
            stringValues = (String[]) newArrayInstance(String.class);
            setArrayElementType(type);
        } else {
            this.arrayType = type;
            if (type.getTag() == TypeTags.ARRAY_TAG) {
                BArrayType arrayType = (BArrayType) type;
                this.elementType = arrayType.getElementType();
                if (arrayType.getState() == ArrayState.CLOSED_SEALED) {
                    this.size = maxArraySize = arrayType.getSize();
                }
                initArrayValues(this.elementType);
            } else if (type.getTag() == TypeTags.TUPLE_TAG) {
                BTupleType tupleType = (BTupleType) type;
                tupleRestType = tupleType.getRestType();
                size = tupleType.getTupleTypes().size();
                // A rest type makes the tuple growable; otherwise the member count is the cap.
                maxArraySize = (tupleRestType != null) ? maxArraySize : size;
                refValues = (Object[]) newArrayInstance(Object.class);
                AtomicInteger counter = new AtomicInteger(0);
                tupleType.getTupleTypes()
                        .forEach(memType -> refValues[counter.getAndIncrement()] = memType.getEmptyValue());
            } else if (type.getTag() == TypeTags.UNION_TAG) {
                BUnionType unionType = (BUnionType) type;
                this.size = maxArraySize = unionType.getMemberTypes().size();
                unionType.getMemberTypes().forEach(this::initArrayValues);
            } else {
                refValues = (Object[]) newArrayInstance(Object.class);
            }
        }
    }

    /** Allocates the backing array matching {@code elementType}'s tag. */
    private void initArrayValues(BType elementType) {
        switch (elementType.getTag()) {
            case TypeTags.INT_TAG:
                intValues = (long[]) newArrayInstance(Long.TYPE);
                break;
            case TypeTags.FLOAT_TAG:
                floatValues = (double[]) newArrayInstance(Double.TYPE);
                break;
            case TypeTags.STRING_TAG:
                stringValues = (String[]) newArrayInstance(String.class);
                break;
            case TypeTags.BOOLEAN_TAG:
                booleanValues = (boolean[]) newArrayInstance(Boolean.TYPE);
                break;
            case TypeTags.BYTE_TAG:
                byteValues = (byte[]) newArrayInstance(Byte.TYPE);
                break;
            case TypeTags.XML_TAG:
                refValues = (Object[]) newArrayInstance(Object.class);
                break;
            default:
                refValues = (Object[]) newArrayInstance(Object.class);
        }
    }

    /** Creates an untyped, empty reference array. */
    public ArrayValue() {
        refValues = (Object[]) newArrayInstance(Object.class);
    }

    /**
     * Creates a list of the given type with an explicit initial size.
     * A size of -1 means "unspecified" and leaves the default capacity in place.
     */
    public ArrayValue(BType type, long size) {
        this.arrayType = type;
        if (type.getTag() == TypeTags.ARRAY_TAG) {
            elementType = ((BArrayType) type).getElementType();
            if (size != -1) {
                this.size = maxArraySize = (int) size;
            }
            initArrayValues(elementType);
        } else if (type.getTag() == TypeTags.TUPLE_TAG) {
            tupleRestType = ((BTupleType) type).getRestType();
            if (size != -1) {
                this.size = (int) size;
                maxArraySize = (tupleRestType != null) ? maxArraySize : (int) size;
            }
            refValues = (Object[]) newArrayInstance(Object.class);
        } else {
            if (size != -1) {
                this.size = maxArraySize = (int) size;
            }
            refValues = (Object[]) newArrayInstance(Object.class);
        }
    }

    /** Returns the element at {@code index}, boxed, dispatching on the element type. */
    public Object getValue(long index) {
        if (elementType != null) {
            if (elementType.getTag() == TypeTags.INT_TAG) {
                return getInt(index);
            } else if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
                return getBoolean(index);
            } else if (elementType.getTag() == TypeTags.BYTE_TAG) {
                return getByte(index);
            } else if (elementType.getTag() == TypeTags.FLOAT_TAG) {
                return getFloat(index);
            } else if (elementType.getTag() == TypeTags.STRING_TAG) {
                return getString(index);
            } else {
                return getRefValue(index);
            }
        }
        return getRefValue(index);
    }

    /** Returns the element at {@code index} from the reference array, falling back to getValue for primitives. */
    public Object getRefValue(long index) {
        rangeCheckForGet(index, size);
        if (refValues == null) {
            return getValue(index);
        }
        return refValues[(int) index];
    }

    /** Returns the int at {@code index}; reads the boxed value if values live in refValues. */
    public long getInt(long index) {
        rangeCheckForGet(index, size);
        if (elementType.getTag() == TypeTags.INT_TAG) {
            return intValues[(int) index];
        } else {
            return (Long) refValues[(int) index];
        }
    }

    /** Returns the boolean at {@code index}; reads the boxed value if values live in refValues. */
    public boolean getBoolean(long index) {
        rangeCheckForGet(index, size);
        if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
            return booleanValues[(int) index];
        } else {
            return (Boolean) refValues[(int) index];
        }
    }

    /** Returns the byte at {@code index}; reads the boxed value if values live in refValues. */
    public byte getByte(long index) {
        rangeCheckForGet(index, size);
        if (elementType.getTag() == TypeTags.BYTE_TAG) {
            return byteValues[(int) index];
        } else {
            return (Byte) refValues[(int) index];
        }
    }

    /** Returns the float at {@code index}; reads the boxed value if values live in refValues. */
    public double getFloat(long index) {
        rangeCheckForGet(index, size);
        if (elementType.getTag() == TypeTags.FLOAT_TAG) {
            return floatValues[(int) index];
        } else {
            return (Double) refValues[(int) index];
        }
    }

    /** Returns the string at {@code index}; reads the value from refValues if not in stringValues. */
    public String getString(long index) {
        rangeCheckForGet(index, size);
        if (elementType.getTag() == TypeTags.STRING_TAG) {
            return stringValues[(int) index];
        } else {
            return (String) refValues[(int) index];
        }
    }

    /**
     * Returns the element at {@code index}, boxed, dispatching on elementType's tag.
     * NOTE(review): unlike getValue, this dereferences elementType unconditionally —
     * presumably callers guarantee it is non-null here; confirm.
     */
    public Object get(long index) {
        rangeCheckForGet(index, size);
        switch (this.elementType.getTag()) {
            case TypeTags.INT_TAG:
                return intValues[(int) index];
            case TypeTags.BOOLEAN_TAG:
                return booleanValues[(int) index];
            case TypeTags.BYTE_TAG:
                return byteValues[(int) index];
            case TypeTags.FLOAT_TAG:
                return floatValues[(int) index];
            case TypeTags.STRING_TAG:
                return stringValues[(int) index];
            default:
                return refValues[(int) index];
        }
    }

    /** Stores a reference value at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, Object value) {
        handleFrozenArrayValue();
        prepareForAdd(index, refValues.length);
        refValues[(int) index] = value;
    }

    /** Stores an int at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, long value) {
        handleFrozenArrayValue();
        prepareForAdd(index, intValues.length);
        intValues[(int) index] = value;
    }

    /** Stores a boolean at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, boolean value) {
        if (elementType.getTag() == TypeTags.INT_TAG) {
            // NOTE(review): add(index, value) with a boolean argument resolves to this
            // same overload, so this branch recurses infinitely — verify intent.
            add(index, value);
            return;
        }
        handleFrozenArrayValue();
        prepareForAdd(index, booleanValues.length);
        booleanValues[(int) index] = value;
    }

    /** Stores a byte at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, byte value) {
        handleFrozenArrayValue();
        prepareForAdd(index, byteValues.length);
        byteValues[(int) index] = value;
    }

    /** Stores a float at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, double value) {
        handleFrozenArrayValue();
        prepareForAdd(index, floatValues.length);
        floatValues[(int) index] = value;
    }

    /** Stores a string at {@code index}, growing/filling as needed. Fails on frozen values. */
    public void add(long index, String value) {
        handleFrozenArrayValue();
        prepareForAdd(index, stringValues.length);
        stringValues[(int) index] = value;
    }

    /** Appends a value at the end of the list. */
    public void append(Object value) {
        add(size, value);
    }

    /** Removes and returns the element at {@code index}, shifting later elements left. */
    public Object shift(long index) {
        handleFrozenArrayValue();
        Object val = get(index);
        shiftArray((int) index, getArrayFromType(elementType.getTag()));
        return val;
    }

    /** Moves elements after {@code index} one slot left in {@code arr} and shrinks the size. */
    private void shiftArray(int index, Object arr) {
        int nElemsToBeMoved = this.size - 1 - index;
        if (nElemsToBeMoved >= 0) {
            System.arraycopy(arr, index + 1, arr, index, nElemsToBeMoved);
        }
        this.size--;
    }

    /** Inserts all of {@code vals} at {@code index}, shifting existing elements right. */
    public void unshift(long index, ArrayValue vals) {
        handleFrozenArrayValue();
        unshiftArray(index, vals.size, getCurrentArrayLength());
        switch (elementType.getTag()) {
            case TypeTags.INT_TAG:
                addToIntArray(vals, (int) index);
                break;
            case TypeTags.BOOLEAN_TAG:
                addToBooleanArray(vals, (int) index);
                break;
            case TypeTags.BYTE_TAG:
                addToByteArray(vals, (int) index);
                break;
            case TypeTags.FLOAT_TAG:
                addToFloatArray(vals, (int) index);
                break;
            case TypeTags.STRING_TAG:
                addToStringArray(vals, (int) index);
                break;
            default:
                addToRefArray(vals, (int) index);
        }
    }

    /** Copies vals' ints into this array starting at startIndex. */
    private void addToIntArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            add(i, vals.getInt(j));
        }
    }

    /** Copies vals' floats into this array starting at startIndex. */
    private void addToFloatArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            add(i, vals.getFloat(j));
        }
    }

    /** Copies vals' strings into this array starting at startIndex. */
    private void addToStringArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            add(i, vals.getString(j));
        }
    }

    /** Copies vals' bytes directly into the byte backing array starting at startIndex. */
    private void addToByteArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        byte[] bytes = vals.getBytes();
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            this.byteValues[i] = bytes[j];
        }
    }

    /** Copies vals' booleans into this array starting at startIndex. */
    private void addToBooleanArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            add(i, vals.getBoolean(j));
        }
    }

    /** Copies vals' reference values into this array starting at startIndex. */
    private void addToRefArray(ArrayValue vals, int startIndex) {
        int endIndex = startIndex + vals.size;
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            add(i, vals.getRefValue(j));
        }
    }

    /** Makes room for {@code unshiftByN} elements at {@code index} by shifting the tail right. */
    private void unshiftArray(long index, int unshiftByN, int arrLength) {
        int lastIndex = size() + unshiftByN - 1;
        prepareForConsecutiveMultiAdd(lastIndex, arrLength);
        Object arr = getArrayFromType(elementType.getTag());
        if (index > lastIndex) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);
        }
        int i = (int) index;
        System.arraycopy(arr, i, arr, i + unshiftByN, this.size - i);
    }

    /** Returns the backing array that corresponds to the given type tag. */
    private Object getArrayFromType(int typeTag) {
        switch (typeTag) {
            case TypeTags.INT_TAG:
                return intValues;
            case TypeTags.BOOLEAN_TAG:
                return booleanValues;
            case TypeTags.BYTE_TAG:
                return byteValues;
            case TypeTags.FLOAT_TAG:
                return floatValues;
            case TypeTags.STRING_TAG:
                return stringValues;
            default:
                return refValues;
        }
    }

    /** Returns the capacity (length) of the currently active backing array. */
    private int getCurrentArrayLength() {
        switch (elementType.getTag()) {
            case TypeTags.INT_TAG:
                return intValues.length;
            case TypeTags.BOOLEAN_TAG:
                return booleanValues.length;
            case TypeTags.BYTE_TAG:
                return byteValues.length;
            case TypeTags.FLOAT_TAG:
                return floatValues.length;
            case TypeTags.STRING_TAG:
                return stringValues.length;
            default:
                return refValues.length;
        }
    }

    /**
     * Renders the list as a space-separated string; JSON arrays are serialized via
     * the JSON generator and nulls render as "()".
     */
    @Override
    public String stringValue() {
        if (elementType != null) {
            StringJoiner sj = new StringJoiner(" ");
            if (elementType.getTag() == TypeTags.INT_TAG) {
                for (int i = 0; i < size; i++) {
                    sj.add(Long.toString(intValues[i]));
                }
                return sj.toString();
            } else if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
                for (int i = 0; i < size; i++) {
                    sj.add(Boolean.toString(booleanValues[i]));
                }
                return sj.toString();
            } else if (elementType.getTag() == TypeTags.BYTE_TAG) {
                for (int i = 0; i < size; i++) {
                    sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));
                }
                return sj.toString();
            } else if (elementType.getTag() == TypeTags.FLOAT_TAG) {
                for (int i = 0; i < size; i++) {
                    sj.add(Double.toString(floatValues[i]));
                }
                return sj.toString();
            } else if (elementType.getTag() == TypeTags.STRING_TAG) {
                for (int i = 0; i < size; i++) {
                    sj.add(stringValues[i]);
                }
                return sj.toString();
            }
        }
        if (getElementType(arrayType).getTag() == TypeTags.JSON_TAG) {
            return getJSONString();
        }
        StringJoiner sj;
        // NOTE(review): both branches construct an identical joiner; the tuple special
        // case appears to be vestigial — confirm whether a different separator was intended.
        if (arrayType != null && (arrayType.getTag() == TypeTags.TUPLE_TAG)) {
            sj = new StringJoiner(" ");
        } else {
            sj = new StringJoiner(" ");
        }
        for (int i = 0; i < size; i++) {
            if (refValues[i] != null) {
                sj.add((refValues[i] instanceof RefValue) ? ((RefValue) refValues[i]).stringValue() :
                        (refValues[i] instanceof String) ? (String) refValues[i] : refValues[i].toString());
            } else {
                sj.add("()");
            }
        }
        return sj.toString();
    }

    /** Returns the declared list type. */
    @Override
    public BType getType() {
        return arrayType;
    }

    /** Returns the logical length of the list. */
    @Override
    public int size() {
        return size;
    }

    /** Returns true when the list has no elements. */
    public boolean isEmpty() {
        return size == 0;
    }

    /**
     * Stamps this value with {@code type}, recursively stamping members and migrating
     * between primitive and reference backing arrays as the target type requires.
     */
    @Override
    public void stamp(BType type, List<TypeValuePair> unresolvedValues) {
        if (type.getTag() == TypeTags.TUPLE_TAG) {
            if (elementType != null && isBasicType(elementType)) {
                moveBasicTypeArrayToRefValueArray();
            }
            Object[] arrayValues = this.getValues();
            for (int i = 0; i < this.size(); i++) {
                if (arrayValues[i] instanceof RefValue) {
                    BType memberType = ((BTupleType) type).getTupleTypes().get(i);
                    if (memberType.getTag() == TypeTags.ANYDATA_TAG || memberType.getTag() == TypeTags.JSON_TAG) {
                        memberType = TypeConverter.resolveMatchingTypeForUnion(arrayValues[i], memberType);
                        ((BTupleType) type).getTupleTypes().set(i, memberType);
                    }
                    ((RefValue) arrayValues[i]).stamp(memberType, unresolvedValues);
                }
            }
        } else if (type.getTag() == TypeTags.JSON_TAG) {
            if (elementType != null && isBasicType(elementType) && !isBasicType(type)) {
                moveBasicTypeArrayToRefValueArray();
                this.arrayType = new BArrayType(type);
                return;
            }
            Object[] arrayValues = this.getValues();
            for (int i = 0; i < this.size(); i++) {
                if (arrayValues[i] instanceof RefValue) {
                    ((RefValue) arrayValues[i]).stamp(TypeConverter.resolveMatchingTypeForUnion(arrayValues[i], type),
                            unresolvedValues);
                }
            }
            type = new BArrayType(type);
        } else if (type.getTag() == TypeTags.UNION_TAG) {
            // Stamp with the first union member this value looks like.
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (TypeChecker.checkIsLikeType(this, memberType, new ArrayList<>())) {
                    this.stamp(memberType, unresolvedValues);
                    type = memberType;
                    break;
                }
            }
        } else if (type.getTag() == TypeTags.ANYDATA_TAG) {
            type = TypeConverter.resolveMatchingTypeForUnion(this, type);
            this.stamp(type, unresolvedValues);
        } else {
            BType arrayElementType = ((BArrayType) type).getElementType();
            if (elementType != null && isBasicType(elementType)) {
                if (isBasicType(arrayElementType)) {
                    this.arrayType = type;
                    return;
                }
                moveBasicTypeArrayToRefValueArray();
                this.arrayType = type;
                return;
            }
            if (isBasicType(arrayElementType) &&
                    (arrayType.getTag() == TypeTags.TUPLE_TAG || !isBasicType(elementType))) {
                moveRefValueArrayToBasicTypeArray(type, arrayElementType);
                return;
            }
            Object[] arrayValues = this.getValues();
            for (int i = 0; i < this.size(); i++) {
                if (arrayValues[i] instanceof RefValue) {
                    ((RefValue) arrayValues[i]).stamp(arrayElementType, unresolvedValues);
                }
            }
        }
        this.arrayType = type;
    }

    /**
     * Deep-copies this value; frozen values are shared, and {@code refs} breaks cycles
     * by reusing already-copied values.
     */
    @Override
    public Object copy(Map<Object, Object> refs) {
        if (isFrozen()) {
            return this;
        }
        if (refs.containsKey(this)) {
            return refs.get(this);
        }
        if (elementType != null) {
            ArrayValue valueArray = null;
            if (elementType.getTag() == TypeTags.INT_TAG) {
                valueArray = new ArrayValue(Arrays.copyOf(intValues, intValues.length));
            } else if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
                valueArray = new ArrayValue(Arrays.copyOf(booleanValues, booleanValues.length));
            } else if (elementType.getTag() == TypeTags.BYTE_TAG) {
                valueArray = new ArrayValue(Arrays.copyOf(byteValues, byteValues.length));
            } else if (elementType.getTag() == TypeTags.FLOAT_TAG) {
                valueArray = new ArrayValue(Arrays.copyOf(floatValues, floatValues.length));
            } else if (elementType.getTag() == TypeTags.STRING_TAG) {
                valueArray = new ArrayValue(Arrays.copyOf(stringValues, stringValues.length));
            }
            if (valueArray != null) {
                valueArray.size = this.size;
                refs.put(this, valueArray);
                return valueArray;
            }
        }
        Object[] values = new Object[size];
        ArrayValue refValueArray = new ArrayValue(values, arrayType);
        refValueArray.size = this.size;
        // Register the copy before recursing so cyclic references resolve to it.
        refs.put(this, refValueArray);
        int bound = this.size;
        IntStream.range(0, bound).forEach(i -> {
            Object value = this.refValues[i];
            if (value instanceof RefValue) {
                values[i] = ((RefValue) value).copy(refs);
            } else {
                values[i] = value;
            }
        });
        return refValueArray;
    }

    /** Deep-copies this value and freezes the copy. */
    @Override
    public Object frozenCopy(Map<Object, Object> refs) {
        ArrayValue copy = (ArrayValue) copy(refs);
        if (!copy.isFrozen()) {
            copy.freezeDirect();
        }
        return copy;
    }

    @Override
    public String toString() {
        return stringValue();
    }

    /** Returns the raw reference backing array (null when values are stored as primitives). */
    public Object[] getValues() {
        return refValues;
    }

    /** Returns a copy of the byte contents, trimmed to the logical size. */
    public byte[] getBytes() {
        byte[] bytes = new byte[this.size];
        System.arraycopy(byteValues, 0, bytes, 0, this.size);
        return bytes;
    }

    /** Returns a copy of the string contents, trimmed to the logical size. */
    public String[] getStringArray() {
        return Arrays.copyOf(stringValues, size);
    }

    /** Returns a copy of the int contents, trimmed to the logical size. */
    public long[] getLongArray() {
        return Arrays.copyOf(intValues, size);
    }

    /**
     * Writes this value to {@code outputStream}: byte arrays are written raw,
     * everything else as its string representation.
     */
    @Override
    public void serialize(OutputStream outputStream) {
        if (elementType.getTag() == TypeTags.BYTE_TAG) {
            try {
                outputStream.write(byteValues);
            } catch (IOException e) {
                throw new BallerinaException("error occurred while writing the binary content to the output stream", e);
            }
        } else {
            try {
                outputStream.write(this.toString().getBytes(Charset.defaultCharset()));
            } catch (IOException e) {
                throw new BallerinaException("error occurred while serializing data", e);
            }
        }
    }

    /** Resizes whichever backing array is active to {@code newLength}, preserving contents. */
    public void resizeInternalArray(int newLength) {
        if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
            refValues = Arrays.copyOf(refValues, newLength);
        } else {
            if (elementType != null) {
                switch (elementType.getTag()) {
                    case TypeTags.INT_TAG:
                        intValues = Arrays.copyOf(intValues, newLength);
                        break;
                    case TypeTags.BOOLEAN_TAG:
                        booleanValues = Arrays.copyOf(booleanValues, newLength);
                        break;
                    case TypeTags.BYTE_TAG:
                        byteValues = Arrays.copyOf(byteValues, newLength);
                        break;
                    case TypeTags.FLOAT_TAG:
                        floatValues = Arrays.copyOf(floatValues, newLength);
                        break;
                    case TypeTags.STRING_TAG:
                        stringValues = Arrays.copyOf(stringValues, newLength);
                        break;
                    default:
                        refValues = Arrays.copyOf(refValues, newLength);
                        break;
                }
            } else {
                refValues = Arrays.copyOf(refValues, newLength);
            }
        }
    }

    /**
     * Fills slots [size, index) with the appropriate zero/filler value when the list
     * is grown past its current size. Primitive numeric/boolean arrays need no fill
     * because Java zero-initializes them.
     */
    private void fillValues(int index) {
        if (index <= size) {
            return;
        }
        if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
            if (tupleRestType != null) {
                Arrays.fill(refValues, size, index, tupleRestType.getZeroValue());
            }
        } else {
            int typeTag = elementType.getTag();
            if (typeTag == TypeTags.STRING_TAG) {
                Arrays.fill(stringValues, size, index, BLangConstants.STRING_EMPTY_VALUE);
                return;
            }
            if (typeTag == TypeTags.INT_TAG || typeTag == TypeTags.BYTE_TAG || typeTag == TypeTags.FLOAT_TAG ||
                    typeTag == TypeTags.BOOLEAN_TAG) {
                return;
            }
            Arrays.fill(refValues, size, index, elementType.getZeroValue());
        }
    }

    /** Returns the declared list type. */
    public BType getArrayType() {
        return arrayType;
    }

    /** Bounds check for reads; throws a tuple- or array-specific index-out-of-range error. */
    private void rangeCheckForGet(long index, int size) {
        rangeCheck(index, size);
        if (index < 0 || index >= size) {
            if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
                throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                        RuntimeErrors.TUPLE_INDEX_OUT_OF_RANGE, index, size);
            }
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);
        }
    }

    /** Bounds check for writes: index must fit in an int and respect maxArraySize. */
    private void rangeCheck(long index, int size) {
        if (index > Integer.MAX_VALUE || index < Integer.MIN_VALUE) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);
        }
        if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
            // Tuples with a rest type may grow beyond maxArraySize-relative checks.
            if ((((BTupleType) arrayType).getRestType() == null && index >= maxArraySize) || (int) index < 0) {
                throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                        RuntimeErrors.TUPLE_INDEX_OUT_OF_RANGE, index, size);
            }
        } else {
            if ((int) index < 0 || index >= maxArraySize) {
                throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                        RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);
            }
        }
    }

    /** Rejects a write past the end when the element/rest type has no filler value. */
    private void fillerValueCheck(int index, int size) {
        if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
            if (!TypeChecker.hasFillerValue(tupleRestType) && (index > size)) {
                throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,
                        RuntimeErrors.ILLEGAL_TUPLE_INSERTION, size, index + 1);
            }
        } else {
            if (!TypeChecker.hasFillerValue(elementType) && (index > size)) {
                throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,
                        RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);
            }
        }
    }

    /** Allocates a backing array of {@code componentType}: {@code size} slots, or the default capacity when size is 0. */
    Object newArrayInstance(Class<?> componentType) {
        return (size > 0) ?
                Array.newInstance(componentType, size) : Array.newInstance(componentType, DEFAULT_ARRAY_SIZE);
    }

    /** Records {@code type} as the element type and wraps it in a BArrayType as the list type. */
    private void setArrayElementType(BType type) {
        this.arrayType = new BArrayType(type);
        this.elementType = type;
    }

    /** Serializes this value as JSON text via the JSON generator. */
    public String getJSONString() {
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
        JSONGenerator gen = new JSONGenerator(byteOut);
        try {
            gen.serialize(this);
            gen.flush();
        } catch (IOException e) {
            throw new BallerinaException("Error in converting JSON to a string: " + e.getMessage(), e);
        }
        return new String(byteOut.toByteArray());
    }

    /** Recursively unwraps nested array types to their innermost element type. */
    private BType getElementType(BType type) {
        if (type.getTag() != TypeTags.ARRAY_TAG) {
            return type;
        }
        return getElementType(((BArrayType) type).getElementType());
    }

    /**
     * Util method to handle frozen array values.
     */
    private void handleFrozenArrayValue() {
        synchronized (this) {
            try {
                if (this.freezeStatus.getState() != State.UNFROZEN) {
                    FreezeUtils.handleInvalidUpdate(freezeStatus.getState());
                }
            } catch (BLangFreezeException e) {
                throw BallerinaErrors.createError(e.getMessage(), e.getDetail());
            }
        }
    }

    /** Validates, grows, filler-fills and size-adjusts the list so index can be written. */
    protected void prepareForAdd(long index, int currentArraySize) {
        int intIndex = (int) index;
        rangeCheck(index, size);
        fillerValueCheck(intIndex, size);
        ensureCapacity(intIndex + 1, currentArraySize);
        fillValues(intIndex);
        resetSize(intIndex);
    }

    /**
     * Same as {@code prepareForAdd}, except fillerValueCheck is not performed as we are guaranteed to add
     * elements to consecutive positions.
     *
     * @param index last index after add operation completes
     * @param currentArraySize current array size
     */
    void prepareForConsecutiveMultiAdd(long index, int currentArraySize) {
        int intIndex = (int) index;
        rangeCheck(index, size);
        ensureCapacity(intIndex + 1, currentArraySize);
        resetSize(intIndex);
    }

    /** Grows the backing array (1.5x, capped at maxArraySize) for growable arrays and tuples. */
    private void ensureCapacity(int requestedCapacity, int currentArraySize) {
        if ((requestedCapacity) - currentArraySize > 0) {
            if ((this.arrayType.getTag() == TypeTags.ARRAY_TAG
                    && ((BArrayType) this.arrayType).getState() == ArrayState.UNSEALED)
                    || this.arrayType.getTag() == TypeTags.TUPLE_TAG) {
                int newArraySize = currentArraySize + (currentArraySize >> 1);
                newArraySize = Math.max(newArraySize, requestedCapacity);
                newArraySize = Math.min(newArraySize, maxArraySize);
                resizeInternalArray(newArraySize);
            }
        }
    }

    /** Bumps the logical size so it covers {@code index}. */
    private void resetSize(int index) {
        if (index >= size) {
            size = index + 1;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void attemptFreeze(Status freezeStatus) {
        if (!FreezeUtils.isOpenForFreeze(this.freezeStatus, freezeStatus)) {
            return;
        }
        this.freezeStatus = freezeStatus;
        // Only reference-holding arrays need their members frozen recursively.
        if (elementType == null || elementType.getTag() > TypeTags.BOOLEAN_TAG) {
            for (int i = 0; i < this.size; i++) {
                Object value = this.getRefValue(i);
                if (value instanceof RefValue) {
                    ((RefValue) value).attemptFreeze(freezeStatus);
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void freezeDirect() {
        if (isFrozen()) {
            return;
        }
        this.freezeStatus.setFrozen();
        if (elementType == null || elementType.getTag() > TypeTags.BOOLEAN_TAG) {
            for (int i = 0; i < this.size; i++) {
                Object value = this.getRefValue(i);
                if (value instanceof RefValue) {
                    ((RefValue) value).freezeDirect();
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized boolean isFrozen() {
        return this.freezeStatus.isFrozen();
    }

    /** True for the simple value types that have dedicated primitive backing arrays. */
    private boolean isBasicType(BType type) {
        return type.getTag() <= TypeTags.BOOLEAN_TAG && type.getTag() != TypeTags.DECIMAL_TAG;
    }

    /** Boxes the active primitive backing array into refValues and clears elementType. */
    private void moveBasicTypeArrayToRefValueArray() {
        refValues = new Object[this.size];
        if (elementType == BTypes.typeBoolean) {
            for (int i = 0; i < this.size(); i++) {
                refValues[i] = booleanValues[i];
            }
            booleanValues = null;
        }
        if (elementType == BTypes.typeInt) {
            for (int i = 0; i < this.size(); i++) {
                refValues[i] = intValues[i];
            }
            intValues = null;
        }
        if (elementType == BTypes.typeString) {
            System.arraycopy(stringValues, 0, refValues, 0, this.size());
            stringValues = null;
        }
        if (elementType == BTypes.typeFloat) {
            for (int i = 0; i < this.size(); i++) {
                refValues[i] = floatValues[i];
            }
            floatValues = null;
        }
        if (elementType == BTypes.typeByte) {
            for (int i = 0; i < this.size(); i++) {
                refValues[i] = (byteValues[i]);
            }
            byteValues = null;
        }
        elementType = null;
    }

    /** Unboxes refValues into the primitive backing array matching {@code arrayElementType}. */
    private void moveRefValueArrayToBasicTypeArray(BType type, BType arrayElementType) {
        Object[] arrayValues = this.getValues();
        if (arrayElementType.getTag() == TypeTags.INT_TAG) {
            intValues = (long[]) newArrayInstance(Long.TYPE);
            for (int i = 0; i < this.size(); i++) {
                intValues[i] = ((long) arrayValues[i]);
            }
        }
        if (arrayElementType.getTag() == TypeTags.FLOAT_TAG) {
            floatValues = (double[]) newArrayInstance(Double.TYPE);
            for (int i = 0; i < this.size(); i++) {
                floatValues[i] = ((float) arrayValues[i]);
            }
        }
        if (arrayElementType.getTag() == TypeTags.BOOLEAN_TAG) {
            booleanValues = new boolean[this.size()];
            for (int i = 0; i < this.size(); i++) {
                booleanValues[i] = ((boolean) arrayValues[i]);
            }
        }
        if (arrayElementType.getTag() == TypeTags.STRING_TAG) {
            stringValues = (String[]) newArrayInstance(String.class);
            for (int i = 0; i < this.size(); i++) {
                stringValues[i] = (String) arrayValues[i];
            }
        }
        if (arrayElementType.getTag() == TypeTags.BYTE_TAG) {
            byteValues = (byte[]) newArrayInstance(Byte.TYPE);
            for (int i = 0; i < this.size(); i++) {
                byteValues[i] = (byte) arrayValues[i];
            }
        }
        this.elementType = arrayElementType;
        this.arrayType = type;
        refValues = null;
    }

    @Override
    public IteratorValue getIterator() {
        return new ArrayIterator(this);
    }

    /**
     * Resizes the list to {@code length}: validates fixed-length/filler constraints,
     * resizes the backing array, fills new slots, and updates the logical size.
     */
    public void setLength(long length) {
        handleFrozenArrayValue();
        int newLength = (int) length;
        checkFixedLength(length);
        rangeCheck(length, size);
        fillerValueCheck(newLength, size);
        resizeInternalArray(newLength);
        fillValues(newLength);
        size = newLength;
    }

    /**
     * {@code {@link ArrayIterator}} provides iterator implementation for Ballerina array values.
     *
     * @since 0.995.0
     */
    static class ArrayIterator implements IteratorValue {
        ArrayValue array;
        long cursor = 0;
        long length;

        ArrayIterator(ArrayValue value) {
            this.array = value;
            this.length = value.size();
        }

        /** Returns the next element, or null once the iterator is exhausted. */
        @Override
        public Object next() {
            long cursor = this.cursor++;
            if (cursor == length) {
                return null;
            }
            return array.getValue(cursor);
        }

        @Override
        public boolean hasNext() {
            return cursor < length;
        }
    }
}
|
class ArrayValue implements RefValue, CollectionValue {
static final int SYSTEM_ARRAY_MAX = Integer.MAX_VALUE - 8;
protected BType arrayType;
private volatile Status freezeStatus = new Status(State.UNFROZEN);
/**
* The maximum size of arrays to allocate.
* <p>
* This is same as Java
*/
protected int maxArraySize = SYSTEM_ARRAY_MAX;
private static final int DEFAULT_ARRAY_SIZE = 100;
protected int size = 0;
Object[] refValues;
private long[] intValues;
private boolean[] booleanValues;
private byte[] byteValues;
private double[] floatValues;
private String[] stringValues;
public BType elementType;
private BType tupleRestType;
public ArrayValue(Object[] values, BType type) {
this.refValues = values;
this.arrayType = type;
this.size = values.length;
if (type.getTag() == TypeTags.ARRAY_TAG) {
this.elementType = ((BArrayType) type).getElementType();
}
}
public ArrayValue(long[] values) {
this.intValues = values;
this.size = values.length;
setArrayElementType(BTypes.typeInt);
}
public ArrayValue(boolean[] values) {
this.booleanValues = values;
this.size = values.length;
setArrayElementType(BTypes.typeBoolean);
}
public ArrayValue(byte[] values) {
this.byteValues = values;
this.size = values.length;
setArrayElementType(BTypes.typeByte);
}
public ArrayValue(double[] values) {
this.floatValues = values;
this.size = values.length;
setArrayElementType(BTypes.typeFloat);
}
public ArrayValue(String[] values) {
this.stringValues = values;
this.size = values.length;
setArrayElementType(BTypes.typeString);
}
public ArrayValue(BType type) {
if (type.getTag() == TypeTags.INT_TAG) {
intValues = (long[]) newArrayInstance(Long.TYPE);
setArrayElementType(type);
} else if (type.getTag() == TypeTags.BOOLEAN_TAG) {
booleanValues = (boolean[]) newArrayInstance(Boolean.TYPE);
setArrayElementType(type);
} else if (type.getTag() == TypeTags.BYTE_TAG) {
byteValues = (byte[]) newArrayInstance(Byte.TYPE);
setArrayElementType(type);
} else if (type.getTag() == TypeTags.FLOAT_TAG) {
floatValues = (double[]) newArrayInstance(Double.TYPE);
setArrayElementType(type);
} else if (type.getTag() == TypeTags.STRING_TAG) {
stringValues = (String[]) newArrayInstance(String.class);
setArrayElementType(type);
} else {
this.arrayType = type;
if (type.getTag() == TypeTags.ARRAY_TAG) {
BArrayType arrayType = (BArrayType) type;
this.elementType = arrayType.getElementType();
if (arrayType.getState() == ArrayState.CLOSED_SEALED) {
this.size = maxArraySize = arrayType.getSize();
}
initArrayValues(this.elementType);
} else if (type.getTag() == TypeTags.TUPLE_TAG) {
BTupleType tupleType = (BTupleType) type;
tupleRestType = tupleType.getRestType();
size = tupleType.getTupleTypes().size();
maxArraySize = (tupleRestType != null) ? maxArraySize : size;
refValues = (Object[]) newArrayInstance(Object.class);
AtomicInteger counter = new AtomicInteger(0);
tupleType.getTupleTypes()
.forEach(memType -> refValues[counter.getAndIncrement()] = memType.getEmptyValue());
} else if (type.getTag() == TypeTags.UNION_TAG) {
BUnionType unionType = (BUnionType) type;
this.size = maxArraySize = unionType.getMemberTypes().size();
unionType.getMemberTypes().forEach(this::initArrayValues);
} else {
refValues = (Object[]) newArrayInstance(Object.class);
}
}
}
/**
 * Allocates the backing store that matches the given element type. Basic types
 * get a dedicated primitive array; every other type (XML included) is stored
 * in the generic Object[] store.
 */
private void initArrayValues(BType elementType) {
    int tag = elementType.getTag();
    if (tag == TypeTags.INT_TAG) {
        intValues = (long[]) newArrayInstance(Long.TYPE);
    } else if (tag == TypeTags.FLOAT_TAG) {
        floatValues = (double[]) newArrayInstance(Double.TYPE);
    } else if (tag == TypeTags.STRING_TAG) {
        stringValues = (String[]) newArrayInstance(String.class);
    } else if (tag == TypeTags.BOOLEAN_TAG) {
        booleanValues = (boolean[]) newArrayInstance(Boolean.TYPE);
    } else if (tag == TypeTags.BYTE_TAG) {
        byteValues = (byte[]) newArrayInstance(Byte.TYPE);
    } else {
        refValues = (Object[]) newArrayInstance(Object.class);
    }
}
/**
 * Creates an empty ref-value array with the default backing capacity.
 */
public ArrayValue() {
    refValues = (Object[]) newArrayInstance(Object.class);
}

/**
 * Creates an array value of the given type with an explicit initial size.
 *
 * @param type the Ballerina type of the value
 * @param size initial size, or -1 to keep the default (unsized) state
 */
public ArrayValue(BType type, long size) {
    this.arrayType = type;
    if (type.getTag() == TypeTags.ARRAY_TAG) {
        elementType = ((BArrayType) type).getElementType();
        if (size != -1) {
            this.size = maxArraySize = (int) size;
        }
        initArrayValues(elementType);
    } else if (type.getTag() == TypeTags.TUPLE_TAG) {
        tupleRestType = ((BTupleType) type).getRestType();
        if (size != -1) {
            this.size = (int) size;
            // Only rest-less tuples are capped at the declared size.
            maxArraySize = (tupleRestType != null) ? maxArraySize : (int) size;
        }
        refValues = (Object[]) newArrayInstance(Object.class);
    } else {
        if (size != -1) {
            this.size = maxArraySize = (int) size;
        }
        refValues = (Object[]) newArrayInstance(Object.class);
    }
}
/**
 * Returns the (possibly boxed) value at the given index, dispatching to the
 * typed accessor that matches this array's element type.
 */
public Object getValue(long index) {
    // Arrays without a basic element type keep everything in the ref store.
    if (elementType == null) {
        return getRefValue(index);
    }
    switch (elementType.getTag()) {
        case TypeTags.INT_TAG:
            return getInt(index);
        case TypeTags.BOOLEAN_TAG:
            return getBoolean(index);
        case TypeTags.BYTE_TAG:
            return getByte(index);
        case TypeTags.FLOAT_TAG:
            return getFloat(index);
        case TypeTags.STRING_TAG:
            return getString(index);
        default:
            return getRefValue(index);
    }
}
/**
 * Returns the element at the given index from the ref store; falls back to the
 * typed accessors (via getValue) when this array is primitive-backed.
 */
public Object getRefValue(long index) {
    rangeCheckForGet(index, size);
    if (refValues == null) {
        // Primitive-backed array: route through the typed accessors.
        return getValue(index);
    }
    return refValues[(int) index];
}
/** Returns the int element at {@code index} (unboxing from the ref store if needed). */
public long getInt(long index) {
    rangeCheckForGet(index, size);
    return elementType.getTag() == TypeTags.INT_TAG
            ? intValues[(int) index]
            : (Long) refValues[(int) index];
}

/** Returns the boolean element at {@code index} (unboxing from the ref store if needed). */
public boolean getBoolean(long index) {
    rangeCheckForGet(index, size);
    return elementType.getTag() == TypeTags.BOOLEAN_TAG
            ? booleanValues[(int) index]
            : (Boolean) refValues[(int) index];
}

/** Returns the byte element at {@code index} (unboxing from the ref store if needed). */
public byte getByte(long index) {
    rangeCheckForGet(index, size);
    return elementType.getTag() == TypeTags.BYTE_TAG
            ? byteValues[(int) index]
            : (Byte) refValues[(int) index];
}

/** Returns the float element at {@code index} (unboxing from the ref store if needed). */
public double getFloat(long index) {
    rangeCheckForGet(index, size);
    return elementType.getTag() == TypeTags.FLOAT_TAG
            ? floatValues[(int) index]
            : (Double) refValues[(int) index];
}

/** Returns the string element at {@code index} (from the ref store if needed). */
public String getString(long index) {
    rangeCheckForGet(index, size);
    return elementType.getTag() == TypeTags.STRING_TAG
            ? stringValues[(int) index]
            : (String) refValues[(int) index];
}
/**
 * Returns the raw element at the given index straight from the backing store
 * that matches this array's element type. Unlike getValue, this assumes
 * elementType is non-null.
 */
public Object get(long index) {
    rangeCheckForGet(index, size);
    switch (this.elementType.getTag()) {
        case TypeTags.INT_TAG:
            return intValues[(int) index];
        case TypeTags.BOOLEAN_TAG:
            return booleanValues[(int) index];
        case TypeTags.BYTE_TAG:
            return byteValues[(int) index];
        case TypeTags.FLOAT_TAG:
            return floatValues[(int) index];
        case TypeTags.STRING_TAG:
            return stringValues[(int) index];
        default:
            return refValues[(int) index];
    }
}
/**
 * Stores a ref value at the given index, growing/filling the array as needed.
 * Fails on frozen arrays.
 */
public void add(long index, Object value) {
    handleFrozenArrayValue();
    prepareForAdd(index, refValues.length);
    refValues[(int) index] = value;
}

/**
 * Stores an int value at the given index, growing/filling the array as needed.
 * Fails on frozen arrays.
 */
public void add(long index, long value) {
    handleFrozenArrayValue();
    prepareForAdd(index, intValues.length);
    intValues[(int) index] = value;
}
/**
 * Stores a boolean value at the given index, growing/filling the array as
 * needed. Fails on frozen arrays.
 *
 * Bug fix: the previous code called {@code add(index, value)} when the element
 * type is int — an exact-match call back into this same overload, recursing
 * until StackOverflowError. The boolean must be converted to a long first
 * (true -> 1, false -> 0; assumed int representation of booleans — confirm
 * against the caller that relies on this branch).
 */
public void add(long index, boolean value) {
    if (elementType.getTag() == TypeTags.INT_TAG) {
        // Delegate to add(long, long); previously this recursed infinitely.
        add(index, value ? 1L : 0L);
        return;
    }
    handleFrozenArrayValue();
    prepareForAdd(index, booleanValues.length);
    booleanValues[(int) index] = value;
}
/** Stores a byte value at the given index; grows/fills as needed, fails on frozen arrays. */
public void add(long index, byte value) {
    handleFrozenArrayValue();
    prepareForAdd(index, byteValues.length);
    byteValues[(int) index] = value;
}

/** Stores a float (double) value at the given index; grows/fills as needed, fails on frozen arrays. */
public void add(long index, double value) {
    handleFrozenArrayValue();
    prepareForAdd(index, floatValues.length);
    floatValues[(int) index] = value;
}

/** Stores a string value at the given index; grows/fills as needed, fails on frozen arrays. */
public void add(long index, String value) {
    handleFrozenArrayValue();
    prepareForAdd(index, stringValues.length);
    stringValues[(int) index] = value;
}
/** Appends a ref value at the end of the array. */
public void append(Object value) {
    add(size, value);
}

/**
 * Removes and returns the element at the given index, shifting subsequent
 * elements left by one. Fails on frozen arrays.
 */
public Object shift(long index) {
    handleFrozenArrayValue();
    Object val = get(index);
    shiftArray((int) index, getArrayFromType(elementType.getTag()));
    return val;
}
/**
 * Shifts elements after {@code index} one slot left in the given backing array
 * and shrinks the logical size by one.
 */
private void shiftArray(int index, Object arr) {
    int nElemsToBeMoved = this.size - 1 - index;
    if (nElemsToBeMoved >= 0) {
        System.arraycopy(arr, index + 1, arr, index, nElemsToBeMoved);
    }
    this.size--;
}
/**
 * Inserts all elements of {@code vals} at the given index, shifting existing
 * elements right to make room. Fails on frozen arrays.
 */
public void unshift(long index, ArrayValue vals) {
    handleFrozenArrayValue();
    // Open a gap of vals.size slots starting at index.
    unshiftArray(index, vals.size, getCurrentArrayLength());
    switch (elementType.getTag()) {
        case TypeTags.INT_TAG:
            addToIntArray(vals, (int) index);
            break;
        case TypeTags.BOOLEAN_TAG:
            addToBooleanArray(vals, (int) index);
            break;
        case TypeTags.BYTE_TAG:
            addToByteArray(vals, (int) index);
            break;
        case TypeTags.FLOAT_TAG:
            addToFloatArray(vals, (int) index);
            break;
        case TypeTags.STRING_TAG:
            addToStringArray(vals, (int) index);
            break;
        default:
            addToRefArray(vals, (int) index);
    }
}
/** Copies all int elements of {@code vals} into this array starting at {@code startIndex}. */
private void addToIntArray(ArrayValue vals, int startIndex) {
    for (int j = 0; j < vals.size; j++) {
        add(startIndex + j, vals.getInt(j));
    }
}

/** Copies all float elements of {@code vals} into this array starting at {@code startIndex}. */
private void addToFloatArray(ArrayValue vals, int startIndex) {
    for (int j = 0; j < vals.size; j++) {
        add(startIndex + j, vals.getFloat(j));
    }
}

/** Copies all string elements of {@code vals} into this array starting at {@code startIndex}. */
private void addToStringArray(ArrayValue vals, int startIndex) {
    for (int j = 0; j < vals.size; j++) {
        add(startIndex + j, vals.getString(j));
    }
}

/** Copies all byte elements of {@code vals} directly into the byte backing store. */
private void addToByteArray(ArrayValue vals, int startIndex) {
    byte[] bytes = vals.getBytes();
    for (int j = 0; j < vals.size; j++) {
        this.byteValues[startIndex + j] = bytes[j];
    }
}

/** Copies all boolean elements of {@code vals} into this array starting at {@code startIndex}. */
private void addToBooleanArray(ArrayValue vals, int startIndex) {
    for (int j = 0; j < vals.size; j++) {
        add(startIndex + j, vals.getBoolean(j));
    }
}

/** Copies all ref elements of {@code vals} into this array starting at {@code startIndex}. */
private void addToRefArray(ArrayValue vals, int startIndex) {
    for (int j = 0; j < vals.size; j++) {
        add(startIndex + j, vals.getRefValue(j));
    }
}
/**
 * Opens a gap of {@code unshiftByN} slots at {@code index} by growing the
 * backing array (if needed) and shifting existing elements right.
 */
private void unshiftArray(long index, int unshiftByN, int arrLength) {
    int lastIndex = size() + unshiftByN - 1;
    prepareForConsecutiveMultiAdd(lastIndex, arrLength);
    Object arr = getArrayFromType(elementType.getTag());
    if (index > lastIndex) {
        throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);
    }
    int i = (int) index;
    System.arraycopy(arr, i, arr, i + unshiftByN, this.size - i);
}
/**
 * Returns the backing array (as Object) matching the given element type tag.
 */
private Object getArrayFromType(int typeTag) {
    switch (typeTag) {
        case TypeTags.INT_TAG:
            return intValues;
        case TypeTags.BOOLEAN_TAG:
            return booleanValues;
        case TypeTags.BYTE_TAG:
            return byteValues;
        case TypeTags.FLOAT_TAG:
            return floatValues;
        case TypeTags.STRING_TAG:
            return stringValues;
        default:
            return refValues;
    }
}

/**
 * Returns the capacity (physical length) of the active backing array.
 */
private int getCurrentArrayLength() {
    switch (elementType.getTag()) {
        case TypeTags.INT_TAG:
            return intValues.length;
        case TypeTags.BOOLEAN_TAG:
            return booleanValues.length;
        case TypeTags.BYTE_TAG:
            return byteValues.length;
        case TypeTags.FLOAT_TAG:
            return floatValues.length;
        case TypeTags.STRING_TAG:
            return stringValues.length;
        default:
            return refValues.length;
    }
}
/**
 * Returns the Ballerina string representation of this array: space-separated
 * element values, "()" for nulls, and JSON text for JSON-typed arrays.
 *
 * Cleanup: the previous code had an if/else on the tuple tag whose two
 * branches both created {@code new StringJoiner(" ")} — the dead branch is
 * collapsed into a single statement.
 */
@Override
public String stringValue() {
    if (elementType != null) {
        StringJoiner sj = new StringJoiner(" ");
        if (elementType.getTag() == TypeTags.INT_TAG) {
            for (int i = 0; i < size; i++) {
                sj.add(Long.toString(intValues[i]));
            }
            return sj.toString();
        } else if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
            for (int i = 0; i < size; i++) {
                sj.add(Boolean.toString(booleanValues[i]));
            }
            return sj.toString();
        } else if (elementType.getTag() == TypeTags.BYTE_TAG) {
            for (int i = 0; i < size; i++) {
                // Bytes are rendered unsigned (0..255).
                sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));
            }
            return sj.toString();
        } else if (elementType.getTag() == TypeTags.FLOAT_TAG) {
            for (int i = 0; i < size; i++) {
                sj.add(Double.toString(floatValues[i]));
            }
            return sj.toString();
        } else if (elementType.getTag() == TypeTags.STRING_TAG) {
            for (int i = 0; i < size; i++) {
                sj.add(stringValues[i]);
            }
            return sj.toString();
        }
    }
    if (getElementType(arrayType).getTag() == TypeTags.JSON_TAG) {
        return getJSONString();
    }
    // Previously an if/else on TUPLE_TAG with identical branches; collapsed.
    StringJoiner sj = new StringJoiner(" ");
    for (int i = 0; i < size; i++) {
        if (refValues[i] != null) {
            sj.add((refValues[i] instanceof RefValue) ? ((RefValue) refValues[i]).stringValue() :
                    (refValues[i] instanceof String) ? (String) refValues[i] : refValues[i].toString());
        } else {
            sj.add("()");
        }
    }
    return sj.toString();
}
/** Returns the Ballerina type of this array value. */
@Override
public BType getType() {
    return arrayType;
}

/** Returns the logical element count (not the backing capacity). */
@Override
public int size() {
    return size;
}

/** Returns true when the array holds no elements. */
public boolean isEmpty() {
    return size == 0;
}
/**
 * Stamps (in-place type-narrows) this array to the given type, recursively
 * stamping member ref values. Mutates {@code arrayType}/{@code elementType}
 * and may migrate values between the primitive and Object[] backing stores.
 * NOTE(review): branch order and early returns are significant here; keep the
 * control flow exactly as-is when modifying.
 */
@Override
public void stamp(BType type, List<TypeValuePair> unresolvedValues) {
    if (type.getTag() == TypeTags.TUPLE_TAG) {
        if (elementType != null && isBasicType(elementType)) {
            // Tuples use the Object[] store; migrate primitives first.
            moveBasicTypeArrayToRefValueArray();
        }
        Object[] arrayValues = this.getValues();
        for (int i = 0; i < this.size(); i++) {
            if (arrayValues[i] instanceof RefValue) {
                BType memberType = ((BTupleType) type).getTupleTypes().get(i);
                // Resolve anydata/json members to the concrete matching type.
                if (memberType.getTag() == TypeTags.ANYDATA_TAG || memberType.getTag() == TypeTags.JSON_TAG) {
                    memberType = TypeConverter.resolveMatchingTypeForUnion(arrayValues[i], memberType);
                    ((BTupleType) type).getTupleTypes().set(i, memberType);
                }
                ((RefValue) arrayValues[i]).stamp(memberType, unresolvedValues);
            }
        }
    } else if (type.getTag() == TypeTags.JSON_TAG) {
        if (elementType != null && isBasicType(elementType) && !isBasicType(type)) {
            moveBasicTypeArrayToRefValueArray();
            this.arrayType = new BArrayType(type);
            return;
        }
        Object[] arrayValues = this.getValues();
        for (int i = 0; i < this.size(); i++) {
            if (arrayValues[i] instanceof RefValue) {
                ((RefValue) arrayValues[i]).stamp(TypeConverter.resolveMatchingTypeForUnion(arrayValues[i], type),
                        unresolvedValues);
            }
        }
        type = new BArrayType(type);
    } else if (type.getTag() == TypeTags.UNION_TAG) {
        // Stamp to the first union member this value looks like.
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (TypeChecker.checkIsLikeType(this, memberType, new ArrayList<>())) {
                this.stamp(memberType, unresolvedValues);
                type = memberType;
                break;
            }
        }
    } else if (type.getTag() == TypeTags.ANYDATA_TAG) {
        type = TypeConverter.resolveMatchingTypeForUnion(this, type);
        this.stamp(type, unresolvedValues);
    } else {
        BType arrayElementType = ((BArrayType) type).getElementType();
        if (elementType != null && isBasicType(elementType)) {
            if (isBasicType(arrayElementType)) {
                this.arrayType = type;
                return;
            }
            moveBasicTypeArrayToRefValueArray();
            this.arrayType = type;
            return;
        }
        if (isBasicType(arrayElementType) &&
                (arrayType.getTag() == TypeTags.TUPLE_TAG || !isBasicType(elementType))) {
            moveRefValueArrayToBasicTypeArray(type, arrayElementType);
            return;
        }
        Object[] arrayValues = this.getValues();
        for (int i = 0; i < this.size(); i++) {
            if (arrayValues[i] instanceof RefValue) {
                ((RefValue) arrayValues[i]).stamp(arrayElementType, unresolvedValues);
            }
        }
    }
    this.arrayType = type;
}
/**
 * Deep-copies this array. Frozen arrays are returned as-is; {@code refs}
 * tracks already-copied containers so shared/cyclic references are preserved.
 */
@Override
public Object copy(Map<Object, Object> refs) {
    if (isFrozen()) {
        return this;
    }
    if (refs.containsKey(this)) {
        return refs.get(this);
    }
    if (elementType != null) {
        ArrayValue valueArray = null;
        if (elementType.getTag() == TypeTags.INT_TAG) {
            valueArray = new ArrayValue(Arrays.copyOf(intValues, intValues.length));
        } else if (elementType.getTag() == TypeTags.BOOLEAN_TAG) {
            valueArray = new ArrayValue(Arrays.copyOf(booleanValues, booleanValues.length));
        } else if (elementType.getTag() == TypeTags.BYTE_TAG) {
            valueArray = new ArrayValue(Arrays.copyOf(byteValues, byteValues.length));
        } else if (elementType.getTag() == TypeTags.FLOAT_TAG) {
            valueArray = new ArrayValue(Arrays.copyOf(floatValues, floatValues.length));
        } else if (elementType.getTag() == TypeTags.STRING_TAG) {
            valueArray = new ArrayValue(Arrays.copyOf(stringValues, stringValues.length));
        }
        if (valueArray != null) {
            valueArray.size = this.size;
            refs.put(this, valueArray);
            return valueArray;
        }
    }
    Object[] values = new Object[size];
    ArrayValue refValueArray = new ArrayValue(values, arrayType);
    refValueArray.size = this.size;
    // Register the copy before copying members so self-references resolve.
    refs.put(this, refValueArray);
    int bound = this.size;
    IntStream.range(0, bound).forEach(i -> {
        Object value = this.refValues[i];
        if (value instanceof RefValue) {
            values[i] = ((RefValue) value).copy(refs);
        } else {
            values[i] = value;
        }
    });
    return refValueArray;
}
/**
 * Returns a deep copy of this array that is frozen (read-only).
 */
@Override
public Object frozenCopy(Map<Object, Object> refs) {
    ArrayValue copy = (ArrayValue) copy(refs);
    if (!copy.isFrozen()) {
        copy.freezeDirect();
    }
    return copy;
}

/** Delegates to the Ballerina string representation. */
@Override
public String toString() {
    return stringValue();
}
/** Returns the live Object[] backing store (not a copy). */
public Object[] getValues() {
    return refValues;
}

/** Returns a copy of the byte elements, trimmed to the logical size. */
public byte[] getBytes() {
    return Arrays.copyOf(byteValues, this.size);
}

/** Returns a copy of the string elements, trimmed to the logical size. */
public String[] getStringArray() {
    return Arrays.copyOf(stringValues, size);
}

/** Returns a copy of the int elements, trimmed to the logical size. */
public long[] getLongArray() {
    return Arrays.copyOf(intValues, size);
}
/**
 * Writes this array to the given stream: raw bytes for byte arrays, otherwise
 * the string representation in the platform default charset.
 */
@Override
public void serialize(OutputStream outputStream) {
    if (elementType.getTag() == TypeTags.BYTE_TAG) {
        try {
            outputStream.write(byteValues);
        } catch (IOException e) {
            throw new BallerinaException("error occurred while writing the binary content to the output stream", e);
        }
    } else {
        try {
            // NOTE(review): uses the platform default charset, not UTF-8.
            outputStream.write(this.toString().getBytes(Charset.defaultCharset()));
        } catch (IOException e) {
            throw new BallerinaException("error occurred while serializing data", e);
        }
    }
}
/**
 * Grows (or shrinks) the active backing array to the given physical length,
 * preserving existing contents.
 */
public void resizeInternalArray(int newLength) {
    if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
        refValues = Arrays.copyOf(refValues, newLength);
    } else {
        if (elementType != null) {
            switch (elementType.getTag()) {
                case TypeTags.INT_TAG:
                    intValues = Arrays.copyOf(intValues, newLength);
                    break;
                case TypeTags.BOOLEAN_TAG:
                    booleanValues = Arrays.copyOf(booleanValues, newLength);
                    break;
                case TypeTags.BYTE_TAG:
                    byteValues = Arrays.copyOf(byteValues, newLength);
                    break;
                case TypeTags.FLOAT_TAG:
                    floatValues = Arrays.copyOf(floatValues, newLength);
                    break;
                case TypeTags.STRING_TAG:
                    stringValues = Arrays.copyOf(stringValues, newLength);
                    break;
                default:
                    refValues = Arrays.copyOf(refValues, newLength);
                    break;
            }
        } else {
            refValues = Arrays.copyOf(refValues, newLength);
        }
    }
}
/**
 * Fills the gap between the current size and {@code index} with the element
 * type's zero value. Primitive stores need no explicit fill (Java arrays are
 * already zero/false/"" as appropriate — strings are filled explicitly below).
 */
private void fillValues(int index) {
    if (index <= size) {
        return;
    }
    if (arrayType.getTag() == TypeTags.TUPLE_TAG) {
        // Only the rest-type portion of a tuple can be gap-filled.
        if (tupleRestType != null) {
            Arrays.fill(refValues, size, index, tupleRestType.getZeroValue());
        }
    } else {
        int typeTag = elementType.getTag();
        if (typeTag == TypeTags.STRING_TAG) {
            Arrays.fill(stringValues, size, index, BLangConstants.STRING_EMPTY_VALUE);
            return;
        }
        // Numeric/boolean primitive arrays are zero-initialized by Java.
        if (typeTag == TypeTags.INT_TAG || typeTag == TypeTags.BYTE_TAG || typeTag == TypeTags.FLOAT_TAG ||
                typeTag == TypeTags.BOOLEAN_TAG) {
            return;
        }
        Arrays.fill(refValues, size, index, elementType.getZeroValue());
    }
}
/** Returns the Ballerina type of this array value. */
public BType getArrayType() {
    return arrayType;
}

/**
 * Validates a read index against the logical size, raising the appropriate
 * tuple/array out-of-range error.
 */
private void rangeCheckForGet(long index, int size) {
    rangeCheck(index, size);
    if (index < 0 || index >= size) {
        if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.TUPLE_INDEX_OUT_OF_RANGE, index, size);
        }
        throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);
    }
}
/**
 * Validates an index against int range and the maximum allowed array size.
 * Tuples with a rest type are exempt from the upper bound.
 */
private void rangeCheck(long index, int size) {
    if (index > Integer.MAX_VALUE || index < Integer.MIN_VALUE) {
        throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);
    }
    if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
        if ((((BTupleType) arrayType).getRestType() == null && index >= maxArraySize) || (int) index < 0) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.TUPLE_INDEX_OUT_OF_RANGE, index, size);
        }
    } else {
        if ((int) index < 0 || index >= maxArraySize) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.INDEX_OUT_OF_RANGE_ERROR,
                    RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);
        }
    }
}

/**
 * Rejects a non-consecutive insert (index > size) when the element/rest type
 * has no filler value to populate the gap with.
 */
private void fillerValueCheck(int index, int size) {
    if (arrayType != null && arrayType.getTag() == TypeTags.TUPLE_TAG) {
        if (!TypeChecker.hasFillerValue(tupleRestType) && (index > size)) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,
                    RuntimeErrors.ILLEGAL_TUPLE_INSERTION, size, index + 1);
        }
    } else {
        if (!TypeChecker.hasFillerValue(elementType) && (index > size)) {
            throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,
                    RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);
        }
    }
}
/**
 * Allocates a new backing array of the given component type, sized to the
 * current logical size or the default capacity when the size is unset.
 */
Object newArrayInstance(Class<?> componentType) {
    int length = (size > 0) ? size : DEFAULT_ARRAY_SIZE;
    return Array.newInstance(componentType, length);
}

/**
 * Records the element type and derives the corresponding array type.
 */
private void setArrayElementType(BType type) {
    this.elementType = type;
    this.arrayType = new BArrayType(type);
}
/**
 * Serializes this array as JSON text using the runtime's JSON generator.
 */
public String getJSONString() {
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    JSONGenerator gen = new JSONGenerator(byteOut);
    try {
        gen.serialize(this);
        gen.flush();
    } catch (IOException e) {
        throw new BallerinaException("Error in converting JSON to a string: " + e.getMessage(), e);
    }
    return new String(byteOut.toByteArray());
}

/**
 * Unwraps nested array types down to the innermost (non-array) element type.
 */
private BType getElementType(BType type) {
    BType current = type;
    while (current.getTag() == TypeTags.ARRAY_TAG) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Util method to handle frozen array values.
 * Raises a Ballerina error if this array is frozen (or mid-freeze) and thus
 * must not be mutated; no-op otherwise.
 */
private void handleFrozenArrayValue() {
    synchronized (this) {
        try {
            if (this.freezeStatus.getState() != State.UNFROZEN) {
                FreezeUtils.handleInvalidUpdate(freezeStatus.getState());
            }
        } catch (BLangFreezeException e) {
            // Surface the freeze violation as a Ballerina error value.
            throw BallerinaErrors.createError(e.getMessage(), e.getDetail());
        }
    }
}
/**
 * Validates and prepares the array for a write at {@code index}: range check,
 * filler-value check for gaps, capacity growth, gap fill, and size update.
 */
protected void prepareForAdd(long index, int currentArraySize) {
    int intIndex = (int) index;
    rangeCheck(index, size);
    fillerValueCheck(intIndex, size);
    ensureCapacity(intIndex + 1, currentArraySize);
    fillValues(intIndex);
    resetSize(intIndex);
}

/**
 * Same as {@code prepareForAdd}, except fillerValueCheck is not performed as we are guaranteed to add
 * elements to consecutive positions.
 *
 * @param index last index after add operation completes
 * @param currentArraySize current array size
 */
void prepareForConsecutiveMultiAdd(long index, int currentArraySize) {
    int intIndex = (int) index;
    rangeCheck(index, size);
    ensureCapacity(intIndex + 1, currentArraySize);
    resetSize(intIndex);
}
/**
 * Grows the backing array (1.5x, at least the requested capacity, capped at
 * maxArraySize) — but only for unsealed arrays and tuples; sealed arrays keep
 * their fixed capacity.
 */
private void ensureCapacity(int requestedCapacity, int currentArraySize) {
    if ((requestedCapacity) - currentArraySize > 0) {
        if ((this.arrayType.getTag() == TypeTags.ARRAY_TAG
                && ((BArrayType) this.arrayType).getState() == ArrayState.UNSEALED)
                || this.arrayType.getTag() == TypeTags.TUPLE_TAG) {
            // Grow by ~50%, never below the request, never above the cap.
            int newArraySize = currentArraySize + (currentArraySize >> 1);
            newArraySize = Math.max(newArraySize, requestedCapacity);
            newArraySize = Math.min(newArraySize, maxArraySize);
            resizeInternalArray(newArraySize);
        }
    }
}

/** Extends the logical size to cover a write at {@code index}. */
private void resetSize(int index) {
    if (index >= size) {
        size = index + 1;
    }
}
/**
 * {@inheritDoc}
 * Adopts the given freeze status and propagates the freeze attempt to member
 * ref values. The tag comparison skips element types whose tag is at or below
 * BOOLEAN_TAG — assumed to be the basic types with no nested ref values
 * (depends on TypeTags ordering; confirm if tags are renumbered).
 */
@Override
public synchronized void attemptFreeze(Status freezeStatus) {
    if (!FreezeUtils.isOpenForFreeze(this.freezeStatus, freezeStatus)) {
        return;
    }
    this.freezeStatus = freezeStatus;
    if (elementType == null || elementType.getTag() > TypeTags.BOOLEAN_TAG) {
        for (int i = 0; i < this.size; i++) {
            Object value = this.getRefValue(i);
            if (value instanceof RefValue) {
                ((RefValue) value).attemptFreeze(freezeStatus);
            }
        }
    }
}

/**
 * {@inheritDoc}
 * Unconditionally marks this array frozen and recursively freezes member ref
 * values (same element-type tag assumption as attemptFreeze).
 */
@Override
public void freezeDirect() {
    if (isFrozen()) {
        return;
    }
    this.freezeStatus.setFrozen();
    if (elementType == null || elementType.getTag() > TypeTags.BOOLEAN_TAG) {
        for (int i = 0; i < this.size; i++) {
            Object value = this.getRefValue(i);
            if (value instanceof RefValue) {
                ((RefValue) value).freezeDirect();
            }
        }
    }
}
/**
 * {@inheritDoc}
 */
@Override
public synchronized boolean isFrozen() {
    return this.freezeStatus.isFrozen();
}

/**
 * Returns true for basic (primitive-backed) element types. Relies on the
 * TypeTags numbering: basic tags are at or below BOOLEAN_TAG, excluding
 * DECIMAL which has no primitive store here.
 */
private boolean isBasicType(BType type) {
    return type.getTag() <= TypeTags.BOOLEAN_TAG && type.getTag() != TypeTags.DECIMAL_TAG;
}
/**
 * Migrates a primitive-backed array into the generic Object[] store (boxing
 * each element), releasing the primitive array and clearing elementType so the
 * array is treated as ref-backed from then on.
 */
private void moveBasicTypeArrayToRefValueArray() {
    refValues = new Object[this.size];
    if (elementType == BTypes.typeBoolean) {
        for (int i = 0; i < this.size(); i++) {
            refValues[i] = booleanValues[i];
        }
        booleanValues = null;
    } else if (elementType == BTypes.typeInt) {
        for (int i = 0; i < this.size(); i++) {
            refValues[i] = intValues[i];
        }
        intValues = null;
    } else if (elementType == BTypes.typeString) {
        System.arraycopy(stringValues, 0, refValues, 0, this.size());
        stringValues = null;
    } else if (elementType == BTypes.typeFloat) {
        for (int i = 0; i < this.size(); i++) {
            refValues[i] = floatValues[i];
        }
        floatValues = null;
    } else if (elementType == BTypes.typeByte) {
        for (int i = 0; i < this.size(); i++) {
            refValues[i] = (byteValues[i]);
        }
        byteValues = null;
    }
    elementType = null;
}
/**
 * Migrates a ref-backed array into the primitive store matching the given
 * element type (unboxing each element), then releases the Object[] store.
 *
 * Bug fix: the float branch previously cast each element with
 * {@code (float) arrayValues[i]}. Ballerina floats are stored as boxed
 * {@code Double} in the ref store (see getFloat's {@code (Double)} cast), so
 * an unboxing cast to {@code float} throws ClassCastException — and would lose
 * precision even if it didn't. Cast to {@code double} instead.
 */
private void moveRefValueArrayToBasicTypeArray(BType type, BType arrayElementType) {
    Object[] arrayValues = this.getValues();
    if (arrayElementType.getTag() == TypeTags.INT_TAG) {
        intValues = (long[]) newArrayInstance(Long.TYPE);
        for (int i = 0; i < this.size(); i++) {
            intValues[i] = ((long) arrayValues[i]);
        }
    }
    if (arrayElementType.getTag() == TypeTags.FLOAT_TAG) {
        floatValues = (double[]) newArrayInstance(Double.TYPE);
        for (int i = 0; i < this.size(); i++) {
            // Fix: was (float), which fails to unbox a Double.
            floatValues[i] = ((double) arrayValues[i]);
        }
    }
    if (arrayElementType.getTag() == TypeTags.BOOLEAN_TAG) {
        booleanValues = new boolean[this.size()];
        for (int i = 0; i < this.size(); i++) {
            booleanValues[i] = ((boolean) arrayValues[i]);
        }
    }
    if (arrayElementType.getTag() == TypeTags.STRING_TAG) {
        stringValues = (String[]) newArrayInstance(String.class);
        for (int i = 0; i < this.size(); i++) {
            stringValues[i] = (String) arrayValues[i];
        }
    }
    if (arrayElementType.getTag() == TypeTags.BYTE_TAG) {
        byteValues = (byte[]) newArrayInstance(Byte.TYPE);
        for (int i = 0; i < this.size(); i++) {
            byteValues[i] = (byte) arrayValues[i];
        }
    }
    this.elementType = arrayElementType;
    this.arrayType = type;
    refValues = null;
}
/** Returns a fresh iterator over this array's current elements. */
@Override
public IteratorValue getIterator() {
    return new ArrayIterator(this);
}

/**
 * Resizes the array to the given length, validating against frozen state,
 * fixed-length (sealed) types, range, and filler-value availability; new slots
 * are filled with the element type's zero value.
 */
public void setLength(long length) {
    if (length == size) {
        return;
    }
    handleFrozenArrayValue();
    int newLength = (int) length;
    checkFixedLength(length);
    rangeCheck(length, size);
    fillerValueCheck(newLength, size);
    resizeInternalArray(newLength);
    fillValues(newLength);
    size = newLength;
}
/**
* {@code {@link ArrayIterator}} provides iterator implementation for Ballerina array values.
*
* @since 0.995.0
*/
static class ArrayIterator implements IteratorValue {
    ArrayValue array;
    long cursor = 0;
    long length;

    /** Snapshots the array's size at construction time. */
    ArrayIterator(ArrayValue value) {
        this.array = value;
        this.length = value.size();
    }

    /**
     * Returns the next element, or null once the snapshot length is reached.
     * The cursor advances on every call, matching the original semantics.
     */
    @Override
    public Object next() {
        long current = cursor;
        cursor = current + 1;
        return (current == length) ? null : array.getValue(current);
    }

    @Override
    public boolean hasNext() {
        return cursor < length;
    }
}
}
|
If `tryLock` throws InterruptedException, then `locked` will still be true (its initial value) even though the lock was never acquired, and the finally block will attempt to unlock a lock this thread does not hold.
|
/**
 * Acquires the write lock (with timeout when {@code lock.time() > 0}), invokes
 * the intercepted method, and releases the lock.
 *
 * Bug fix: {@code locked} was initialized to true, so if {@code tryLock} threw
 * InterruptedException (or the untimed path never ran), the finally block
 * would unlock a lock this thread never acquired. It now starts false and is
 * set only after the lock is actually held.
 */
private Object writeLock(Lock lock, InvocationContext ctx) throws Exception {
    boolean locked = false;
    long time = lock.time();
    try {
        if (time > 0) {
            locked = readWriteLock.writeLock().tryLock(time, lock.unit());
            if (!locked) {
                throw new LockException("Write lock not acquired in " + lock.unit().toMillis(time) + " ms");
            }
        } else {
            readWriteLock.writeLock().lock();
            locked = true;
        }
        return ctx.proceed();
    } finally {
        if (locked) {
            readWriteLock.writeLock().unlock();
        }
    }
}
|
locked = readWriteLock.writeLock().tryLock(time, lock.unit());
|
/**
 * Acquires the write lock (with timeout when {@code lock.time() > 0}), invokes
 * the intercepted method, and releases the lock. {@code locked} starts false
 * and is set only once the lock is actually held, so an interrupted or
 * timed-out acquisition never triggers an unlock in the finally block.
 */
private Object writeLock(Lock lock, InvocationContext ctx) throws Exception {
    boolean locked = false;
    long time = lock.time();
    try {
        if (time > 0) {
            locked = readWriteLock.writeLock().tryLock(time, lock.unit());
            if (!locked) {
                throw new LockException("Write lock not acquired in " + lock.unit().toMillis(time) + " ms");
            }
        } else {
            readWriteLock.writeLock().lock();
            locked = true;
        }
        return ctx.proceed();
    } finally {
        if (locked) {
            readWriteLock.writeLock().unlock();
        }
    }
}
|
/**
 * Interceptor that guards business methods with a shared ReadWriteLock based
 * on the {@code @Lock} binding (READ, WRITE, or NONE).
 */
class LockInterceptor {

    private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    @AroundInvoke
    Object lock(InvocationContext ctx) throws Exception {
        Lock lock = getLock(ctx);
        switch (lock.value()) {
            case WRITE:
                return writeLock(lock, ctx);
            case READ:
                return readLock(lock, ctx);
            case NONE:
                return ctx.proceed();
        }
        throw new LockException("Unsupported @Lock type found on business method " + ctx.getMethod());
    }

    /**
     * Acquires the read lock (with timeout when {@code lock.time() > 0}),
     * invokes the intercepted method, and releases the lock.
     *
     * Bug fix: {@code locked} was initialized to true, so an interrupted
     * {@code tryLock} left the finally block unlocking a lock this thread
     * never held. It now starts false and is set only after acquisition.
     */
    private Object readLock(Lock lock, InvocationContext ctx) throws Exception {
        boolean locked = false;
        long time = lock.time();
        try {
            if (time > 0) {
                locked = readWriteLock.readLock().tryLock(time, lock.unit());
                if (!locked) {
                    throw new LockException("Read lock not acquired in " + lock.unit().toMillis(time) + " ms");
                }
            } else {
                readWriteLock.readLock().lock();
                locked = true;
            }
            return ctx.proceed();
        } finally {
            if (locked) {
                readWriteLock.readLock().unlock();
            }
        }
    }

    /**
     * Looks up the {@code @Lock} binding from the interceptor bindings stored
     * in the invocation context.
     */
    @SuppressWarnings("unchecked")
    Lock getLock(InvocationContext ctx) {
        Set<Annotation> bindings = (Set<Annotation>) ctx.getContextData().get(ArcInvocationContext.KEY_INTERCEPTOR_BINDINGS);
        for (Annotation annotation : bindings) {
            if (annotation.annotationType().equals(Lock.class)) {
                return (Lock) annotation;
            }
        }
        throw new LockException("@Lock binding not found on business method " + ctx.getMethod());
    }
}
|
/**
 * Interceptor that guards business methods with a shared ReadWriteLock based
 * on the {@code @Lock} binding (READ, WRITE, or NONE).
 */
class LockInterceptor {

    private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    @AroundInvoke
    Object lock(InvocationContext ctx) throws Exception {
        Lock lock = getLock(ctx);
        switch (lock.value()) {
            case WRITE:
                return writeLock(lock, ctx);
            case READ:
                return readLock(lock, ctx);
            case NONE:
                return ctx.proceed();
        }
        throw new LockException("Unsupported @Lock type found on business method " + ctx.getMethod());
    }

    /**
     * Acquires the read lock (with timeout when {@code lock.time() > 0}),
     * invokes the intercepted method, and releases the lock. {@code locked} is
     * set only after the lock is actually held, so an interrupted or timed-out
     * acquisition never triggers an unlock in the finally block.
     */
    private Object readLock(Lock lock, InvocationContext ctx) throws Exception {
        boolean locked = false;
        long time = lock.time();
        try {
            if (time > 0) {
                locked = readWriteLock.readLock().tryLock(time, lock.unit());
                if (!locked) {
                    throw new LockException("Read lock not acquired in " + lock.unit().toMillis(time) + " ms");
                }
            } else {
                readWriteLock.readLock().lock();
                locked = true;
            }
            return ctx.proceed();
        } finally {
            if (locked) {
                readWriteLock.readLock().unlock();
            }
        }
    }

    /**
     * Looks up the {@code @Lock} binding from the interceptor bindings stored
     * in the invocation context.
     */
    @SuppressWarnings("unchecked")
    Lock getLock(InvocationContext ctx) {
        Set<Annotation> bindings = (Set<Annotation>) ctx.getContextData().get(ArcInvocationContext.KEY_INTERCEPTOR_BINDINGS);
        for (Annotation annotation : bindings) {
            if (annotation.annotationType().equals(Lock.class)) {
                return (Lock) annotation;
            }
        }
        throw new LockException("@Lock binding not found on business method " + ctx.getMethod());
    }
}
|
Just a formatting thing: I think it would be more readable to have both lambda statements on new lines.
|
/**
 * Lists all keys in the vault as a paged stream: the first page is fetched
 * eagerly and subsequent pages are fetched via the continuation token.
 */
public PagedFlux<KeyBase> listKeys() {
    return new PagedFlux<>(
            this::listKeysFirstPage,
            this::listKeysNextPage);
}
|
continuationToken -> listKeysNextPage(continuationToken));
|
/**
 * Lists all keys in the vault as a paged stream: the first page is fetched
 * eagerly and subsequent pages are fetched via the continuation token.
 */
public PagedFlux<KeyBase> listKeys() {
    return new PagedFlux<>(() ->
            listKeysFirstPage(),
        continuationToken -> listKeysNextPage(continuationToken));
}
|
class KeyAsyncClient {
static final String API_VERSION = "7.0";
static final String ACCEPT_LANGUAGE = "en-US";
static final int DEFAULT_MAX_PAGE_RESULTS = 25;
static final String CONTENT_TYPE_HEADER_VALUE = "application/json";
static final String KEY_VAULT_SCOPE = "https:
private final String endpoint;
private final KeyService service;
private final ClientLogger logger = new ClientLogger(KeyAsyncClient.class);
/**
* Creates a KeyAsyncClient that uses {@code pipeline} to service requests
*
* @param endpoint URL for the Azure KeyVault service.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
*/
KeyAsyncClient(URL endpoint, HttpPipeline pipeline) {
    // A vault endpoint is mandatory; fail fast with the standard error string.
    Objects.requireNonNull(endpoint, KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED));
    this.endpoint = endpoint.toString();
    // Build the REST proxy that backs all service calls.
    this.service = RestProxy.create(KeyService.class, pipeline);
}
/**
* Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link KeyType keyType} indicates the type of key to create. Possible values include: {@link KeyType
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new EC key. Subscribes to the call asynchronously and prints out the newly created key details when a response has been received.</p>
* <pre>
* keyAsyncClient.createKey("keyName", KeyType.EC).subscribe(keyResponse ->
* System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param name The name of the key being created.
* @param keyType The type of key to create. For valid values, see {@link KeyType KeyType}.
* @throws ResourceModifiedException if {@code name} or {@code keyType} is null.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createKey(String name, KeyType keyType) {
    // Only the key type is set; all other request attributes use service defaults.
    KeyRequestParameters parameters = new KeyRequestParameters().kty(keyType);
    return service.createKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating key - {}", name))
        .doOnSuccess(response -> logger.info("Created key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create key - {}", name, error));
}
/**
* Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link KeyCreateOptions} is required. The {@link KeyCreateOptions
* are optional. The {@link KeyCreateOptions
*
* <p>The {@link KeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new Rsa key which activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created key details when a response has been received.</p>
* <pre>
* KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createKey(keyCreateOptions).subscribe(keyResponse ->
* System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param keyCreateOptions The key configuration object containing information about the key being created.
* @throws NullPointerException if {@code keyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code keyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createKey(KeyCreateOptions keyCreateOptions) {
    Objects.requireNonNull(keyCreateOptions, "The key options parameter cannot be null.");
    final String keyName = keyCreateOptions.name();
    // Translate the caller-facing options into the wire-format request.
    final KeyRequestParameters request = new KeyRequestParameters()
        .kty(keyCreateOptions.keyType())
        .keyOps(keyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(keyCreateOptions));
    return service.createKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating key - {}", keyName))
        .doOnSuccess(response -> logger.info("Created key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create key - {}", keyName, error));
}
/**
* Creates a new Rsa key and stores it in the key vault. The create Rsa key operation can be used to create any Rsa key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link RsaKeyCreateOptions} is required. The {@link RsaKeyCreateOptions
* and {@link RsaKeyCreateOptions
* is set to true by Azure Key Vault, if not specified.</p>
*
* <p>The {@link RsaKeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new RSA key with size 2048 which activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created key details when a response has been received.</p>
* <pre>
* RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName", KeyType.RSA)
* .keySize(2048)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createRsaKey(rsaKeyCreateOptions).subscribe(keyResponse ->
* System.out.printf("RSA Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param rsaKeyCreateOptions The key configuration object containing information about the rsa key being created.
* @throws NullPointerException if {@code rsaKeyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code rsaKeyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createRsaKey(RsaKeyCreateOptions rsaKeyCreateOptions) {
    Objects.requireNonNull(rsaKeyCreateOptions, "The Rsa key options parameter cannot be null.");
    final String keyName = rsaKeyCreateOptions.name();
    // Translate the caller-facing RSA options into the wire-format request.
    final KeyRequestParameters request = new KeyRequestParameters()
        .kty(rsaKeyCreateOptions.keyType())
        .keySize(rsaKeyCreateOptions.keySize())
        .keyOps(rsaKeyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(rsaKeyCreateOptions));
    return service.createKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating Rsa key - {}", keyName))
        .doOnSuccess(response -> logger.info("Created Rsa key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create Rsa key - {}", keyName, error));
}
/**
* Creates a new Ec key and stores it in the key vault. The create Ec key operation can be used to create any Ec key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link EcKeyCreateOptions} parameter is required. The {@link EcKeyCreateOptions
* default value of {@link KeyCurveName
* are optional. The {@link EcKeyCreateOptions
*
* <p>The {@link EcKeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new EC key with P-384 web key curve. The key activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created ec key details when a response has been received.</p>
* <pre>
* EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName", KeyType.EC)
* .curve(KeyCurveName.P_384)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createEcKey(ecKeyCreateOptions).subscribe(keyResponse ->
* System.out.printf("EC Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param ecKeyCreateOptions The key options object containing information about the ec key being created.
* @throws NullPointerException if {@code ecKeyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code ecKeyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createEcKey(EcKeyCreateOptions ecKeyCreateOptions) {
    // Message fixed: previously read "The Ec key options options cannot be null.";
    // now phrased consistently with createKey/createRsaKey.
    Objects.requireNonNull(ecKeyCreateOptions, "The Ec key options parameter cannot be null.");
    // Translate the caller-facing EC options into the wire-format request.
    KeyRequestParameters parameters = new KeyRequestParameters()
        .kty(ecKeyCreateOptions.keyType())
        .curve(ecKeyCreateOptions.curve())
        .keyOps(ecKeyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(ecKeyCreateOptions));
    return service.createKey(endpoint, ecKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating Ec key - {}", ecKeyCreateOptions.name()))
        .doOnSuccess(response -> logger.info("Created Ec key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create Ec key - {}", ecKeyCreateOptions.name(), error));
}
/**
* Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type
* into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details
* when a response has been received.</p>
* <pre>
* keyAsyncClient.importKey("keyName", jsonWebKeyToImport).subscribe(keyResponse ->
* System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param name The name for the imported key.
* @param keyMaterial The Json web key being imported.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response#value() value} contains the imported {@link Key key}.
*/
public Mono<Response<Key>> importKey(String name, JsonWebKey keyMaterial) {
    // Wrap the raw JSON web key in the import request payload.
    final KeyImportRequestParameters request = new KeyImportRequestParameters().key(keyMaterial);
    return service.importKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Importing key - {}", name))
        .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to import key - {}", name, error));
}
/**
* Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type
* into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission.
*
* <p>The {@code keyImportOptions} is required and its fields {@link KeyImportOptions
* be null. The {@link KeyImportOptions
* are optional. If not specified, no values are set for the fields. The {@link KeyImportOptions
* the {@link KeyImportOptions
*
* <p><strong>Code Samples</strong></p>
* <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details
* when a response has been received.</p>
* <pre>
* KeyImportOptions keyImportOptions = new KeyImportOptions("keyName", jsonWebKeyToImport)
* .hsm(true)
* .expires(OffsetDateTime.now().plusDays(60));
*
* keyAsyncClient.importKey(keyImportOptions).subscribe(keyResponse ->
* System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param keyImportOptions The key import configuration object containing information about the json web key being imported.
* @throws NullPointerException if {@code keyImportOptions} is {@code null}.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> importKey(KeyImportOptions keyImportOptions) {
    Objects.requireNonNull(keyImportOptions, "The key import configuration parameter cannot be null.");
    final String keyName = keyImportOptions.name();
    // Translate the caller-facing import options into the wire-format request.
    final KeyImportRequestParameters request = new KeyImportRequestParameters()
        .key(keyImportOptions.keyMaterial())
        .hsm(keyImportOptions.hsm())
        .keyAttributes(new KeyRequestAttributes(keyImportOptions));
    return service.importKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Importing key - {}", keyName))
        .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to import key - {}", keyName, error));
}
/**
* Gets the public part of the specified key and key version. The get key operation is applicable to all key types and it requires the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets a specific version of the key in the key vault. Subscribes to the call asynchronously and prints out the
* returned key details when a response has been received.</p>
* <pre>
* String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
* keyAsyncClient.getKey("keyName", keyVersion).subscribe(keyResponse ->
* System.out.printf("Key returned with name %s, id %s and version %s", keyResponse.value().name(),
* keyResponse.value().id(), keyResponse.value().version()));
* </pre>
*
* @param name The name of the key, cannot be null
* @param version The version of the key to retrieve. If this is an empty String or null, this call is equivalent to calling {@link KeyAsyncClient
* @throws ResourceNotFoundException when a key with {@code name} and {@code version} doesn't exist in the key vault.
* @throws HttpRequestException if {@code name} or {@code version} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response#value() value} contains the requested {@link Key key}.
*/
public Mono<Response<Key>> getKey(String name, String version) {
    // A null version is normalized to "", which the service resolves to the latest version.
    final String keyVersion = (version == null) ? "" : version;
    return service.getKey(endpoint, name, keyVersion, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Retrieving key - {}", name))
        .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to get key - {}", name, error));
}
/**
* Get the public part of the latest version of the specified key from the key vault. The get key operation is applicable to
* all key types and it requires the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets latest version of the key in the key vault. Subscribes to the call asynchronously and prints out the
* returned key details when a response has been received.</p>
* <pre>
* keyAsyncClient.getKey("keyName").subscribe(keyResponse ->
* System.out.printf("Key with name %s, id %s \n", keyResponse.value().name(),
* keyResponse.value().id()));
* </pre>
*
* @param name The name of the key.
* @throws ResourceNotFoundException when a key with {@code name} doesn't exist in the key vault.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> getKey(String name) {
    // Delegates to getKey(name, version) with an empty version, which the service
    // resolves to the latest key version. The delegate already attaches identical
    // request/success/error logging, so no logging operators are re-added here —
    // previously they were duplicated, emitting each log line twice per call.
    return getKey(name, "");
}
/**
* Get public part of the key which represents {@link KeyBase keyBase} from the key vault. The get key operation is applicable
* to all key types and it requires the {@code keys/get} permission.
*
* <p>The list operations {@link KeyAsyncClient
* the {@link Flux} containing {@link KeyBase base key} as output excluding the key material of the key.
* This operation can then be used to get the full key with its key material from {@code keyBase}.</p>
* <pre>
* keyAsyncClient.listKeys().subscribe(keyBase ->
* client.getKey(keyBase).subscribe(keyResponse ->
* System.out.printf("Key with name %s and value %s \n", keyResponse.value().name(), keyResponse.value().id())));
* </pre>
*
* @param keyBase The {@link KeyBase base key} holding attributes of the key being requested.
* @throws ResourceNotFoundException when a key with {@link KeyBase
* @throws HttpRequestException if {@link KeyBase
|
class KeyAsyncClient {
static final String API_VERSION = "7.0";
static final String ACCEPT_LANGUAGE = "en-US";
static final int DEFAULT_MAX_PAGE_RESULTS = 25;
static final String CONTENT_TYPE_HEADER_VALUE = "application/json";
static final String KEY_VAULT_SCOPE = "https:
private final String endpoint;
private final KeyService service;
private final ClientLogger logger = new ClientLogger(KeyAsyncClient.class);
/**
* Creates a KeyAsyncClient that uses {@code pipeline} to service requests
*
* @param endpoint URL for the Azure KeyVault service.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
*/
KeyAsyncClient(URL endpoint, HttpPipeline pipeline) {
    // Fail fast with the vault-specific message when no endpoint was supplied;
    // requireNonNull returns its argument, so validation and assignment fold into one step.
    this.endpoint = Objects.requireNonNull(endpoint,
        KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED)).toString();
    this.service = RestProxy.create(KeyService.class, pipeline);
}
/**
* Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link KeyType keyType} indicates the type of key to create. Possible values include: {@link KeyType
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new EC key. Subscribes to the call asynchronously and prints out the newly created key details when a response has been received.</p>
* <pre>
* keyAsyncClient.createKey("keyName", KeyType.EC).subscribe(keyResponse ->
* System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param name The name of the key being created.
* @param keyType The type of key to create. For valid values, see {@link KeyType KeyType}.
* @throws ResourceModifiedException if {@code name} or {@code keyType} is null.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createKey(String name, KeyType keyType) {
    // Build the request payload carrying only the requested key type.
    final KeyRequestParameters request = new KeyRequestParameters().kty(keyType);
    final Mono<Response<Key>> call =
        service.createKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE);
    // Attach logging around the service call without altering its result.
    return call
        .doOnRequest(ignored -> logger.info("Creating key - {}", name))
        .doOnSuccess(response -> logger.info("Created key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create key - {}", name, error));
}
/**
* Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link KeyCreateOptions} is required. The {@link KeyCreateOptions
* are optional. The {@link KeyCreateOptions
*
* <p>The {@link KeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new Rsa key which activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created key details when a response has been received.</p>
* <pre>
* KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createKey(keyCreateOptions).subscribe(keyResponse ->
* System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param keyCreateOptions The key configuration object containing information about the key being created.
* @throws NullPointerException if {@code keyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code keyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createKey(KeyCreateOptions keyCreateOptions) {
    Objects.requireNonNull(keyCreateOptions, "The key options parameter cannot be null.");
    final String keyName = keyCreateOptions.name();
    // Translate the caller-facing options into the wire-format request.
    final KeyRequestParameters request = new KeyRequestParameters()
        .kty(keyCreateOptions.keyType())
        .keyOps(keyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(keyCreateOptions));
    return service.createKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating key - {}", keyName))
        .doOnSuccess(response -> logger.info("Created key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create key - {}", keyName, error));
}
/**
* Creates a new Rsa key and stores it in the key vault. The create Rsa key operation can be used to create any Rsa key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link RsaKeyCreateOptions} is required. The {@link RsaKeyCreateOptions
* and {@link RsaKeyCreateOptions
* is set to true by Azure Key Vault, if not specified.</p>
*
* <p>The {@link RsaKeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new RSA key with size 2048 which activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created key details when a response has been received.</p>
* <pre>
* RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName", KeyType.RSA)
* .keySize(2048)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createRsaKey(rsaKeyCreateOptions).subscribe(keyResponse ->
* System.out.printf("RSA Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param rsaKeyCreateOptions The key configuration object containing information about the rsa key being created.
* @throws NullPointerException if {@code rsaKeyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code rsaKeyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createRsaKey(RsaKeyCreateOptions rsaKeyCreateOptions) {
    Objects.requireNonNull(rsaKeyCreateOptions, "The Rsa key options parameter cannot be null.");
    final String keyName = rsaKeyCreateOptions.name();
    // Translate the caller-facing RSA options into the wire-format request.
    final KeyRequestParameters request = new KeyRequestParameters()
        .kty(rsaKeyCreateOptions.keyType())
        .keySize(rsaKeyCreateOptions.keySize())
        .keyOps(rsaKeyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(rsaKeyCreateOptions));
    return service.createKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating Rsa key - {}", keyName))
        .doOnSuccess(response -> logger.info("Created Rsa key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create Rsa key - {}", keyName, error));
}
/**
* Creates a new Ec key and stores it in the key vault. The create Ec key operation can be used to create any Ec key type in
* key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission.
*
* <p>The {@link EcKeyCreateOptions} parameter is required. The {@link EcKeyCreateOptions
* default value of {@link KeyCurveName
* are optional. The {@link EcKeyCreateOptions
*
* <p>The {@link EcKeyCreateOptions
* {@link KeyType
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a new EC key with P-384 web key curve. The key activates in one day and expires in one year. Subscribes to the call asynchronously
* and prints out the newly created ec key details when a response has been received.</p>
* <pre>
* EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName", KeyType.EC)
* .curve(KeyCurveName.P_384)
* .notBefore(OffsetDateTime.now().plusDays(1))
* .expires(OffsetDateTime.now().plusYears(1));
*
* keyAsyncClient.createEcKey(ecKeyCreateOptions).subscribe(keyResponse ->
* System.out.printf("EC Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param ecKeyCreateOptions The key options object containing information about the ec key being created.
* @throws NullPointerException if {@code ecKeyCreateOptions} is {@code null}.
* @throws ResourceModifiedException if {@code ecKeyCreateOptions} is malformed.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> createEcKey(EcKeyCreateOptions ecKeyCreateOptions) {
    // Message fixed: previously read "The Ec key options options cannot be null.";
    // now phrased consistently with createKey/createRsaKey.
    Objects.requireNonNull(ecKeyCreateOptions, "The Ec key options parameter cannot be null.");
    // Translate the caller-facing EC options into the wire-format request.
    KeyRequestParameters parameters = new KeyRequestParameters()
        .kty(ecKeyCreateOptions.keyType())
        .curve(ecKeyCreateOptions.curve())
        .keyOps(ecKeyCreateOptions.keyOperations())
        .keyAttributes(new KeyRequestAttributes(ecKeyCreateOptions));
    return service.createKey(endpoint, ecKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Creating Ec key - {}", ecKeyCreateOptions.name()))
        .doOnSuccess(response -> logger.info("Created Ec key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to create Ec key - {}", ecKeyCreateOptions.name(), error));
}
/**
* Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type
* into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details
* when a response has been received.</p>
* <pre>
* keyAsyncClient.importKey("keyName", jsonWebKeyToImport).subscribe(keyResponse ->
* System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param name The name for the imported key.
* @param keyMaterial The Json web key being imported.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> importKey(String name, JsonWebKey keyMaterial) {
    // Wrap the raw JSON web key in the import request payload.
    final KeyImportRequestParameters request = new KeyImportRequestParameters().key(keyMaterial);
    return service.importKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Importing key - {}", name))
        .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to import key - {}", name, error));
}
/**
* Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type
* into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission.
*
* <p>The {@code keyImportOptions} is required and its fields {@link KeyImportOptions
* be null. The {@link KeyImportOptions
* are optional. If not specified, no values are set for the fields. The {@link KeyImportOptions
* the {@link KeyImportOptions
*
* <p><strong>Code Samples</strong></p>
* <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details
* when a response has been received.</p>
* <pre>
* KeyImportOptions keyImportOptions = new KeyImportOptions("keyName", jsonWebKeyToImport)
* .hsm(true)
* .expires(OffsetDateTime.now().plusDays(60));
*
* keyAsyncClient.importKey(keyImportOptions).subscribe(keyResponse ->
* System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()));
* </pre>
*
* @param keyImportOptions The key import configuration object containing information about the json web key being imported.
* @throws NullPointerException if {@code keyImportOptions} is {@code null}.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> importKey(KeyImportOptions keyImportOptions) {
    Objects.requireNonNull(keyImportOptions, "The key import configuration parameter cannot be null.");
    final String keyName = keyImportOptions.name();
    // Translate the caller-facing import options into the wire-format request.
    final KeyImportRequestParameters request = new KeyImportRequestParameters()
        .key(keyImportOptions.keyMaterial())
        .hsm(keyImportOptions.hsm())
        .keyAttributes(new KeyRequestAttributes(keyImportOptions));
    return service.importKey(endpoint, keyName, API_VERSION, ACCEPT_LANGUAGE, request, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Importing key - {}", keyName))
        .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to import key - {}", keyName, error));
}
/**
* Gets the public part of the specified key and key version. The get key operation is applicable to all key types and it requires the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets a specific version of the key in the key vault. Subscribes to the call asynchronously and prints out the
* returned key details when a response has been received.</p>
* <pre>
* String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
* keyAsyncClient.getKey("keyName", keyVersion).subscribe(keyResponse ->
* System.out.printf("Key returned with name %s, id %s and version %s", keyResponse.value().name(),
* keyResponse.value().id(), keyResponse.value().version()));
* </pre>
*
* @param name The name of the key, cannot be null
* @param version The version of the key to retrieve. If this is an empty String or null, this call is equivalent to calling {@link KeyAsyncClient
* @throws ResourceNotFoundException when a key with {@code name} and {@code version} doesn't exist in the key vault.
* @throws HttpRequestException if {@code name} or {@code version} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> getKey(String name, String version) {
    // A null version is normalized to "", which the service resolves to the latest version.
    final String keyVersion = (version == null) ? "" : version;
    return service.getKey(endpoint, name, keyVersion, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE)
        .doOnRequest(ignored -> logger.info("Retrieving key - {}", name))
        .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name()))
        .doOnError(error -> logger.warning("Failed to get key - {}", name, error));
}
/**
* Get the public part of the latest version of the specified key from the key vault. The get key operation is applicable to
* all key types and it requires the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets latest version of the key in the key vault. Subscribes to the call asynchronously and prints out the
* returned key details when a response has been received.</p>
* <pre>
* keyAsyncClient.getKey("keyName").subscribe(keyResponse ->
* System.out.printf("Key with name %s, id %s \n", keyResponse.value().name(),
* keyResponse.value().id()));
* </pre>
*
* @param name The name of the key.
* @throws ResourceNotFoundException when a key with {@code name} doesn't exist in the key vault.
* @throws HttpRequestException if {@code name} is empty string.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
*/
public Mono<Response<Key>> getKey(String name) {
    // Delegates to getKey(name, version) with an empty version, which the service
    // resolves to the latest key version. The delegate already attaches identical
    // request/success/error logging, so no logging operators are re-added here —
    // previously they were duplicated, emitting each log line twice per call.
    return getKey(name, "");
}
/**
* Get public part of the key which represents {@link KeyBase keyBase} from the key vault. The get key operation is applicable
* to all key types and it requires the {@code keys/get} permission.
*
* <p>The list operations {@link KeyAsyncClient
* the {@link Flux} containing {@link KeyBase base key} as output excluding the key material of the key.
* This operation can then be used to get the full key with its key material from {@code keyBase}.</p>
* <pre>
* keyAsyncClient.listKeys().subscribe(keyBase ->
* client.getKey(keyBase).subscribe(keyResponse ->
* System.out.printf("Key with name %s and value %s \n", keyResponse.value().name(), keyResponse.value().id())));
* </pre>
*
* @param keyBase The {@link KeyBase base key} holding attributes of the key being requested.
* @throws ResourceNotFoundException when a key with {@link KeyBase
* @throws HttpRequestException if {@link KeyBase
|
To satisfy the project's Checkstyle rule (literal-first comparisons), change `preTransactionType != null` to `null != preTransactionType`.
|
public Object invoke(final MethodInvocation methodInvocation) throws Throwable {
    // Resolve the transaction-type annotation; absence is a programming error.
    ShardingTransactionType shardingTransactionType = getAnnotation(methodInvocation);
    Objects.requireNonNull(shardingTransactionType, "could not found sharding transaction type annotation");
    // Remember the caller's transaction type so it can be restored afterwards.
    TransactionType preTransactionType = TransactionTypeHolder.get();
    TransactionTypeHolder.set(shardingTransactionType.value());
    try {
        return methodInvocation.proceed();
    } finally {
        TransactionTypeHolder.clear();
        // Checkstyle fix: literal-first comparison (null != x) per project convention.
        if (null != preTransactionType) {
            TransactionTypeHolder.set(preTransactionType);
        }
    }
}
|
if (preTransactionType != null) {
|
public Object invoke(final MethodInvocation methodInvocation) throws Throwable {
    // Resolve the transaction-type annotation; absence is a programming error.
    final ShardingTransactionType annotation = getAnnotation(methodInvocation);
    Objects.requireNonNull(annotation, "could not found sharding transaction type annotation");
    // Remember the caller's transaction type so it can be restored afterwards.
    final TransactionType previousType = TransactionTypeHolder.get();
    TransactionTypeHolder.set(annotation.value());
    try {
        return methodInvocation.proceed();
    } finally {
        TransactionTypeHolder.clear();
        if (null != previousType) {
            TransactionTypeHolder.set(previousType);
        }
    }
}
|
class ShardingTransactionTypeInterceptor implements MethodInterceptor {

    /**
     * Resolves the {@link ShardingTransactionType} annotation for the invoked method,
     * preferring the method-level annotation and falling back to the class-level one.
     *
     * <p>Fix: removed the erroneous {@code @Override} annotation — a private method
     * cannot override anything, so javac rejects it.</p>
     */
    private ShardingTransactionType getAnnotation(final MethodInvocation invocation) {
        Objects.requireNonNull(invocation.getThis());
        Class<?> targetClass = AopUtils.getTargetClass(invocation.getThis());
        ShardingTransactionType result = getMethodAnnotation(invocation, targetClass);
        return null != result ? result : targetClass.getAnnotation(ShardingTransactionType.class);
    }

    /**
     * Looks up the annotation on the most specific method declaration, resolving
     * through compiler-generated bridge methods.
     */
    private ShardingTransactionType getMethodAnnotation(final MethodInvocation invocation, final Class<?> targetClass) {
        Method specificMethod = ClassUtils.getMostSpecificMethod(invocation.getMethod(), targetClass);
        final Method userDeclaredMethod = BridgeMethodResolver.findBridgedMethod(specificMethod);
        return userDeclaredMethod.getAnnotation(ShardingTransactionType.class);
    }
}
|
class ShardingTransactionTypeInterceptor implements MethodInterceptor {
@Override
private ShardingTransactionType getAnnotation(final MethodInvocation invocation) {
Objects.requireNonNull(invocation.getThis());
Class<?> targetClass = AopUtils.getTargetClass(invocation.getThis());
ShardingTransactionType result = getMethodAnnotation(invocation, targetClass);
return null != result ? result : targetClass.getAnnotation(ShardingTransactionType.class);
}
private ShardingTransactionType getMethodAnnotation(final MethodInvocation invocation, final Class<?> targetClass) {
Method specificMethod = ClassUtils.getMostSpecificMethod(invocation.getMethod(), targetClass);
final Method userDeclaredMethod = BridgeMethodResolver.findBridgedMethod(specificMethod);
return userDeclaredMethod.getAnnotation(ShardingTransactionType.class);
}
}
|
I think you could just use phase instead of getPhase() here
|
public void onNext(T value) {
synchronized (lock) {
if (++numMessages >= maxMessagesBeforeCheck) {
numMessages = 0;
int waitTime = 1;
int totalTimeWaited = 0;
int phase = phaser.getPhase();
int initialPhase = phase;
while (!outboundObserver.isReady()) {
try {
phase = phaser.awaitAdvanceInterruptibly(phase, waitTime, TimeUnit.SECONDS);
} catch (TimeoutException e) {
totalTimeWaited += waitTime;
waitTime = waitTime * 2;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
if (totalTimeWaited > 0) {
if (initialPhase == phaser.getPhase()) {
LOG.info(
"Output channel stalled for {}s, outbound thread {}. See: "
+ "https:
+ "this issue.",
totalTimeWaited,
Thread.currentThread().getName());
} else {
LOG.debug(
"Output channel stalled for {}s, outbound thread {}.",
totalTimeWaited,
Thread.currentThread().getName());
}
}
}
outboundObserver.onNext(value);
}
}
|
if (initialPhase == phaser.getPhase()) {
|
public void onNext(T value) {
synchronized (lock) {
if (++numMessages >= maxMessagesBeforeCheck) {
numMessages = 0;
int waitTime = 1;
int totalTimeWaited = 0;
int phase = phaser.getPhase();
int initialPhase = phase;
while (!outboundObserver.isReady()) {
try {
phase = phaser.awaitAdvanceInterruptibly(phase, waitTime, TimeUnit.SECONDS);
} catch (TimeoutException e) {
totalTimeWaited += waitTime;
waitTime = waitTime * 2;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
if (totalTimeWaited > 0) {
if (initialPhase == phase) {
LOG.info(
"Output channel stalled for {}s, outbound thread {}. See: "
+ "https:
+ "this issue.",
totalTimeWaited,
Thread.currentThread().getName());
} else {
LOG.debug(
"Output channel stalled for {}s, outbound thread {}.",
totalTimeWaited,
Thread.currentThread().getName());
}
}
}
outboundObserver.onNext(value);
}
}
|
class DirectStreamObserver<T> implements StreamObserver<T> {
private static final Logger LOG = LoggerFactory.getLogger(DirectStreamObserver.class);
private static final int DEFAULT_MAX_MESSAGES_BEFORE_CHECK = 100;
private final Phaser phaser;
private final CallStreamObserver<T> outboundObserver;
/**
* Controls the number of messages that will be sent before isReady is invoked for the following
* message. For example, maxMessagesBeforeCheck = 0, would mean to check isReady for each message
* while maxMessagesBeforeCheck = 10, would mean that you are willing to send 10 messages and then
* check isReady before the 11th message is sent.
*/
private final int maxMessagesBeforeCheck;
private final Object lock = new Object();
private int numMessages = -1;
public DirectStreamObserver(Phaser phaser, CallStreamObserver<T> outboundObserver) {
this(phaser, outboundObserver, DEFAULT_MAX_MESSAGES_BEFORE_CHECK);
}
DirectStreamObserver(
Phaser phaser, CallStreamObserver<T> outboundObserver, int maxMessagesBeforeCheck) {
this.phaser = phaser;
this.outboundObserver = outboundObserver;
this.maxMessagesBeforeCheck = maxMessagesBeforeCheck;
}
@Override
@Override
public void onError(Throwable t) {
synchronized (lock) {
outboundObserver.onError(t);
}
}
@Override
public void onCompleted() {
synchronized (lock) {
outboundObserver.onCompleted();
}
}
}
|
class DirectStreamObserver<T> implements StreamObserver<T> {
private static final Logger LOG = LoggerFactory.getLogger(DirectStreamObserver.class);
private static final int DEFAULT_MAX_MESSAGES_BEFORE_CHECK = 100;
private final Phaser phaser;
private final CallStreamObserver<T> outboundObserver;
/**
* Controls the number of messages that will be sent before isReady is invoked for the following
* message. For example, maxMessagesBeforeCheck = 0, would mean to check isReady for each message
* while maxMessagesBeforeCheck = 10, would mean that you are willing to send 10 messages and then
* check isReady before the 11th message is sent.
*/
private final int maxMessagesBeforeCheck;
private final Object lock = new Object();
private int numMessages = -1;
public DirectStreamObserver(Phaser phaser, CallStreamObserver<T> outboundObserver) {
this(phaser, outboundObserver, DEFAULT_MAX_MESSAGES_BEFORE_CHECK);
}
DirectStreamObserver(
Phaser phaser, CallStreamObserver<T> outboundObserver, int maxMessagesBeforeCheck) {
this.phaser = phaser;
this.outboundObserver = outboundObserver;
this.maxMessagesBeforeCheck = maxMessagesBeforeCheck;
}
@Override
@Override
public void onError(Throwable t) {
synchronized (lock) {
outboundObserver.onError(t);
}
}
@Override
public void onCompleted() {
synchronized (lock) {
outboundObserver.onCompleted();
}
}
}
|
ops! yes ;) I've used [guava](https://guava.dev/releases/19.0/api/docs/com/google/common/io/Closer.html) nomenclature
|
public void close() throws RocksDBException {
try {
if (batch.count() != 0) {
flush();
}
} finally {
IOUtils.closeQuietly(batch);
if (ownsWriteOptions) {
IOUtils.closeQuietly(options);
}
}
}
|
}
|
public void close() throws RocksDBException {
try {
if (batch.count() != 0) {
flush();
}
} finally {
IOUtils.closeAllQuietly(toClose);
}
}
|
class RocksDBWriteBatchWrapper implements AutoCloseable {
private static final int MIN_CAPACITY = 100;
private static final int MAX_CAPACITY = 1000;
private static final int PER_RECORD_BYTES = 100;
private static final long DEFAULT_BATCH_SIZE = 0;
private final RocksDB db;
private final WriteBatch batch;
private final WriteOptions options;
/** True if this instance created and has ownership of the WriteOptions and must close it. */
private final boolean ownsWriteOptions;
private final int capacity;
@Nonnegative private final long batchSize;
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, long writeBatchSize) {
this(rocksDB, null, 500, writeBatchSize);
}
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, @Nullable WriteOptions options) {
this(rocksDB, options, 500, DEFAULT_BATCH_SIZE);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB, @Nullable WriteOptions options, long batchSize) {
this(rocksDB, options, 500, batchSize);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB,
@Nullable WriteOptions options,
int capacity,
long batchSize) {
Preconditions.checkArgument(
capacity >= MIN_CAPACITY && capacity <= MAX_CAPACITY,
"capacity should be between " + MIN_CAPACITY + " and " + MAX_CAPACITY);
Preconditions.checkArgument(batchSize >= 0, "Max batch size have to be no negative.");
this.db = rocksDB;
if (options != null) {
this.options = options;
this.ownsWriteOptions = false;
} else {
this.options = new WriteOptions().setDisableWAL(true);
this.ownsWriteOptions = true;
}
this.capacity = capacity;
this.batchSize = batchSize;
if (this.batchSize > 0) {
this.batch =
new WriteBatch(
(int) Math.min(this.batchSize, this.capacity * PER_RECORD_BYTES));
} else {
this.batch = new WriteBatch(this.capacity * PER_RECORD_BYTES);
}
}
public void put(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key, @Nonnull byte[] value)
throws RocksDBException {
batch.put(handle, key, value);
flushIfNeeded();
}
public void remove(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key)
throws RocksDBException {
batch.delete(handle, key);
flushIfNeeded();
}
public void flush() throws RocksDBException {
db.write(options, batch);
batch.clear();
}
public WriteOptions getOptions() {
return options;
}
@Override
private void flushIfNeeded() throws RocksDBException {
boolean needFlush =
batch.count() == capacity || (batchSize > 0 && getDataSize() >= batchSize);
if (needFlush) {
flush();
}
}
@VisibleForTesting
long getDataSize() {
return batch.getDataSize();
}
}
|
class RocksDBWriteBatchWrapper implements AutoCloseable {
private static final int MIN_CAPACITY = 100;
private static final int MAX_CAPACITY = 1000;
private static final int PER_RECORD_BYTES = 100;
private static final long DEFAULT_BATCH_SIZE = 0;
private final RocksDB db;
private final WriteBatch batch;
private final WriteOptions options;
private final int capacity;
@Nonnegative private final long batchSize;
/** List of all objects that we need to close in close(). */
private final List<AutoCloseable> toClose;
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, long writeBatchSize) {
this(rocksDB, null, 500, writeBatchSize);
}
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, @Nullable WriteOptions options) {
this(rocksDB, options, 500, DEFAULT_BATCH_SIZE);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB, @Nullable WriteOptions options, long batchSize) {
this(rocksDB, options, 500, batchSize);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB,
@Nullable WriteOptions options,
int capacity,
long batchSize) {
Preconditions.checkArgument(
capacity >= MIN_CAPACITY && capacity <= MAX_CAPACITY,
"capacity should be between " + MIN_CAPACITY + " and " + MAX_CAPACITY);
Preconditions.checkArgument(batchSize >= 0, "Max batch size have to be no negative.");
this.db = rocksDB;
this.capacity = capacity;
this.batchSize = batchSize;
this.toClose = new ArrayList<>(2);
if (this.batchSize > 0) {
this.batch =
new WriteBatch(
(int) Math.min(this.batchSize, this.capacity * PER_RECORD_BYTES));
} else {
this.batch = new WriteBatch(this.capacity * PER_RECORD_BYTES);
}
this.toClose.add(this.batch);
if (options != null) {
this.options = options;
} else {
this.options = new WriteOptions().setDisableWAL(true);
this.toClose.add(this.options);
}
}
public void put(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key, @Nonnull byte[] value)
throws RocksDBException {
batch.put(handle, key, value);
flushIfNeeded();
}
public void remove(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key)
throws RocksDBException {
batch.delete(handle, key);
flushIfNeeded();
}
public void flush() throws RocksDBException {
db.write(options, batch);
batch.clear();
}
@VisibleForTesting
WriteOptions getOptions() {
return options;
}
@Override
private void flushIfNeeded() throws RocksDBException {
boolean needFlush =
batch.count() == capacity || (batchSize > 0 && getDataSize() >= batchSize);
if (needFlush) {
flush();
}
}
@VisibleForTesting
long getDataSize() {
return batch.getDataSize();
}
}
|
We should also check for `isNullLiteral`?
|
private void setArguments(CallContext callContext) {
DataType[] inputTypes = callContext.getArgumentDataTypes().toArray(new DataType[0]);
Object[] constantArgs = new Object[inputTypes.length];
for (int i = 0; i < constantArgs.length; i++) {
if (callContext.isArgumentLiteral(i)) {
constantArgs[i] = callContext.getArgumentValue(
i, ClassLogicalTypeConverter.getDefaultExternalClassForType(inputTypes[i].getLogicalType()))
.orElse(null);
}
}
this.constantArguments = constantArgs;
this.argTypes = inputTypes;
}
|
if (callContext.isArgumentLiteral(i)) {
|
private void setArguments(CallContext callContext) {
DataType[] inputTypes = callContext.getArgumentDataTypes().toArray(new DataType[0]);
Object[] constantArgs = new Object[inputTypes.length];
for (int i = 0; i < constantArgs.length; i++) {
if (callContext.isArgumentLiteral(i)) {
constantArgs[i] = callContext.getArgumentValue(
i, ClassLogicalTypeConverter.getDefaultExternalClassForType(inputTypes[i].getLogicalType()))
.orElse(null);
}
}
this.constantArguments = constantArgs;
this.argTypes = inputTypes;
}
|
class HiveScalarFunction<UDFType> extends ScalarFunction {
protected final HiveFunctionWrapper<UDFType> hiveFunctionWrapper;
protected Object[] constantArguments;
protected DataType[] argTypes;
protected transient UDFType function;
protected transient ObjectInspector returnInspector;
private transient boolean isArgsSingleArray;
HiveScalarFunction(HiveFunctionWrapper<UDFType> hiveFunctionWrapper) {
this.hiveFunctionWrapper = hiveFunctionWrapper;
}
@Override
public boolean isDeterministic() {
try {
org.apache.hadoop.hive.ql.udf.UDFType udfType =
hiveFunctionWrapper.getUDFClass()
.getAnnotation(org.apache.hadoop.hive.ql.udf.UDFType.class);
return udfType != null && udfType.deterministic() && !udfType.stateful();
} catch (ClassNotFoundException e) {
throw new FlinkHiveUDFException(e);
}
}
@Override
public void open(FunctionContext context) {
openInternal();
isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
}
@Override
public TypeInference getTypeInference(DataTypeFactory typeFactory) {
TypeInference.Builder builder = TypeInference.newBuilder();
builder.inputTypeStrategy(new HiveUDFInputStrategy());
builder.outputTypeStrategy(new HiveUDFOutputStrategy());
return builder.build();
}
/**
* See {@link ScalarFunction
*/
protected abstract void openInternal();
public Object eval(Object... args) {
if (isArgsSingleArray) {
args = new Object[] {args};
}
return evalInternal(args);
}
/**
* Evaluation logical, args will be wrapped when is a single array.
*/
protected abstract Object evalInternal(Object[] args);
/**
* Infer return type of this function call.
*/
protected abstract DataType inferReturnType() throws UDFArgumentException;
private class HiveUDFOutputStrategy implements TypeStrategy {
@Override
public Optional<DataType> inferType(CallContext callContext) {
setArguments(callContext);
try {
return Optional.of(inferReturnType());
} catch (UDFArgumentException e) {
throw new FlinkHiveUDFException(e);
}
}
}
private class HiveUDFInputStrategy implements InputTypeStrategy {
@Override
public ArgumentCount getArgumentCount() {
return ConstantArgumentCount.any();
}
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
setArguments(callContext);
try {
inferReturnType();
} catch (UDFArgumentException e) {
if (throwOnFailure) {
throw new ValidationException(
String.format("Cannot find a suitable Hive function from %s for the input arguments",
hiveFunctionWrapper.getClassName()), e);
} else {
return Optional.empty();
}
}
return Optional.of(callContext.getArgumentDataTypes());
}
@Override
public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
return Collections.singletonList(Signature.of(Signature.Argument.of("*")));
}
}
}
|
class HiveScalarFunction<UDFType> extends ScalarFunction {
protected final HiveFunctionWrapper<UDFType> hiveFunctionWrapper;
protected Object[] constantArguments;
protected DataType[] argTypes;
protected transient UDFType function;
protected transient ObjectInspector returnInspector;
private transient boolean isArgsSingleArray;
HiveScalarFunction(HiveFunctionWrapper<UDFType> hiveFunctionWrapper) {
this.hiveFunctionWrapper = hiveFunctionWrapper;
}
@Override
public boolean isDeterministic() {
try {
org.apache.hadoop.hive.ql.udf.UDFType udfType =
hiveFunctionWrapper.getUDFClass()
.getAnnotation(org.apache.hadoop.hive.ql.udf.UDFType.class);
return udfType != null && udfType.deterministic() && !udfType.stateful();
} catch (ClassNotFoundException e) {
throw new FlinkHiveUDFException(e);
}
}
@Override
public void open(FunctionContext context) {
openInternal();
isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
}
@Override
public TypeInference getTypeInference(DataTypeFactory typeFactory) {
TypeInference.Builder builder = TypeInference.newBuilder();
builder.inputTypeStrategy(new HiveUDFInputStrategy());
builder.outputTypeStrategy(new HiveUDFOutputStrategy());
return builder.build();
}
/**
* See {@link ScalarFunction
*/
protected abstract void openInternal();
public Object eval(Object... args) {
if (isArgsSingleArray) {
args = new Object[] {args};
}
return evalInternal(args);
}
/**
* Evaluation logical, args will be wrapped when is a single array.
*/
protected abstract Object evalInternal(Object[] args);
/**
* Infer return type of this function call.
*/
protected abstract DataType inferReturnType() throws UDFArgumentException;
private class HiveUDFOutputStrategy implements TypeStrategy {
@Override
public Optional<DataType> inferType(CallContext callContext) {
setArguments(callContext);
try {
return Optional.of(inferReturnType());
} catch (UDFArgumentException e) {
throw new FlinkHiveUDFException(e);
}
}
}
private class HiveUDFInputStrategy implements InputTypeStrategy {
@Override
public ArgumentCount getArgumentCount() {
return ConstantArgumentCount.any();
}
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
setArguments(callContext);
try {
inferReturnType();
} catch (UDFArgumentException e) {
if (throwOnFailure) {
throw new ValidationException(
String.format("Cannot find a suitable Hive function from %s for the input arguments",
hiveFunctionWrapper.getClassName()), e);
} else {
return Optional.empty();
}
}
return Optional.of(callContext.getArgumentDataTypes());
}
@Override
public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
return Collections.singletonList(Signature.of(Signature.Argument.of("*")));
}
}
}
|
Let's stay with the option 2 (as you have currently implemented)
|
public BufferAndBacklog getNextBuffer() throws IOException {
if (isReleased) {
return null;
}
Buffer current = dataReader.nextBuffer();
if (current == null) {
return null;
}
updateStatistics(current);
Buffer.DataType nextDataType = Buffer.DataType.NONE;
if (numDataBuffers > 0) {
nextDataType = Buffer.DataType.DATA_BUFFER;
} else if (numDataAndEventBuffers > 0) {
nextDataType = Buffer.DataType.EVENT_BUFFER;
}
return BufferAndBacklog.fromBufferAndLookahead(
current, nextDataType, numDataBuffers, sequenceNumber++);
}
|
}
|
public BufferAndBacklog getNextBuffer() throws IOException {
if (isReleased) {
return null;
}
Buffer current = dataReader.nextBuffer();
if (current == null) {
return null;
}
updateStatistics(current);
Buffer.DataType nextDataType = Buffer.DataType.NONE;
if (numDataBuffers > 0) {
nextDataType = Buffer.DataType.DATA_BUFFER;
} else if (numDataAndEventBuffers > 0) {
nextDataType = Buffer.DataType.EVENT_BUFFER;
}
return BufferAndBacklog.fromBufferAndLookahead(
current, nextDataType, numDataBuffers, sequenceNumber++);
}
|
class BoundedBlockingSubpartitionDirectTransferReader implements ResultSubpartitionView {
/** The result subpartition that we read. */
private final BoundedBlockingSubpartition parent;
/** The reader/decoder to the file region with the data we currently read from. */
private final BoundedData.Reader dataReader;
/** The remaining number of data buffers (not events) in the result. */
private int numDataBuffers;
/** The remaining number of data buffers and events in the result. */
private int numDataAndEventBuffers;
/** Flag whether this reader is released. */
private boolean isReleased;
private int sequenceNumber;
BoundedBlockingSubpartitionDirectTransferReader(
BoundedBlockingSubpartition parent,
Path filePath,
int numDataBuffers,
int numDataAndEventBuffers)
throws IOException {
checkArgument(numDataAndEventBuffers - numDataBuffers == 1, "Too many event buffers.");
this.parent = checkNotNull(parent);
checkNotNull(filePath);
this.dataReader = new FileRegionReader(filePath);
checkArgument(numDataBuffers >= 0);
this.numDataBuffers = numDataBuffers;
checkArgument(numDataAndEventBuffers >= 0);
this.numDataAndEventBuffers = numDataAndEventBuffers;
}
@Nullable
@Override
private void updateStatistics(Buffer buffer) {
if (buffer.isBuffer()) {
numDataBuffers--;
}
numDataAndEventBuffers--;
}
@Override
public boolean isAvailable(int numCreditsAvailable) {
return (numCreditsAvailable > 0 || numDataBuffers == 0) && numDataAndEventBuffers > 0;
}
@Override
public int getRemainingBacklog() {
return numDataBuffers;
}
@Override
public void releaseAllResources() throws IOException {
isReleased = true;
IOUtils.closeQuietly(dataReader);
parent.releaseReaderReference(this);
}
@Override
public boolean isReleased() {
return isReleased;
}
@Override
public Throwable getFailureCause() {
return null;
}
@Override
public int unsynchronizedGetNumberOfQueuedBuffers() {
return parent.unsynchronizedGetNumberOfQueuedBuffers();
}
@Override
public void notifyDataAvailable() {
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public void resumeConsumption() {
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public String toString() {
return String.format(
"Blocking Subpartition Reader: ID=%s, index=%d",
parent.parent.getPartitionId(), parent.getSubPartitionIndex());
}
/**
* The reader to read from {@link BoundedBlockingSubpartition} and return the wrapped {@link
* org.apache.flink.shaded.netty4.io.netty.channel.FileRegion} based buffer.
*/
static final class FileRegionReader implements BoundedData.Reader {
private final FileChannel fileChannel;
private final ByteBuffer headerBuffer;
FileRegionReader(Path filePath) throws IOException {
this.fileChannel = FileChannel.open(filePath, StandardOpenOption.READ);
this.headerBuffer = BufferReaderWriterUtil.allocatedHeaderBuffer();
}
@Nullable
@Override
public Buffer nextBuffer() throws IOException {
return BufferReaderWriterUtil.readFileRegionFromByteChannel(fileChannel, headerBuffer);
}
@Override
public void close() throws IOException {
fileChannel.close();
}
}
}
|
class BoundedBlockingSubpartitionDirectTransferReader implements ResultSubpartitionView {
/** The result subpartition that we read. */
private final BoundedBlockingSubpartition parent;
/** The reader/decoder to the file region with the data we currently read from. */
private final BoundedData.Reader dataReader;
/** The remaining number of data buffers (not events) in the result. */
private int numDataBuffers;
/** The remaining number of data buffers and events in the result. */
private int numDataAndEventBuffers;
/** Flag whether this reader is released. */
private boolean isReleased;
private int sequenceNumber;
BoundedBlockingSubpartitionDirectTransferReader(
BoundedBlockingSubpartition parent,
Path filePath,
int numDataBuffers,
int numDataAndEventBuffers)
throws IOException {
checkArgument(numDataAndEventBuffers - numDataBuffers == 1, "Too many event buffers.");
this.parent = checkNotNull(parent);
checkNotNull(filePath);
this.dataReader = new FileRegionReader(filePath);
checkArgument(numDataBuffers >= 0);
this.numDataBuffers = numDataBuffers;
checkArgument(numDataAndEventBuffers >= 0);
this.numDataAndEventBuffers = numDataAndEventBuffers;
}
@Nullable
@Override
private void updateStatistics(Buffer buffer) {
if (buffer.isBuffer()) {
numDataBuffers--;
}
numDataAndEventBuffers--;
}
@Override
public AvailabilityWithBacklog getAvailabilityAndBacklog(int numCreditsAvailable) {
return new AvailabilityWithBacklog(
(numCreditsAvailable > 0 || numDataBuffers == 0) && numDataAndEventBuffers > 0,
numDataBuffers);
}
@Override
public void releaseAllResources() throws IOException {
isReleased = true;
IOUtils.closeQuietly(dataReader);
parent.releaseReaderReference(this);
}
@Override
public boolean isReleased() {
return isReleased;
}
@Override
public Throwable getFailureCause() {
return null;
}
@Override
public int unsynchronizedGetNumberOfQueuedBuffers() {
return parent.unsynchronizedGetNumberOfQueuedBuffers();
}
@Override
public void notifyDataAvailable() {
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public void resumeConsumption() {
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public void acknowledgeAllRecordsProcessed() {
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public String toString() {
return String.format(
"Blocking Subpartition Reader: ID=%s, index=%d",
parent.parent.getPartitionId(), parent.getSubPartitionIndex());
}
/**
* The reader to read from {@link BoundedBlockingSubpartition} and return the wrapped {@link
* org.apache.flink.shaded.netty4.io.netty.channel.FileRegion} based buffer.
*/
static final class FileRegionReader implements BoundedData.Reader {
private final FileChannel fileChannel;
private final ByteBuffer headerBuffer;
FileRegionReader(Path filePath) throws IOException {
this.fileChannel = FileChannel.open(filePath, StandardOpenOption.READ);
this.headerBuffer = BufferReaderWriterUtil.allocatedHeaderBuffer();
}
@Nullable
@Override
public Buffer nextBuffer() throws IOException {
return BufferReaderWriterUtil.readFileRegionFromByteChannel(fileChannel, headerBuffer);
}
@Override
public void close() throws IOException {
fileChannel.close();
}
}
}
|
same as above (if I understand your suggestion correctly)
|
private static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {
String peerService = attributes.get(SemanticAttributes.PEER_SERVICE);
if (peerService != null) {
return peerService;
}
String host = attributes.get(SemanticAttributes.NET_PEER_NAME);
if (host != null) {
Long port = attributes.get(SemanticAttributes.NET_PEER_PORT);
return getTarget(host, port, defaultPort);
}
host = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);
if (host == null) {
host = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);
}
if (host != null) {
Long port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);
return getTarget(host, port, defaultPort);
}
String httpUrl = attributes.get(SemanticAttributes.HTTP_URL);
if (httpUrl != null) {
return UrlParser.getTarget(httpUrl);
}
return null;
}
|
Long port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);
|
private static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {
String peerService = attributes.get(SemanticAttributes.PEER_SERVICE);
if (peerService != null) {
return peerService;
}
String host = attributes.get(SemanticAttributes.NET_PEER_NAME);
if (host != null) {
Long port = attributes.get(SemanticAttributes.NET_PEER_PORT);
return getTarget(host, port, defaultPort);
}
host = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);
if (host == null) {
host = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);
}
if (host != null) {
Long port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);
return getTarget(host, port, defaultPort);
}
String httpUrl = attributes.get(SemanticAttributes.HTTP_URL);
if (httpUrl != null) {
return UrlParser.getTarget(httpUrl);
}
return null;
}
|
class SpanDataMapper {
public static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = "_MS.ProcessedByMetricExtractors";
private static final Set<String> SQL_DB_SYSTEMS =
new HashSet<>(
asList(
SemanticAttributes.DbSystemValues.DB2,
SemanticAttributes.DbSystemValues.DERBY,
SemanticAttributes.DbSystemValues.MARIADB,
SemanticAttributes.DbSystemValues.MSSQL,
SemanticAttributes.DbSystemValues.MYSQL,
SemanticAttributes.DbSystemValues.ORACLE,
SemanticAttributes.DbSystemValues.POSTGRESQL,
SemanticAttributes.DbSystemValues.SQLITE,
SemanticAttributes.DbSystemValues.OTHER_SQL,
SemanticAttributes.DbSystemValues.HSQLDB,
SemanticAttributes.DbSystemValues.H2));
private static final String COSMOS = "Cosmos";
private static final Mappings MAPPINGS;
private static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString("ai.device.os");
static {
MappingsBuilder mappingsBuilder =
new MappingsBuilder()
.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())
.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())
.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())
.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())
.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())
.exact(
SemanticAttributes.USER_AGENT_ORIGINAL.getKey(),
(builder, value) -> {
if (value instanceof String) {
builder.addTag("ai.user.userAgent", (String) value);
}
})
.ignorePrefix("applicationinsights.internal.")
.prefix(
"http.request.header.",
(telemetryBuilder, key, value) -> {
if (value instanceof List) {
telemetryBuilder.addProperty(key, Mappings.join((List<?>) value));
}
})
.prefix(
"http.response.header.",
(telemetryBuilder, key, value) -> {
if (value instanceof List) {
telemetryBuilder.addProperty(key, Mappings.join((List<?>) value));
}
});
applyCommonTags(mappingsBuilder);
MAPPINGS = mappingsBuilder.build();
}
private final boolean captureHttpServer4xxAsError;
private final BiConsumer<AbstractTelemetryBuilder, Resource> telemetryInitializer;
private final BiPredicate<EventData, String> eventSuppressor;
private final BiPredicate<SpanData, EventData> shouldSuppress;
public SpanDataMapper(
boolean captureHttpServer4xxAsError,
BiConsumer<AbstractTelemetryBuilder, Resource> telemetryInitializer,
BiPredicate<EventData, String> eventSuppressor,
BiPredicate<SpanData, EventData> shouldSuppress) {
this.captureHttpServer4xxAsError = captureHttpServer4xxAsError;
this.telemetryInitializer = telemetryInitializer;
this.eventSuppressor = eventSuppressor;
this.shouldSuppress = shouldSuppress;
}
public TelemetryItem map(SpanData span) {
long itemCount = getItemCount(span);
return map(span, itemCount);
}
public void map(SpanData span, Consumer<TelemetryItem> consumer) {
long itemCount = getItemCount(span);
TelemetryItem telemetryItem = map(span, itemCount);
consumer.accept(telemetryItem);
exportEvents(
span,
telemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),
itemCount,
consumer);
}
public TelemetryItem map(SpanData span, long itemCount) {
if (RequestChecker.isRequest(span)) {
return exportRequest(span, itemCount);
} else {
return exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);
}
}
private static boolean checkIsPreAggregatedStandardMetric(SpanData span) {
Boolean isPreAggregatedStandardMetric =
span.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);
return isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;
}
private TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {
RemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationTags(telemetryBuilder, span);
setTime(telemetryBuilder, span.getStartEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(span.getAttributes(), telemetryBuilder);
addLinks(telemetryBuilder, span.getLinks());
telemetryBuilder.setId(span.getSpanId());
telemetryBuilder.setName(getDependencyName(span));
telemetryBuilder.setDuration(
FormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));
telemetryBuilder.setSuccess(getSuccess(span));
if (inProc) {
telemetryBuilder.setType("InProc");
} else {
applySemanticConventions(telemetryBuilder, span);
}
if (checkIsPreAggregatedStandardMetric(span)) {
telemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, "True");
}
return telemetryBuilder.build();
}
// Span names that are just a bare HTTP method (the instrumentation default
// when no route is available); such names get replaced by "METHOD /path".
private static final Set<String> DEFAULT_HTTP_SPAN_NAMES =
new HashSet<>(
asList("OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "PATCH"));
// Returns the dependency name: the span name as-is, unless it is one of the
// default method-only names and a URL path can be extracted from attributes.
private static String getDependencyName(SpanData span) {
String name = span.getName();
String method = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);
if (method == null) {
return name;
}
// A custom (non-default) span name is kept as-is.
if (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {
return name;
}
String url = getStableAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);
if (url == null) {
return name;
}
String path = UrlParser.getPath(url);
if (path == null) {
return name;
}
// An empty path means the root resource.
return path.isEmpty() ? method + " /" : method + " " + path;
}
// Chooses the dependency type/target/data based on which semantic-convention
// family the span's attributes belong to, checked in precedence order:
// HTTP, RPC, database, messaging; otherwise a plain target or "InProc".
private static void applySemanticConventions(
RemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {
Attributes attributes = span.getAttributes();
String httpMethod = getStableAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);
if (httpMethod != null) {
applyHttpClientSpan(telemetryBuilder, attributes);
return;
}
String rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);
if (rpcSystem != null) {
applyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);
return;
}
String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
if (dbSystem == null) {
// Azure SDK spans record the database type under a vendor attribute.
dbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);
}
if (dbSystem != null) {
applyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);
return;
}
String messagingSystem = getMessagingSystem(attributes);
if (messagingSystem != null) {
applyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);
return;
}
// No recognized convention: report a generic target when one can be
// derived, otherwise treat the dependency as in-process.
String target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);
if (target != null) {
telemetryBuilder.setTarget(target);
return;
}
telemetryBuilder.setType("InProc");
}
@Nullable
private static String getMessagingSystem(Attributes attributes) {
  // An Azure SDK messaging namespace takes precedence over the generic
  // messaging.system attribute.
  String azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);
  return isAzureSdkMessaging(azureNamespace)
      ? azureNamespace
      : attributes.get(SemanticAttributes.MESSAGING_SYSTEM);
}
// Applies the operation id / parent id / name context tags shared by all
// telemetry derived from a span.
private static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {
setOperationId(telemetryBuilder, span.getTraceId());
setOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());
setOperationName(telemetryBuilder, span.getAttributes());
}
// The operation id is the W3C trace id.
private static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);
}
private static void setOperationParentId(
AbstractTelemetryBuilder telemetryBuilder, String parentSpanId) {
// Root spans have an invalid (all-zero) parent span id; no tag in that case.
if (SpanId.isValid(parentSpanId)) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
// Applies the operation-name tag from the span attribute, when present.
private static void setOperationName(
AbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {
String operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
}
}
private static void setOperationName(
AbstractTelemetryBuilder telemetryBuilder, String operationName) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);
}
// Maps an HTTP client span: type "Http", target host[:port] (with the
// scheme's default port suppressed), result code from the response status,
// and data = the full request URL.
private static void applyHttpClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, Attributes attributes) {
String httpUrl = getStableAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);
int defaultPort = getDefaultPortForHttpUrl(httpUrl);
String target = getTargetOrDefault(attributes, defaultPort, "Http");
telemetryBuilder.setType("Http");
telemetryBuilder.setTarget(target);
Long httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode != null) {
telemetryBuilder.setResultCode(Long.toString(httpStatusCode));
} else {
// A missing response status (e.g. connection failure) is reported as "0".
telemetryBuilder.setResultCode("0");
}
telemetryBuilder.setData(httpUrl);
}
// Maps an RPC client span: the rpc.system value doubles as both the
// dependency type and the fallback target.
private static void applyRpcClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {
telemetryBuilder.setType(rpcSystem);
String target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);
telemetryBuilder.setTarget(target);
}
private static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {
if (httpUrl == null) {
return Integer.MAX_VALUE;
}
if (httpUrl.startsWith("https:
return 443;
}
if (httpUrl.startsWith("http:
return 80;
}
return Integer.MAX_VALUE;
}
// Resolves the dependency target: stable-semconv attributes first
// (peer.service, server.address), then old-semconv, then the given default
// (which may be null). defaultPort suppresses redundant port suffixes.
public static String getTargetOrDefault(
Attributes attributes, int defaultPort, String defaultTarget) {
String target = getTargetOrNullStableSemconv(attributes, defaultPort);
if (target != null) {
return target;
}
target = getTargetOrNullOldSemconv(attributes, defaultPort);
if (target != null) {
return target;
}
return defaultTarget;
}
@Nullable
private static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {
// An explicit peer.service always wins.
String peerService = attributes.get(SemanticAttributes.PEER_SERVICE);
if (peerService != null) {
return peerService;
}
String host = attributes.get(SemanticAttributes.SERVER_ADDRESS);
if (host != null) {
Long port = attributes.get(SemanticAttributes.SERVER_PORT);
return getTarget(host, port, defaultPort);
}
return null;
}
@Nullable
private static String getTarget(String host, @Nullable Long port, int defaultPort) {
// Append the port only when it is explicit and not the scheme default.
if (port != null && port != defaultPort) {
return host + ":" + port;
} else {
return host;
}
}
// Maps a database client span: dependency type from db.system (SQL systems
// normalized), data = statement (or operation), target = host[:port] | dbname.
private static void applyDatabaseClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {
String dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);
if (dbStatement == null) {
dbStatement = attributes.get(SemanticAttributes.DB_OPERATION);
}
String type;
if (SQL_DB_SYSTEMS.contains(dbSystem)) {
// mysql/postgresql keep their own display types; other SQL systems share "SQL".
if (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {
type = "mysql";
} else if (dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {
type = "postgresql";
} else {
type = "SQL";
}
} else if (dbSystem.equals(COSMOS)) {
type = "Microsoft.DocumentDb";
} else {
type = dbSystem;
}
telemetryBuilder.setType(type);
telemetryBuilder.setData(dbStatement);
String target;
String dbName;
if (dbSystem.equals(COSMOS)) {
// Cosmos spans from the Azure SDK carry their own URL/instance attributes.
String dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);
if (dbUrl != null) {
target = UrlParser.getTarget(dbUrl);
} else {
target = null;
}
dbName = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);
} else {
target = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);
dbName = attributes.get(SemanticAttributes.DB_NAME);
}
// Fall back to the bare system name when neither target nor db name exists.
target = nullAwareConcat(target, dbName, " | ");
if (target == null) {
target = dbSystem;
}
telemetryBuilder.setTarget(target);
}
private static void applyMessagingClientSpan(
    RemoteDependencyTelemetryBuilder telemetryBuilder,
    SpanKind spanKind,
    String messagingSystem,
    Attributes attributes) {
  // Producer spans get a distinct "Queue Message" dependency type.
  String type =
      spanKind == SpanKind.PRODUCER ? "Queue Message | " + messagingSystem : messagingSystem;
  telemetryBuilder.setType(type);
  telemetryBuilder.setTarget(getMessagingTargetSource(attributes));
}
// Well-known default port per db.system, used to suppress redundant port
// suffixes in dependency targets; Integer.MAX_VALUE means "no default".
private static int getDefaultPortForDbSystem(String dbSystem) {
switch (dbSystem) {
case SemanticAttributes.DbSystemValues.MONGODB:
return 27017;
case SemanticAttributes.DbSystemValues.CASSANDRA:
return 9042;
case SemanticAttributes.DbSystemValues.REDIS:
return 6379;
case SemanticAttributes.DbSystemValues.MARIADB:
case SemanticAttributes.DbSystemValues.MYSQL:
return 3306;
case SemanticAttributes.DbSystemValues.MSSQL:
return 1433;
case SemanticAttributes.DbSystemValues.DB2:
return 50000;
case SemanticAttributes.DbSystemValues.ORACLE:
return 1521;
case SemanticAttributes.DbSystemValues.H2:
return 8082;
case SemanticAttributes.DbSystemValues.DERBY:
return 1527;
case SemanticAttributes.DbSystemValues.POSTGRESQL:
return 5432;
default:
return Integer.MAX_VALUE;
}
}
// Builds a RequestTelemetry item from a server/consumer span, including
// operation tags, legacy Application Insights correlation ids, HTTP fields,
// source, and queue-time measurements for messaging requests.
private TelemetryItem exportRequest(SpanData span, long itemCount) {
RequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
Attributes attributes = span.getAttributes();
long startEpochNanos = span.getStartEpochNanos();
telemetryBuilder.setId(span.getSpanId());
setTime(telemetryBuilder, startEpochNanos);
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(attributes, telemetryBuilder);
addLinks(telemetryBuilder, span.getLinks());
String operationName = getOperationName(span);
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
// Legacy AI SDK correlation: an explicit legacy parent id wins over the
// span's actual parent.
String aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);
if (aiLegacyParentId != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
} else if (span.getParentSpanContext().isValid()) {
telemetryBuilder.addTag(
ContextTagKeys.AI_OPERATION_PARENT_ID.toString(),
span.getParentSpanContext().getSpanId());
}
String aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);
if (aiLegacyRootId != null) {
telemetryBuilder.addTag("ai_legacyRootID", aiLegacyRootId);
}
telemetryBuilder.setName(operationName);
telemetryBuilder.setDuration(
FormattedDuration.fromNanos(span.getEndEpochNanos() - startEpochNanos));
telemetryBuilder.setSuccess(getSuccess(span));
String httpUrl = getHttpUrlFromServerSpan(attributes);
if (httpUrl != null) {
telemetryBuilder.setUrl(httpUrl);
}
// Response code: HTTP status, falling back to the gRPC status code.
Long httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode == null) {
httpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);
}
if (httpStatusCode != null) {
telemetryBuilder.setResponseCode(Long.toString(httpStatusCode));
} else {
telemetryBuilder.setResponseCode("0");
}
// Client IP: stable/old client-address attributes, then the socket peer.
String locationIp = getStableAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);
if (locationIp == null) {
locationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);
}
if (locationIp != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);
}
telemetryBuilder.setSource(getSource(attributes));
String sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);
if (sessionId != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);
}
String deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);
if (deviceOs != null) {
telemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);
}
String deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);
if (deviceOsVersion != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);
}
if (checkIsPreAggregatedStandardMetric(span)) {
telemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, "True");
}
// Azure SDK enqueued time is in seconds (see SECONDS.toMillis below);
// negative clock skew is clamped to zero.
Long enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);
if (enqueuedTime != null) {
long timeSinceEnqueuedMillis =
Math.max(
0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));
telemetryBuilder.addMeasurement("timeSinceEnqueued", (double) timeSinceEnqueuedMillis);
}
// Kafka reports queue time directly in milliseconds.
Long timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);
if (timeSinceEnqueuedMillis != null) {
telemetryBuilder.addMeasurement("timeSinceEnqueued", (double) timeSinceEnqueuedMillis);
}
return telemetryBuilder.build();
}
// Determines request/dependency success. Explicit ERROR/OK status wins; for
// UNSET status, HTTP 4xx/5xx responses count as failures only when
// captureHttpServer4xxAsError is enabled.
private boolean getSuccess(SpanData span) {
switch (span.getStatus().getStatusCode()) {
case ERROR:
return false;
case OK:
return true;
case UNSET:
if (captureHttpServer4xxAsError) {
Long statusCode = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);
// A missing status code is treated as success.
return statusCode == null || statusCode < 400;
}
return true;
}
return true;
}
@Nullable
public static String getHttpUrlFromServerSpan(Attributes attributes) {
  // Prefer the stable HTTP semantic conventions; fall back to the old ones.
  String stableUrl = getHttpUrlFromServerSpanStableSemconv(attributes);
  return stableUrl != null ? stableUrl : getHttpUrlFromServerSpanOldSemconv(attributes);
}
@Nullable
private static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {
String scheme = attributes.get(SemanticAttributes.URL_SCHEME);
if (scheme == null) {
return null;
}
String path = attributes.get(SemanticAttributes.URL_PATH);
if (path == null) {
return null;
}
String host = attributes.get(SemanticAttributes.SERVER_ADDRESS);
if (host == null) {
return null;
}
Long port = attributes.get(SemanticAttributes.SERVER_PORT);
if (port != null && port > 0) {
return scheme + ":
}
return scheme + ":
}
@Nullable
private static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {
String httpUrl = attributes.get(SemanticAttributes.HTTP_URL);
if (httpUrl != null) {
return httpUrl;
}
String scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);
if (scheme == null) {
return null;
}
String target = attributes.get(SemanticAttributes.HTTP_TARGET);
if (target == null) {
return null;
}
String host = attributes.get(SemanticAttributes.NET_HOST_NAME);
Long port = attributes.get(SemanticAttributes.NET_HOST_PORT);
if (port != null && port > 0) {
return scheme + ":
}
return scheme + ":
}
@Nullable
private static String getSource(Attributes attributes) {
  // An explicitly recorded source wins; otherwise fall back to the
  // messaging-derived source.
  String explicitSource = attributes.get(AiSemanticAttributes.SPAN_SOURCE);
  return explicitSource != null ? explicitSource : getMessagingTargetSource(attributes);
}
// Derives the messaging source/target: Azure SDK peer address + destination
// when available, else old-semconv host + destination name, else the bare
// messaging system name; null for non-messaging spans.
@Nullable
private static String getMessagingTargetSource(Attributes attributes) {
if (isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {
String peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);
if (peerAddress != null) {
String destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);
return peerAddress + "/" + destination;
}
}
String messagingSystem = getMessagingSystem(attributes);
if (messagingSystem == null) {
return null;
}
String source =
nullAwareConcat(
getTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),
attributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),
"/");
if (source != null) {
return source;
}
return messagingSystem;
}
private static boolean isAzureSdkMessaging(String messagingSystem) {
  // Only the Azure Event Hubs and Service Bus SDK namespaces count as Azure
  // messaging; equals() is invoked on the constants, so null is safe.
  if ("Microsoft.EventHub".equals(messagingSystem)) {
    return true;
  }
  return "Microsoft.ServiceBus".equals(messagingSystem);
}
private static String getOperationName(SpanData span) {
  // An explicit operation-name attribute overrides the span name.
  String operationName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);
  return operationName != null ? operationName : span.getName();
}
private static String nullAwareConcat(
    @Nullable String str1, @Nullable String str2, String separator) {
  // Joins the two strings with the separator; when either side is null the
  // other is returned unchanged (null when both are null).
  if (str1 != null && str2 != null) {
    return str1 + separator + str2;
  }
  return str1 != null ? str1 : str2;
}
// Emits telemetry for the span's events: exception events become exception
// telemetry (only for spans without a local parent, so an exception is not
// reported at every level of the trace); all other events become message
// (trace) telemetry.
private void exportEvents(
SpanData span,
@Nullable String operationName,
long itemCount,
Consumer<TelemetryItem> consumer) {
for (EventData event : span.getEvents()) {
String instrumentationScopeName = span.getInstrumentationScopeInfo().getName();
if (eventSuppressor.test(event, instrumentationScopeName)) {
continue;
}
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
SpanContext parentSpanContext = span.getParentSpanContext();
// Only export when the span is a trace root or its parent is remote.
if (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {
String stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null && !shouldSuppress.test(span, event)) {
consumer.accept(
createExceptionTelemetryItem(stacktrace, span, operationName, itemCount));
}
}
// NOTE(review): this returns (not continues), so any events after the
// first exception event are dropped — confirm that this is intentional.
return;
}
MessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationId(telemetryBuilder, span.getTraceId());
// The event's parent is the span it occurred on.
setOperationParentId(telemetryBuilder, span.getSpanId());
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
} else {
setOperationName(telemetryBuilder, span.getAttributes());
}
setTime(telemetryBuilder, event.getEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(event.getAttributes(), telemetryBuilder);
telemetryBuilder.setMessage(event.getName());
consumer.accept(telemetryBuilder.build());
}
}
// Builds an exception telemetry item correlated to the span: the operation
// parent id is the span itself, so the exception hangs off the
// request/dependency; timestamped at the span's end.
private TelemetryItem createExceptionTelemetryItem(
String errorStack, SpanData span, @Nullable String operationName, long itemCount) {
ExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationId(telemetryBuilder, span.getTraceId());
setOperationParentId(telemetryBuilder, span.getSpanId());
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
} else {
setOperationName(telemetryBuilder, span.getAttributes());
}
setTime(telemetryBuilder, span.getEndEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(span.getAttributes(), telemetryBuilder);
// The stack trace string is parsed into structured exception details.
telemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));
return telemetryBuilder.build();
}
public static <T> T getStableAttribute(Attributes attributes, AttributeKey<T> stable, AttributeKey<T> old) {
  // Prefer the stable semantic-convention key, falling back to the
  // deprecated one when the stable key is absent.
  T stableValue = attributes.get(stable);
  return stableValue != null ? stableValue : attributes.get(old);
}
private static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {
telemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));
}
// Encodes sampling as a sample-rate percentage (100 / itemCount); an item
// count of 1 leaves the default sample rate in place.
private static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {
if (itemCount != 1) {
telemetryBuilder.setSampleRate(100.0f / itemCount);
}
}
// Item count recorded on the span (how many original items it represents);
// absent means 1.
private static long getItemCount(SpanData span) {
Long itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);
return itemCount == null ? 1 : itemCount;
}
private static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List<LinkData> links) {
  // Span links are serialized as a JSON array of {operation_Id, id} objects
  // into the "_MS.links" property; no property is added when there are none.
  if (links.isEmpty()) {
    return;
  }
  StringBuilder json = new StringBuilder("[");
  String separator = "";
  for (LinkData link : links) {
    json.append(separator)
        .append("{\"operation_Id\":\"")
        .append(link.getSpanContext().getTraceId())
        .append("\",\"id\":\"")
        .append(link.getSpanContext().getSpanId())
        .append("\"}");
    separator = ",";
  }
  json.append("]");
  telemetryBuilder.addProperty("_MS.links", json.toString());
}
// Registers attribute-to-context-tag mappings shared by all telemetry types
// (authenticated user id, application version), plus the connection string /
// role name override handling.
static void applyCommonTags(MappingsBuilder mappingsBuilder) {
mappingsBuilder
.exact(
SemanticAttributes.ENDUSER_ID.getKey(),
(telemetryBuilder, value) -> {
if (value instanceof String) {
telemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);
}
})
.exact(
AiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),
(telemetryBuilder, value) -> {
if (value instanceof String) {
telemetryBuilder.addTag(
ContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);
}
});
applyConnectionStringAndRoleNameOverrides(mappingsBuilder);
}
@SuppressWarnings("deprecation")
private static final WarningLogger connectionStringAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"connectionStringOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
@SuppressWarnings("deprecation")
private static final WarningLogger roleNameAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"roleNameOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
@SuppressWarnings("deprecation")
private static final WarningLogger roleInstanceAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please reach out to"
+ " https:
+ " case for this.");
@SuppressWarnings("deprecation")
private static final WarningLogger instrumentationKeyAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"connectionStringOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
// Wires attribute mappings for per-telemetry connection-string / role-name
// overrides, and records warnings for deprecated attributes that are ignored.
@SuppressWarnings("deprecation")
static void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {
  mappingsBuilder
      .exact(
          AiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),
          (telemetryBuilder, value) -> {
            // Guard the cast (fix: was an unchecked (String) cast, unlike the
            // role-name mapping below, and could throw ClassCastException).
            if (value instanceof String) {
              telemetryBuilder.setConnectionString(ConnectionString.parse((String) value));
            }
          })
      .exact(
          AiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),
          (telemetryBuilder, value) -> {
            if (value instanceof String) {
              telemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);
            }
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),
          (telemetryBuilder, value) -> {
            connectionStringAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),
          (telemetryBuilder, value) -> {
            roleNameAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),
          (telemetryBuilder, value) -> {
            roleInstanceAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),
          (telemetryBuilder, value) -> {
            instrumentationKeyAttributeNoLongerSupported.recordWarning();
          });
}
}
|
// Maps OpenTelemetry SpanData to Application Insights telemetry items
// (requests, remote dependencies, trace messages and exceptions).
class SpanDataMapper {
// Marker property telling ingestion that standard metrics were already
// pre-aggregated for this item.
public static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = "_MS.ProcessedByMetricExtractors";
// db.system values that are reported with SQL-flavored dependency types.
private static final Set<String> SQL_DB_SYSTEMS =
new HashSet<>(
asList(
SemanticAttributes.DbSystemValues.DB2,
SemanticAttributes.DbSystemValues.DERBY,
SemanticAttributes.DbSystemValues.MARIADB,
SemanticAttributes.DbSystemValues.MSSQL,
SemanticAttributes.DbSystemValues.MYSQL,
SemanticAttributes.DbSystemValues.ORACLE,
SemanticAttributes.DbSystemValues.POSTGRESQL,
SemanticAttributes.DbSystemValues.SQLITE,
SemanticAttributes.DbSystemValues.OTHER_SQL,
SemanticAttributes.DbSystemValues.HSQLDB,
SemanticAttributes.DbSystemValues.H2));
// Database type recorded by the Azure SDK for Cosmos DB spans.
private static final String COSMOS = "Cosmos";
// Shared attribute-mapping table, assigned in the static initializer.
private static final Mappings MAPPINGS;
private static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString("ai.device.os");
// Builds the shared attribute-mapping table: drops internal / Azure-SDK
// bookkeeping attributes, maps the user agent to a context tag, and copies
// captured HTTP request/response headers into custom properties.
static {
MappingsBuilder mappingsBuilder =
new MappingsBuilder(SPAN)
.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())
.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())
.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())
.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())
.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())
.exact(
SemanticAttributes.USER_AGENT_ORIGINAL.getKey(),
(builder, value) -> {
if (value instanceof String) {
builder.addTag("ai.user.userAgent", (String) value);
}
})
.ignorePrefix("applicationinsights.internal.")
.prefix(
"http.request.header.",
(telemetryBuilder, key, value) -> {
// Captured headers are multi-valued; join them into one property.
if (value instanceof List) {
telemetryBuilder.addProperty(key, Mappings.join((List<?>) value));
}
})
.prefix(
"http.response.header.",
(telemetryBuilder, key, value) -> {
if (value instanceof List) {
telemetryBuilder.addProperty(key, Mappings.join((List<?>) value));
}
});
applyCommonTags(mappingsBuilder);
MAPPINGS = mappingsBuilder.build();
}
// When true, HTTP server 4xx responses are reported as failures.
private final boolean captureHttpServer4xxAsError;
// Populates connection string / role information from the span's Resource.
private final BiConsumer<AbstractTelemetryBuilder, Resource> telemetryInitializer;
// Filters span events (by event and instrumentation scope name) from export.
private final BiPredicate<EventData, String> eventSuppressor;
// Decides whether an exception event on a span should not be exported.
private final BiPredicate<SpanData, EventData> shouldSuppress;
public SpanDataMapper(
boolean captureHttpServer4xxAsError,
BiConsumer<AbstractTelemetryBuilder, Resource> telemetryInitializer,
BiPredicate<EventData, String> eventSuppressor,
BiPredicate<SpanData, EventData> shouldSuppress) {
this.captureHttpServer4xxAsError = captureHttpServer4xxAsError;
this.telemetryInitializer = telemetryInitializer;
this.eventSuppressor = eventSuppressor;
this.shouldSuppress = shouldSuppress;
}
// Maps a span to a single telemetry item using its sampled item count.
public TelemetryItem map(SpanData span) {
long itemCount = getItemCount(span);
return map(span, itemCount);
}
// Maps a span plus its events; each resulting item is handed to the consumer.
public void map(SpanData span, Consumer<TelemetryItem> consumer) {
long itemCount = getItemCount(span);
TelemetryItem telemetryItem = map(span, itemCount);
consumer.accept(telemetryItem);
exportEvents(
span,
telemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),
itemCount,
consumer);
}
// Requests map to request telemetry; everything else maps to a remote
// dependency (INTERNAL spans are marked as in-process).
public TelemetryItem map(SpanData span, long itemCount) {
if (RequestChecker.isRequest(span)) {
return exportRequest(span, itemCount);
} else {
return exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);
}
}
// The attribute is tri-state (absent/true/false); only an explicit true counts.
private static boolean checkIsPreAggregatedStandardMetric(SpanData span) {
Boolean isPreAggregatedStandardMetric =
span.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);
return isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;
}
// Builds a RemoteDependencyTelemetry item from a non-request span; inProc
// marks INTERNAL spans, which are exported with type "InProc".
private TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {
RemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationTags(telemetryBuilder, span);
setTime(telemetryBuilder, span.getStartEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(span.getAttributes(), telemetryBuilder);
addLinks(telemetryBuilder, span.getLinks());
telemetryBuilder.setId(span.getSpanId());
telemetryBuilder.setName(getDependencyName(span));
telemetryBuilder.setDuration(
FormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));
telemetryBuilder.setSuccess(getSuccess(span));
if (inProc) {
telemetryBuilder.setType("InProc");
} else {
// Derive type/target/data from HTTP, RPC, DB or messaging conventions.
applySemanticConventions(telemetryBuilder, span);
}
if (checkIsPreAggregatedStandardMetric(span)) {
telemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, "True");
}
return telemetryBuilder.build();
}
// Span names that are just a bare HTTP method; replaced by "METHOD /path".
private static final Set<String> DEFAULT_HTTP_SPAN_NAMES =
new HashSet<>(
asList("OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "PATCH"));
// Returns the dependency name: the span name as-is, unless it is one of the
// default method-only names and a URL path can be extracted from attributes.
private static String getDependencyName(SpanData span) {
String name = span.getName();
String method = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);
if (method == null) {
return name;
}
// A custom (non-default) span name is kept as-is.
if (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {
return name;
}
String url = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);
if (url == null) {
return name;
}
String path = UrlParser.getPath(url);
if (path == null) {
return name;
}
// An empty path means the root resource.
return path.isEmpty() ? method + " /" : method + " " + path;
}
// Chooses the dependency type/target/data based on which semantic-convention
// family the span's attributes belong to, checked in precedence order:
// HTTP, RPC, database, messaging; otherwise a plain target or "InProc".
private static void applySemanticConventions(
RemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {
Attributes attributes = span.getAttributes();
String httpMethod = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);
if (httpMethod != null) {
applyHttpClientSpan(telemetryBuilder, attributes);
return;
}
String rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);
if (rpcSystem != null) {
applyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);
return;
}
String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
if (dbSystem == null) {
// Azure SDK spans record the database type under a vendor attribute.
dbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);
}
if (dbSystem != null) {
applyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);
return;
}
String messagingSystem = getMessagingSystem(attributes);
if (messagingSystem != null) {
applyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);
return;
}
// No recognized convention: report a generic target when one can be
// derived, otherwise treat the dependency as in-process.
String target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);
if (target != null) {
telemetryBuilder.setTarget(target);
return;
}
telemetryBuilder.setType("InProc");
}
// Returns the messaging system: an Azure SDK messaging namespace wins over
// the generic messaging.system attribute; null for non-messaging spans.
@Nullable
private static String getMessagingSystem(Attributes attributes) {
String azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);
if (isAzureSdkMessaging(azureNamespace)) {
return azureNamespace;
}
return attributes.get(SemanticAttributes.MESSAGING_SYSTEM);
}
// Applies the operation id / parent id / name context tags shared by all
// telemetry derived from a span.
private static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {
setOperationId(telemetryBuilder, span.getTraceId());
setOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());
setOperationName(telemetryBuilder, span.getAttributes());
}
// The operation id is the W3C trace id.
private static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);
}
private static void setOperationParentId(
AbstractTelemetryBuilder telemetryBuilder, String parentSpanId) {
// Root spans have an invalid (all-zero) parent span id; no tag in that case.
if (SpanId.isValid(parentSpanId)) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
// Applies the operation-name tag from the span attribute, when present.
private static void setOperationName(
AbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {
String operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
}
}
private static void setOperationName(
AbstractTelemetryBuilder telemetryBuilder, String operationName) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);
}
// Maps an HTTP client span: type "Http", target host[:port] (with the
// scheme's default port suppressed), result code from the response status,
// and data = the full request URL.
private static void applyHttpClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, Attributes attributes) {
String httpUrl = getStableOrOldAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);
int defaultPort = getDefaultPortForHttpUrl(httpUrl);
String target = getTargetOrDefault(attributes, defaultPort, "Http");
telemetryBuilder.setType("Http");
telemetryBuilder.setTarget(target);
Long httpStatusCode = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode != null) {
telemetryBuilder.setResultCode(Long.toString(httpStatusCode));
} else {
// A missing response status (e.g. connection failure) is reported as "0".
telemetryBuilder.setResultCode("0");
}
telemetryBuilder.setData(httpUrl);
}
// Maps an RPC client span: the rpc.system value doubles as both the
// dependency type and the fallback target.
private static void applyRpcClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {
telemetryBuilder.setType(rpcSystem);
String target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);
telemetryBuilder.setTarget(target);
}
private static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {
if (httpUrl == null) {
return Integer.MAX_VALUE;
}
if (httpUrl.startsWith("https:
return 443;
}
if (httpUrl.startsWith("http:
return 80;
}
return Integer.MAX_VALUE;
}
// Resolves the dependency target: stable-semconv attributes first
// (peer.service, server.address), then old-semconv, then the given default
// (which may be null). defaultPort suppresses redundant port suffixes.
public static String getTargetOrDefault(
Attributes attributes, int defaultPort, String defaultTarget) {
String target = getTargetOrNullStableSemconv(attributes, defaultPort);
if (target != null) {
return target;
}
target = getTargetOrNullOldSemconv(attributes, defaultPort);
if (target != null) {
return target;
}
return defaultTarget;
}
@Nullable
private static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {
// An explicit peer.service always wins.
String peerService = attributes.get(SemanticAttributes.PEER_SERVICE);
if (peerService != null) {
return peerService;
}
String host = attributes.get(SemanticAttributes.SERVER_ADDRESS);
if (host != null) {
Long port = attributes.get(SemanticAttributes.SERVER_PORT);
return getTarget(host, port, defaultPort);
}
return null;
}
@Nullable
private static String getTarget(String host, @Nullable Long port, int defaultPort) {
// Append the port only when it is explicit and not the scheme default.
if (port != null && port != defaultPort) {
return host + ":" + port;
} else {
return host;
}
}
// Maps a database client span onto dependency telemetry: derives the type from
// db.system, the data from the statement (or operation as a fallback), and the
// target from host/name attributes (or Azure-SDK-specific attributes for Cosmos).
private static void applyDatabaseClientSpan(
RemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {
String dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);
if (dbStatement == null) {
// Fall back to the operation name when no full statement was captured.
dbStatement = attributes.get(SemanticAttributes.DB_OPERATION);
}
String type;
if (SQL_DB_SYSTEMS.contains(dbSystem)) {
// mysql and postgresql get their own type names; other SQL systems share "SQL".
if (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {
type = "mysql";
} else if (dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {
type = "postgresql";
} else {
type = "SQL";
}
} else if (dbSystem.equals(COSMOS)) {
type = "Microsoft.DocumentDb";
} else {
type = dbSystem;
}
telemetryBuilder.setType(type);
telemetryBuilder.setData(dbStatement);
String target;
String dbName;
if (dbSystem.equals(COSMOS)) {
// Cosmos carries its endpoint and database name in Azure-SDK-specific attributes.
String dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);
if (dbUrl != null) {
target = UrlParser.getTarget(dbUrl);
} else {
target = null;
}
dbName = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);
} else {
target = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);
dbName = attributes.get(SemanticAttributes.DB_NAME);
}
// Combine "host[:port] | dbName", tolerating either side being null; fall back to
// the bare system name when both are missing.
target = nullAwareConcat(target, dbName, " | ");
if (target == null) {
target = dbSystem;
}
telemetryBuilder.setTarget(target);
}
/**
 * Fills in type and target for a messaging client span. Producer spans get the
 * "Queue Message | &lt;system&gt;" type; other kinds use the messaging system name directly.
 */
private static void applyMessagingClientSpan(
    RemoteDependencyTelemetryBuilder telemetryBuilder,
    SpanKind spanKind,
    String messagingSystem,
    Attributes attributes) {
  String type =
      spanKind == SpanKind.PRODUCER ? "Queue Message | " + messagingSystem : messagingSystem;
  telemetryBuilder.setType(type);
  telemetryBuilder.setTarget(getMessagingTargetSource(attributes));
}
// Well-known default port for each supported database system; MAX_VALUE means
// "no known default" so the port is always rendered in the target.
private static int getDefaultPortForDbSystem(String dbSystem) {
switch (dbSystem) {
case SemanticAttributes.DbSystemValues.MONGODB:
return 27017;
case SemanticAttributes.DbSystemValues.CASSANDRA:
return 9042;
case SemanticAttributes.DbSystemValues.REDIS:
return 6379;
// MariaDB is protocol-compatible with MySQL and shares its port.
case SemanticAttributes.DbSystemValues.MARIADB:
case SemanticAttributes.DbSystemValues.MYSQL:
return 3306;
case SemanticAttributes.DbSystemValues.MSSQL:
return 1433;
case SemanticAttributes.DbSystemValues.DB2:
return 50000;
case SemanticAttributes.DbSystemValues.ORACLE:
return 1521;
case SemanticAttributes.DbSystemValues.H2:
return 8082;
case SemanticAttributes.DbSystemValues.DERBY:
return 1527;
case SemanticAttributes.DbSystemValues.POSTGRESQL:
return 5432;
default:
return Integer.MAX_VALUE;
}
}
// Converts a server/consumer span into a request telemetry item: identity and
// correlation tags, duration/success, response code, client IP, source, and a few
// optional measurements (queue time).
private TelemetryItem exportRequest(SpanData span, long itemCount) {
RequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
Attributes attributes = span.getAttributes();
long startEpochNanos = span.getStartEpochNanos();
telemetryBuilder.setId(span.getSpanId());
setTime(telemetryBuilder, startEpochNanos);
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(attributes, telemetryBuilder);
addLinks(telemetryBuilder, span.getLinks());
String operationName = getOperationName(span);
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
// A legacy parent id attribute (from the AI classic SDK bridge) takes precedence
// over the span's actual parent context.
String aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);
if (aiLegacyParentId != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId)
} else if (span.getParentSpanContext().isValid()) {
telemetryBuilder.addTag(
ContextTagKeys.AI_OPERATION_PARENT_ID.toString(),
span.getParentSpanContext().getSpanId());
}
String aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);
if (aiLegacyRootId != null) {
telemetryBuilder.addTag("ai_legacyRootID", aiLegacyRootId);
}
telemetryBuilder.setName(operationName);
telemetryBuilder.setDuration(
FormattedDuration.fromNanos(span.getEndEpochNanos() - startEpochNanos));
telemetryBuilder.setSuccess(getSuccess(span));
String httpUrl = getHttpUrlFromServerSpan(attributes);
if (httpUrl != null) {
telemetryBuilder.setUrl(httpUrl);
}
// Response code: HTTP status (stable then old semconv), falling back to the gRPC
// status code; "0" when neither is available.
Long httpStatusCode = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode == null) {
httpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);
}
if (httpStatusCode != null) {
telemetryBuilder.setResponseCode(Long.toString(httpStatusCode));
} else {
telemetryBuilder.setResponseCode("0");
}
String locationIp = getStableOrOldAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);
if (locationIp == null) {
locationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);
}
if (locationIp != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);
}
telemetryBuilder.setSource(getSource(attributes));
String sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);
if (sessionId != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);
}
String deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);
if (deviceOs != null) {
telemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);
}
String deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);
if (deviceOsVersion != null) {
telemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);
}
if (checkIsPreAggregatedStandardMetric(span)) {
telemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, "True");
}
// Queue dwell time: Azure SDK records enqueued time in epoch SECONDS, clamped to
// zero in case of clock skew; Kafka records the queue time in millis directly.
Long enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);
if (enqueuedTime != null) {
long timeSinceEnqueuedMillis =
Math.max(
0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));
telemetryBuilder.addMeasurement("timeSinceEnqueued", (double) timeSinceEnqueuedMillis);
}
Long timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);
if (timeSinceEnqueuedMillis != null) {
telemetryBuilder.addMeasurement("timeSinceEnqueued", (double) timeSinceEnqueuedMillis);
}
return telemetryBuilder.build();
}
/**
 * Derives request success from the span status: ERROR means failure, OK means success,
 * and UNSET optionally (per configuration) treats HTTP status >= 400 as failure.
 */
private boolean getSuccess(SpanData span) {
  switch (span.getStatus().getStatusCode()) {
    case ERROR:
      return false;
    case UNSET:
      if (!captureHttpServer4xxAsError) {
        return true;
      }
      Long statusCode =
          getStableOrOldAttribute(
              span.getAttributes(),
              SemanticAttributes.HTTP_RESPONSE_STATUS_CODE,
              SemanticAttributes.HTTP_STATUS_CODE);
      // Missing status counts as success.
      return statusCode == null || statusCode < 400;
    case OK:
    default:
      return true;
  }
}
/** Reconstructs the request URL, preferring stable semconv attributes over the old ones. */
@Nullable
public static String getHttpUrlFromServerSpan(Attributes attributes) {
  String stableUrl = getHttpUrlFromServerSpanStableSemconv(attributes);
  return stableUrl != null ? stableUrl : getHttpUrlFromServerSpanOldSemconv(attributes);
}
@Nullable
private static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {
String scheme = attributes.get(SemanticAttributes.URL_SCHEME);
if (scheme == null) {
return null;
}
String path = attributes.get(SemanticAttributes.URL_PATH);
if (path == null) {
return null;
}
String host = attributes.get(SemanticAttributes.SERVER_ADDRESS);
if (host == null) {
return null;
}
Long port = attributes.get(SemanticAttributes.SERVER_PORT);
if (port != null && port > 0) {
return scheme + ":
}
return scheme + ":
}
@Nullable
private static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {
String httpUrl = attributes.get(SemanticAttributes.HTTP_URL);
if (httpUrl != null) {
return httpUrl;
}
String scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);
if (scheme == null) {
return null;
}
String target = attributes.get(SemanticAttributes.HTTP_TARGET);
if (target == null) {
return null;
}
String host = attributes.get(SemanticAttributes.NET_HOST_NAME);
Long port = attributes.get(SemanticAttributes.NET_HOST_PORT);
if (port != null && port > 0) {
return scheme + ":
}
return scheme + ":
}
/** Request source: an explicit span-source attribute wins, else messaging-derived. */
@Nullable
private static String getSource(Attributes attributes) {
  String explicitSource = attributes.get(AiSemanticAttributes.SPAN_SOURCE);
  return explicitSource != null ? explicitSource : getMessagingTargetSource(attributes);
}
// Derives the messaging target/source string: Azure SDK peer-address/destination
// when available, else "host/destination" from old semconv, else the bare system name.
@Nullable
private static String getMessagingTargetSource(Attributes attributes) {
if (isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {
String peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);
if (peerAddress != null) {
// NOTE(review): destination may be null here, yielding "peer/null" — TODO confirm
// whether the Azure SDK always sets the message-bus destination with the peer.
String destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);
return peerAddress + "/" + destination;
}
}
String messagingSystem = getMessagingSystem(attributes);
if (messagingSystem == null) {
return null;
}
String source =
nullAwareConcat(
getTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),
attributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),
"/");
if (source != null) {
return source;
}
return messagingSystem;
}
/** True for Azure SDK messaging namespaces (Event Hubs / Service Bus); null-safe. */
private static boolean isAzureSdkMessaging(String messagingSystem) {
  if ("Microsoft.EventHub".equals(messagingSystem)) {
    return true;
  }
  return "Microsoft.ServiceBus".equals(messagingSystem);
}
/** Operation name: an explicit attribute override wins, else the span name. */
private static String getOperationName(SpanData span) {
  String explicitName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);
  return explicitName != null ? explicitName : span.getName();
}
/**
 * Joins two strings with a separator, treating a null side as absent; returns null
 * only when both sides are null.
 */
private static String nullAwareConcat(
    @Nullable String str1, @Nullable String str2, String separator) {
  if (str1 != null && str2 != null) {
    return str1 + separator + str2;
  }
  return str1 != null ? str1 : str2;
}
// Emits telemetry for span events: exception events on local-root spans become
// exception telemetry (and stop further event processing for the span); all other
// non-suppressed events become trace/message telemetry.
private void exportEvents(
SpanData span,
@Nullable String operationName,
long itemCount,
Consumer<TelemetryItem> consumer) {
for (EventData event : span.getEvents()) {
String instrumentationScopeName = span.getInstrumentationScopeInfo().getName();
if (eventSuppressor.test(event, instrumentationScopeName)) {
continue;
}
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
// Only report exceptions on local root spans (no parent, or a remote parent);
// nested spans presumably propagate the exception upward — the early return
// below also skips any remaining events once an exception event is seen.
SpanContext parentSpanContext = span.getParentSpanContext();
if (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {
String stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null && !shouldSuppress.test(span, event)) {
consumer.accept(
createExceptionTelemetryItem(stacktrace, span, operationName, itemCount));
}
}
return;
}
MessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationId(telemetryBuilder, span.getTraceId());
setOperationParentId(telemetryBuilder, span.getSpanId());
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
} else {
setOperationName(telemetryBuilder, span.getAttributes());
}
setTime(telemetryBuilder, event.getEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(event.getAttributes(), telemetryBuilder);
telemetryBuilder.setMessage(event.getName());
consumer.accept(telemetryBuilder.build());
}
}
// Builds exception telemetry from a captured stack trace string, correlated to the
// given span and stamped with the span's end time.
private TelemetryItem createExceptionTelemetryItem(
String errorStack, SpanData span, @Nullable String operationName, long itemCount) {
ExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();
telemetryInitializer.accept(telemetryBuilder, span.getResource());
setOperationId(telemetryBuilder, span.getTraceId());
setOperationParentId(telemetryBuilder, span.getSpanId());
if (operationName != null) {
setOperationName(telemetryBuilder, operationName);
} else {
setOperationName(telemetryBuilder, span.getAttributes());
}
setTime(telemetryBuilder, span.getEndEpochNanos());
setItemCount(telemetryBuilder, itemCount);
MAPPINGS.map(span.getAttributes(), telemetryBuilder);
// Parse only the minimal structure (type/message/frames) out of the raw stack text.
telemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));
return telemetryBuilder.build();
}
/** Returns the stable-semconv attribute when present, otherwise the old-semconv one. */
public static <T> T getStableOrOldAttribute(Attributes attributes, AttributeKey<T> stable, AttributeKey<T> old) {
  T stableValue = attributes.get(stable);
  return stableValue != null ? stableValue : attributes.get(old);
}
// Stamps the telemetry with the given time, converted from epoch nanos.
private static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {
telemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));
}
/**
 * Encodes sampling: an item count of N means this telemetry stands in for N items,
 * recorded as sampleRate = 100 / N. A count of 1 leaves the default rate untouched.
 */
private static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {
  if (itemCount == 1) {
    return;
  }
  telemetryBuilder.setSampleRate(100.0f / itemCount);
}
/** Item count carried on the span, defaulting to 1 when the attribute is absent. */
private static long getItemCount(SpanData span) {
  Long itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);
  if (itemCount == null) {
    return 1;
  }
  return itemCount;
}
/** Serializes span links as a compact JSON array into the "_MS.links" property. */
private static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List<LinkData> links) {
  if (links.isEmpty()) {
    return;
  }
  StringBuilder json = new StringBuilder("[");
  String separator = "";
  for (LinkData link : links) {
    json.append(separator)
        .append("{\"operation_Id\":\"")
        .append(link.getSpanContext().getTraceId())
        .append("\",\"id\":\"")
        .append(link.getSpanContext().getSpanId())
        .append("\"}");
    separator = ",";
  }
  json.append("]");
  telemetryBuilder.addProperty("_MS.links", json.toString());
}
// Registers attribute-to-tag mappings shared by all telemetry types (user id,
// application version), then the connection-string / role-name override handlers.
static void applyCommonTags(MappingsBuilder mappingsBuilder) {
mappingsBuilder
.exact(
SemanticAttributes.ENDUSER_ID.getKey(),
(telemetryBuilder, value) -> {
// Guarded cast: only string-valued attributes are mapped.
if (value instanceof String) {
telemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);
}
})
.exact(
AiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),
(telemetryBuilder, value) -> {
if (value instanceof String) {
telemetryBuilder.addTag(
ContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);
}
});
applyConnectionStringAndRoleNameOverrides(mappingsBuilder);
}
// Rate-limited warning loggers, one per removed legacy attribute; each fires when the
// corresponding deprecated attribute is still observed on telemetry.
// NOTE(review): the message strings below appear truncated after "https:" — the URLs
// were presumably cut at "//" by source extraction; bytes preserved as-is here.
@SuppressWarnings("deprecation")
private static final WarningLogger connectionStringAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"connectionStringOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
@SuppressWarnings("deprecation")
private static final WarningLogger roleNameAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"roleNameOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
@SuppressWarnings("deprecation")
private static final WarningLogger roleInstanceAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please reach out to"
+ " https:
+ " case for this.");
@SuppressWarnings("deprecation")
private static final WarningLogger instrumentationKeyAttributeNoLongerSupported =
new WarningLogger(
SpanDataMapper.class,
AiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()
+ " is no longer supported because it"
+ " is incompatible with pre-aggregated standard metrics. Please use"
+ " \"connectionStringOverrides\" configuration, or reach out to"
+ " https:
+ " different use case.");
// Registers handlers for per-telemetry connection-string / role-name overrides,
// plus warning-only handlers for the removed legacy attributes.
@SuppressWarnings("deprecation")
static void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {
  mappingsBuilder
      .exact(
          AiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),
          (telemetryBuilder, value) -> {
            // Guard the cast, consistent with the role-name handler below; avoids a
            // ClassCastException if the attribute ever carries a non-string value.
            if (value instanceof String) {
              telemetryBuilder.setConnectionString(ConnectionString.parse((String) value));
            }
          })
      .exact(
          AiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),
          (telemetryBuilder, value) -> {
            if (value instanceof String) {
              telemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);
            }
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),
          (telemetryBuilder, value) -> {
            // Value deliberately ignored; attribute is no longer supported, warn only.
            connectionStringAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),
          (telemetryBuilder, value) -> {
            roleNameAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),
          (telemetryBuilder, value) -> {
            roleInstanceAttributeNoLongerSupported.recordWarning();
          })
      .exact(
          AiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),
          (telemetryBuilder, value) -> {
            instrumentationKeyAttributeNoLongerSupported.recordWarning();
          });
}
}
|
Instead of the `StatelessIdentityDoFn` we could use `MapElements.into(...).via(e -> KV.of("", e.getValue()))`, which would enforce the shuffle semantically. That might improve readability a bit.
|
/**
 * Builds a Flink job graph for a Combine followed by a stateful ParDo, optionally
 * inserting a stateless identity ParDo in between to break the stable key partitioning.
 */
private JobGraph getStatefulParDoAfterCombineChainingJobGraph(boolean stablePartitioning) {
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  final FlinkStreamingPipelineTranslator translator =
      new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
  final PipelineOptions options = PipelineOptionsFactory.create();
  options.setRunner(FlinkRunner.class);
  final Pipeline pipeline = Pipeline.create(options);
  PCollection<KV<String, Long>> counts =
      pipeline
          .apply(Create.of("foo", "bar").withCoder(StringUtf8Coder.of()))
          .apply(Count.perElement());
  if (!stablePartitioning) {
    // An interleaved stateless ParDo makes the keyed partitioning non-stable.
    counts = counts.apply(ParDo.of(new StatelessIdentityDoFn<>()));
  }
  counts.apply(ParDo.of(new StatefulNoopDoFn<>()));
  translator.translate(pipeline);
  return env.getStreamGraph().getJobGraph();
}
|
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
|
// Builds a job graph for Combine -> (optional stateless identity ParDo) -> stateful
// ParDo; the optional ParDo breaks stable partitioning so chaining tests can compare
// vertex counts between the two variants.
private JobGraph getStatefulParDoAfterCombineChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Long>> aggregate =
pipeline
.apply(Create.of("foo", "bar").withCoder(StringUtf8Coder.of()))
.apply(Count.perElement());
if (!stablePartitioning) {
// Interleaved stateless ParDo => partitioning is no longer provably stable.
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
}
|
// Tests for FlinkStreamingPipelineTranslator: sharding-function behavior of
// FlinkAutoBalancedShardKeyShardingFunction (max-parallelism resolution, cache
// serialization/stability/size) and operator chaining of stateful ParDos after
// Combine / GroupByKey.
class FlinkStreamingPipelineTranslatorTest {
@Test
public void testAutoBalanceShardKeyResolvesMaxParallelism() {
int parallelism = 3;
// Non-positive maxParallelism (-1 or 0) falls back to Flink's computed default.
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, -1, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, 0, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)))
}
@Test
public void testAutoBalanceShardKeyCacheIsNotSerialized() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
// Cache is created lazily and must not survive serialization round-trips.
assertNull(fn.getCache());
fn.assignShardKey("target/destination1", "one", 10);
fn.assignShardKey("target/destination2", "two", 10);
assertThat(fn.getCache().size(), equalTo(2));
assertThat(SerializableUtils.clone(fn).getCache(), nullValue());
}
@Test
public void testAutoBalanceShardKeyCacheIsStable() throws Exception {
int numShards = 50;
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
List<KV<String, String>> inputs = Lists.newArrayList();
for (int i = 0; i < numShards * 100; i++) {
inputs.add(KV.of("target/destination/1", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/2", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/3", UUID.randomUUID().toString()));
}
Map<KV<String, Integer>, ShardedKey<Integer>> generatedKeys = new HashMap<>();
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
generatedKeys.put(KV.of(input.getKey(), shardKey.getShardNumber()), shardKey);
}
// A fresh instance over shuffled input must reproduce the same (key, shard) mapping.
fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
Collections.shuffle(inputs);
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
ShardedKey<Integer> expectedShardKey =
generatedKeys.get(KV.of(input.getKey(), shardKey.getShardNumber()));
if (expectedShardKey != null) {
assertThat(shardKey, equalTo(expectedShardKey));
}
}
}
@Test
public void testAutoBalanceShardKeyCacheMaxSize() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
// Feeding twice the capacity of distinct destinations must not grow past the cap.
for (int i = 0; i < FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE * 2; i++) {
fn.assignShardKey(UUID.randomUUID().toString(), "one", 2);
}
assertThat(
fn.getCache().size(), equalTo(FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE));
}
@Test
public void testStatefulParDoAfterCombineChaining() {
// Unstable partitioning must prevent chaining, yielding exactly one extra vertex.
final JobGraph stablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
@Test
public void testStatefulParDoAfterGroupByKeyChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
// Same shape as the Combine variant, but the aggregation is a GroupByKey.
private JobGraph getStatefulParDoAfterGroupByKeyChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Iterable<Long>>> aggregate =
pipeline
.apply(
Create.of(KV.of("foo", 1L), KV.of("bar", 1L))
.withCoder(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())))
.apply(GroupByKey.create());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
}
// Pass-through ParDo used to break stable partitioning between transforms.
private static class StatelessIdentityDoFn<KeyT, ValueT>
extends DoFn<KV<KeyT, ValueT>, KV<KeyT, ValueT>> {
@ProcessElement
public void processElement(ProcessContext ctx) {
ctx.output(ctx.element());
}
}
// Stateful DoFn (declares a timer) that produces no output; forces keyed state.
private static class StatefulNoopDoFn<KeyT, ValueT> extends DoFn<KV<KeyT, ValueT>, Void> {
@TimerId("my-timer")
private final TimerSpec myTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME);
@ProcessElement
public void processElement() {
}
@OnTimer("my-timer")
public void onMyTimer() {
}
}
}
|
// Tests for FlinkStreamingPipelineTranslator: sharding-function behavior of
// FlinkAutoBalancedShardKeyShardingFunction (max-parallelism resolution, cache
// serialization/stability/size) and operator chaining of stateful ParDos after
// Combine / GroupByKey.
class FlinkStreamingPipelineTranslatorTest {
@Test
public void testAutoBalanceShardKeyResolvesMaxParallelism() {
int parallelism = 3;
// Non-positive maxParallelism (-1 or 0) falls back to Flink's computed default.
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, -1, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, 0, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
}
@Test
public void testAutoBalanceShardKeyCacheIsNotSerialized() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
// Cache is created lazily and must not survive serialization round-trips.
assertNull(fn.getCache());
fn.assignShardKey("target/destination1", "one", 10);
fn.assignShardKey("target/destination2", "two", 10);
assertThat(fn.getCache().size(), equalTo(2));
assertThat(SerializableUtils.clone(fn).getCache(), nullValue());
}
@Test
public void testAutoBalanceShardKeyCacheIsStable() throws Exception {
int numShards = 50;
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
List<KV<String, String>> inputs = Lists.newArrayList();
for (int i = 0; i < numShards * 100; i++) {
inputs.add(KV.of("target/destination/1", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/2", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/3", UUID.randomUUID().toString()));
}
Map<KV<String, Integer>, ShardedKey<Integer>> generatedKeys = new HashMap<>();
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
generatedKeys.put(KV.of(input.getKey(), shardKey.getShardNumber()), shardKey);
}
// A fresh instance over shuffled input must reproduce the same (key, shard) mapping.
fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
Collections.shuffle(inputs);
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
ShardedKey<Integer> expectedShardKey =
generatedKeys.get(KV.of(input.getKey(), shardKey.getShardNumber()));
if (expectedShardKey != null) {
assertThat(shardKey, equalTo(expectedShardKey));
}
}
}
@Test
public void testAutoBalanceShardKeyCacheMaxSize() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
// Feeding twice the capacity of distinct destinations must not grow past the cap.
for (int i = 0; i < FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE * 2; i++) {
fn.assignShardKey(UUID.randomUUID().toString(), "one", 2);
}
assertThat(
fn.getCache().size(), equalTo(FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE));
}
@Test
public void testStatefulParDoAfterCombineChaining() {
// Unstable partitioning must prevent chaining, yielding exactly one extra vertex.
final JobGraph stablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
@Test
public void testStatefulParDoAfterGroupByKeyChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
// Same shape as the Combine variant, but the aggregation is a GroupByKey.
private JobGraph getStatefulParDoAfterGroupByKeyChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Iterable<Long>>> aggregate =
pipeline
.apply(
Create.of(KV.of("foo", 1L), KV.of("bar", 1L))
.withCoder(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())))
.apply(GroupByKey.create());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
}
// Pass-through ParDo used to break stable partitioning between transforms.
private static class StatelessIdentityDoFn<KeyT, ValueT>
extends DoFn<KV<KeyT, ValueT>, KV<KeyT, ValueT>> {
@ProcessElement
public void processElement(ProcessContext ctx) {
ctx.output(ctx.element());
}
}
// Stateful DoFn (declares a timer) that produces no output; forces keyed state.
private static class StatefulNoopDoFn<KeyT, ValueT> extends DoFn<KV<KeyT, ValueT>, Void> {
@TimerId("my-timer")
private final TimerSpec myTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME);
@ProcessElement
public void processElement() {
}
@OnTimer("my-timer")
public void onMyTimer() {
}
}
}
|
This won't be the correct error when the constructed record is also open, right? E.g., in the example you raised:

```ballerina
type Student record {|
    string name;
    int age;
    int...;
|};

type Employee record {
    string name;
    int age;
};

public function main() {
    Employee emp = {name: "A", age: 1};
    Student _ = {...emp};
}
```
|
// Type-checks a single field of a mapping constructor (key-value, var-name, or
// spread-op field) against the expected mapping type (record or map), returning the
// checked type or semanticError. Spread fields are fully handled inline and return
// early; other fields fall through to a final checkExpr against the resolved field type.
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType, AnalyzerData data) {
BType fieldType = symTable.semanticError;
boolean keyValueField = field.isKeyValueField();
boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
boolean readOnlyConstructorField = false;
String fieldName = null;
Location pos = null;
BLangExpression valueExpr = null;
if (keyValueField) {
valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
} else if (!spreadOpField) {
valueExpr = (BLangRecordVarNameField) field;
}
boolean isOptional = false;
switch (mappingType.tag) {
case TypeTags.RECORD:
if (keyValueField) {
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
(BRecordType) mappingType, data);
BVarSymbol fieldSymbol = typeSymbolPair.fieldSymbol;
// Optional fields later get nil added to the expected type of the value expr.
if (fieldSymbol != null && Symbols.isOptional(fieldSymbol)) {
isOptional = true;
}
fieldType = typeSymbolPair.determinedType;
key.fieldSymbol = fieldSymbol;
readOnlyConstructorField = keyValField.readonly;
pos = key.expr.pos;
fieldName = getKeyValueFieldName(keyValField);
} else if (spreadOpField) {
BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
checkExpr(spreadExpr, data);
BRecordType mappingRecordType = (BRecordType) mappingType;
BType spreadExprType = Types.getImpliedType(spreadExpr.getBType());
// Spreading a map: its constraint must fit every possible field of the record.
if (spreadExprType.tag == TypeTags.MAP) {
return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
getAllFieldType(mappingRecordType),
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
if (spreadExprType.tag != TypeTags.RECORD) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadExprType);
return symTable.semanticError;
}
BRecordType spreadRecordType = (BRecordType) spreadExprType;
boolean errored = false;
// Check each spread field against the corresponding target record field.
for (BField bField : spreadRecordType.fields.values()) {
BType specFieldType = bField.type;
// never-typed fields can't actually carry a value, so skip them.
if (types.isNeverTypeOrStructureTypeWithARequiredNeverMember(specFieldType)) {
continue;
}
BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, data.env, bField.name,
mappingType.tsymbol);
BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
mappingRecordType);
if (expectedFieldType != symTable.semanticError &&
!types.isAssignable(specFieldType, expectedFieldType)) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
expectedFieldType, bField.name, specFieldType);
if (!errored) {
errored = true;
}
}
}
// An open record can only be spread into a target that accepts its rest type.
// NOTE(review): the error code names a "closed record" target, but this branch
// also fires for an open target with an incompatible rest type — TODO confirm.
if (!spreadRecordType.sealed && (mappingRecordType.sealed ||
!types.isAssignable(spreadRecordType.restFieldType, mappingRecordType.restFieldType))) {
dlog.error(spreadExpr.pos,
DiagnosticErrorCode.INVALID_SPREAD_OP_TO_CREATE_CLOSED_RECORD_FROM_OPEN_RECORD,
spreadExprType);
errored = true;
}
return errored ? symTable.semanticError : symTable.noType;
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
(BRecordType) mappingType, data);
BVarSymbol fieldSymbol = typeSymbolPair.fieldSymbol;
if (fieldSymbol != null && Symbols.isOptional(fieldSymbol)) {
isOptional = true;
}
fieldType = typeSymbolPair.determinedType;
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
break;
case TypeTags.MAP:
if (spreadOpField) {
BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
BType spreadOpType = checkExpr(spreadExp, data);
BType spreadOpMemberType = checkSpreadFieldWithMapType(spreadOpType);
if (spreadOpMemberType.tag == symTable.semanticError.tag) {
dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadOpType);
return symTable.semanticError;
}
return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
boolean validMapKey;
if (keyValueField) {
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey, data);
readOnlyConstructorField = keyValField.readonly;
pos = key.pos;
fieldName = getKeyValueFieldName(keyValField);
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false, data);
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
break;
}
// readonly fields must have an immutable (or immutable-capable) type; selectively
// immutable types are wrapped in an immutable intersection.
if (readOnlyConstructorField) {
if (types.isSelectivelyImmutableType(fieldType, data.env.enclPkg.packageID)) {
fieldType =
ImmutableTypeCloner.getImmutableIntersectionType(pos, types, fieldType, data.env, symTable,
anonymousModelHelper, names, new HashSet<>());
} else if (!types.isInherentlyImmutableType(fieldType)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
fieldType = symTable.semanticError;
}
}
if (spreadOpField) {
valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
}
BLangExpression exprToCheck = valueExpr;
// Under non-error-logging (exploratory) checking, work on a clone so the original
// AST is not mutated; otherwise record the resolved type on the field node.
if (data.commonAnalyzerData.nonErrorLoggingCheck) {
exprToCheck = nodeCloner.cloneNode(valueExpr);
} else {
((BLangNode) field).setBType(fieldType);
}
return checkExpr(exprToCheck, data.env,
isOptional ? types.addNilForNillableAccessType(fieldType) : fieldType, data);
}
|
errored = true;
|
/**
 * Type-checks a single field of a mapping constructor expression against the expected
 * mapping type (a record or a map) and returns the type determined for the field's
 * value expression, or {@code semanticError} when the field is invalid.
 *
 * Handles all three field kinds: key-value fields ({@code k: v}), variable-name fields
 * ({@code {x}}), and spread-operator fields ({@code ...expr}). For spread fields against
 * a record, checking is done per spread-source field and the method returns early with
 * {@code noType} on success or {@code semanticError} on failure.
 *
 * @param field       the mapping-constructor field to check
 * @param mappingType the expected type of the enclosing mapping constructor
 * @param data        analyzer state for the current type-check pass
 * @return the checked type of the field's value expression, or {@code semanticError}
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType, AnalyzerData data) {
    BType fieldType = symTable.semanticError;
    boolean keyValueField = field.isKeyValueField();
    boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
    boolean readOnlyConstructorField = false;
    String fieldName = null;
    Location pos = null;
    BLangExpression valueExpr = null;
    // For a variable-name field the field node itself doubles as the value expression;
    // spread fields extract their expression later, once the mapping kind is known.
    if (keyValueField) {
        valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
    } else if (!spreadOpField) {
        valueExpr = (BLangRecordVarNameField) field;
    }
    boolean isOptional = false;
    switch (mappingType.tag) {
        case TypeTags.RECORD:
            if (keyValueField) {
                // Resolve the key against the record's fields; an optional field ('x?')
                // later widens the expected value type with nil.
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                        (BRecordType) mappingType, data);
                BVarSymbol fieldSymbol = typeSymbolPair.fieldSymbol;
                if (fieldSymbol != null && Symbols.isOptional(fieldSymbol)) {
                    isOptional = true;
                }
                fieldType = typeSymbolPair.determinedType;
                key.fieldSymbol = fieldSymbol;
                readOnlyConstructorField = keyValField.readonly;
                pos = key.expr.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else if (spreadOpField) {
                BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                checkExpr(spreadExpr, data);
                BRecordType mappingRecordType = (BRecordType) mappingType;
                BType spreadExprType = Types.getImpliedType(spreadExpr.getBType());
                if (spreadExprType.tag == TypeTags.MAP) {
                    // Spreading a map into a record: the map's constraint must be
                    // assignable to every possible field type of the record.
                    return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                            getAllFieldType(mappingRecordType),
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }
                if (spreadExprType.tag != TypeTags.RECORD) {
                    // Only maps and records may be spread into a record constructor.
                    dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadExprType);
                    return symTable.semanticError;
                }
                BRecordType spreadRecordType = (BRecordType) spreadExprType;
                boolean errored = false;
                // Check each field of the spread record against the corresponding
                // expected field of the target record.
                for (BField bField : spreadRecordType.fields.values()) {
                    BType specFieldType = bField.type;
                    if (types.isNeverTypeOrStructureTypeWithARequiredNeverMember(specFieldType)) {
                        // never-typed fields can carry no value, so they are skipped.
                        continue;
                    }
                    BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, data.env, bField.name,
                            mappingType.tsymbol);
                    BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                            mappingRecordType);
                    if (expectedFieldType != symTable.semanticError &&
                            !types.isAssignable(specFieldType, expectedFieldType)) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                expectedFieldType, bField.name, specFieldType);
                        if (!errored) {
                            errored = true;
                        }
                    }
                }
                // An open spread source may carry extra fields; those are only valid if
                // the target record is open too, with a compatible rest type.
                if (!spreadRecordType.sealed) {
                    if (mappingRecordType.sealed) {
                        dlog.error(spreadExpr.pos,
                                DiagnosticErrorCode.INVALID_SPREAD_FIELD_TO_CREATE_CLOSED_RECORD_FROM_OPEN_RECORD,
                                spreadRecordType);
                        errored = true;
                    } else if (!types.isAssignable(spreadRecordType.restFieldType,
                            mappingRecordType.restFieldType)) {
                        dlog.error(spreadExpr.pos,
                                DiagnosticErrorCode.INVALID_SPREAD_FIELD_REST_FIELD_MISMATCH,
                                spreadRecordType, spreadRecordType.restFieldType,
                                mappingRecordType.restFieldType);
                        errored = true;
                    }
                }
                // Spread fields are fully checked here; no single value type to return.
                return errored ? symTable.semanticError : symTable.noType;
            } else {
                // Variable-name field: 'x' is shorthand for 'x: x'.
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                        (BRecordType) mappingType, data);
                BVarSymbol fieldSymbol = typeSymbolPair.fieldSymbol;
                if (fieldSymbol != null && Symbols.isOptional(fieldSymbol)) {
                    isOptional = true;
                }
                fieldType = typeSymbolPair.determinedType;
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            break;
        case TypeTags.MAP:
            if (spreadOpField) {
                // Spreading into a map: the spread source's member type must be
                // assignable to the map's constraint.
                BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                BType spreadOpType = checkExpr(spreadExp, data);
                BType spreadOpMemberType = checkSpreadFieldWithMapType(spreadOpType);
                if (spreadOpMemberType.tag == symTable.semanticError.tag) {
                    dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadOpType);
                    return symTable.semanticError;
                }
                return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                        DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            }
            // For a map, any valid key maps to the map's constraint type.
            boolean validMapKey;
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey, data);
                readOnlyConstructorField = keyValField.readonly;
                pos = key.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false, data);
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
            break;
    }
    // A 'readonly' field requires an immutable (or immutable-convertible) value type.
    if (readOnlyConstructorField) {
        if (types.isSelectivelyImmutableType(fieldType, data.env.enclPkg.packageID)) {
            fieldType =
                    ImmutableTypeCloner.getImmutableIntersectionType(pos, types, fieldType, data.env, symTable,
                            anonymousModelHelper, names, new HashSet<>());
        } else if (!types.isInherentlyImmutableType(fieldType)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
            fieldType = symTable.semanticError;
        }
    }
    if (spreadOpField) {
        // Reaching here with a spread field means mappingType matched neither switch
        // case (e.g. an errored type); check the spread expression directly.
        valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
    }
    BLangExpression exprToCheck = valueExpr;
    if (data.commonAnalyzerData.nonErrorLoggingCheck) {
        // Silent (exploratory) check: work on a clone so the original AST is untouched.
        exprToCheck = nodeCloner.cloneNode(valueExpr);
    } else {
        ((BLangNode) field).setBType(fieldType);
    }
    // Optional fields accept nil in addition to the declared type.
    return checkExpr(exprToCheck, data.env,
            isOptional ? types.addNilForNillableAccessType(fieldType) : fieldType, data);
}
|
// Accumulates the member types inferred for a tuple being constructed:
// fixedMemberTypes holds the types of the leading fixed members, while
// restMemberTypes holds the types that fall under the tuple's rest descriptor.
class InferredTupleDetails {
    List<BType> fixedMemberTypes = new ArrayList<>();
    List<BType> restMemberTypes = new ArrayList<>();
}
|
// Accumulates the member types inferred for a tuple being constructed:
// fixedMemberTypes holds the types of the leading fixed members, while
// restMemberTypes holds the types that fall under the tuple's rest descriptor.
class InferredTupleDetails {
    List<BType> fixedMemberTypes = new ArrayList<>();
    List<BType> restMemberTypes = new ArrayList<>();
}
|
I think this is incorrect. Don't we want to use `getIdleTcpConnectionTimeout`?
|
/**
 * Seeds the builder from the supplied connection policy, falling back to the
 * process-wide {@code DEFAULT_OPTIONS} for settings the policy does not model.
 *
 * @param connectionPolicy the connection policy to derive transport options from
 */
public Builder(ConnectionPolicy connectionPolicy) {
    this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
    this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
    this.connectTimeout = connectionPolicy.getConnectTimeout();
    // FIX: Direct TCP channels must honor the TCP-specific idle timeout;
    // getIdleConnectionTimeout() is the gateway-mode setting.
    this.idleChannelTimeout = connectionPolicy.getIdleTcpConnectionTimeout();
    this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
    this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
    this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
    this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
    this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
    this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
    this.requestTimeout = connectionPolicy.getRequestTimeout();
    this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
    this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
    this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
    this.threadCount = DEFAULT_OPTIONS.threadCount;
    this.userAgent = DEFAULT_OPTIONS.userAgent;
}
|
this.idleChannelTimeout = connectionPolicy.getIdleConnectionTimeout();
|
/**
 * Seeds the builder from the supplied connection policy; anything the policy
 * does not model starts from the process-wide {@code DEFAULT_OPTIONS}.
 *
 * @param connectionPolicy the connection policy to derive transport options from
 */
public Builder(ConnectionPolicy connectionPolicy) {
    // Settings that mirror the caller-supplied connection policy.
    this.connectTimeout = connectionPolicy.getConnectTimeout();
    this.idleChannelTimeout = connectionPolicy.getIdleTcpConnectionTimeout();
    this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
    this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
    this.requestTimeout = connectionPolicy.getRequestTimeout();
    // Everything else starts from the process-wide defaults.
    this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
    this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
    this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
    this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
    this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
    this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
    this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
    this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
    this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
    this.threadCount = DEFAULT_OPTIONS.threadCount;
    this.userAgent = DEFAULT_OPTIONS.userAgent;
}
|
/**
 * Fluent builder of Direct TCP transport {@code Options}.
 *
 * Defaults come from {@code DEFAULT_OPTIONS}, which can be overridden at process start via the
 * {@code azure.cosmos.directTcp.defaultOptions} system property (inline JSON), a JSON file named
 * by the {@code azure.cosmos.directTcp.defaultOptionsFile} property, or a classpath resource
 * named {@code azure.cosmos.directTcp.defaultOptions.json}.
 */
class Builder {
    private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
    private static final Options DEFAULT_OPTIONS;
    static {
        Options options = null;
        try {
            // 1. Inline JSON supplied directly on the system property takes precedence.
            final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
            if (string != null) {
                try {
                    options = RntbdObjectMapper.readValue(string, Options.class);
                } catch (IOException error) {
                    logger.error("failed to parse default Direct TCP options {} due to ", string, error);
                }
            }
            // 2. Otherwise, a JSON file named by the "<property>File" system property.
            if (options == null) {
                final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
                if (path != null) {
                    try {
                        options = RntbdObjectMapper.readValue(new File(path), Options.class);
                    } catch (IOException error) {
                        logger.error("failed to load default Direct TCP options from {} due to ", path, error);
                    }
                }
            }
            // 3. Otherwise, a classpath resource bundled with the client, if present.
            if (options == null) {
                final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
                final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
                try (InputStream stream = loader.getResourceAsStream(name)) {
                    if (stream != null) {
                        options = RntbdObjectMapper.readValue(stream, Options.class);
                    }
                } catch (IOException error) {
                    logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
                }
            }
        } finally {
            // 4. Fall back to options derived from the default connection policy.
            if (options == null) {
                DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
            } else {
                logger.info("Updated default Direct TCP options from system property {}: {}",
                    DEFAULT_OPTIONS_PROPERTY_NAME,
                    options);
                DEFAULT_OPTIONS = options;
            }
        }
    }
    private int bufferPageSize;
    private Duration connectionAcquisitionTimeout;
    private Duration connectTimeout;
    private Duration idleChannelTimeout;
    private Duration idleEndpointTimeout;
    private int maxBufferCapacity;
    private int maxChannelsPerEndpoint;
    private int maxRequestsPerChannel;
    private Duration receiveHangDetectionTime;
    private Duration requestExpiryInterval;
    private Duration requestTimeout;
    private Duration requestTimerResolution;
    private Duration sendHangDetectionTime;
    private Duration shutdownTimeout;
    private int threadCount;
    private UserAgentContainer userAgent;
    /** Buffer page size; must be a power of 2 no smaller than 4096. */
    public Builder bufferPageSize(final int value) {
        checkArgument(value >= 4096 && (value & (value - 1)) == 0,
            "expected value to be a power of 2 >= 4096, not %s",
            value);
        this.bufferPageSize = value;
        return this;
    }
    public Options build() {
        checkState(this.bufferPageSize <= this.maxBufferCapacity,
            "expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
            this.bufferPageSize,
            this.maxBufferCapacity);
        return new Options(this);
    }
    /** Connection acquisition timeout; negative values are clamped to zero. */
    public Builder connectionAcquisitionTimeout(final Duration value) {
        checkNotNull(value, "expected non-null value");
        // BUG FIX: this previously assigned this.connectTimeout (copy/paste error),
        // silently discarding the requested acquisition timeout.
        this.connectionAcquisitionTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
        return this;
    }
    /** Connect timeout; null is accepted and leaves the timeout unset. */
    public Builder connectionTimeout(final Duration value) {
        checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.connectTimeout = value;
        return this;
    }
    public Builder idleChannelTimeout(final Duration value) {
        checkNotNull(value, "expected non-null value");
        this.idleChannelTimeout = value;
        return this;
    }
    public Builder idleEndpointTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.idleEndpointTimeout = value;
        return this;
    }
    /** Maximum buffer capacity; must be a positive power of 2. */
    public Builder maxBufferCapacity(final int value) {
        checkArgument(value > 0 && (value & (value - 1)) == 0,
            "expected positive value, not %s",
            value);
        this.maxBufferCapacity = value;
        return this;
    }
    public Builder maxChannelsPerEndpoint(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.maxChannelsPerEndpoint = value;
        return this;
    }
    public Builder maxRequestsPerChannel(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.maxRequestsPerChannel = value;
        return this;
    }
    public Builder receiveHangDetectionTime(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.receiveHangDetectionTime = value;
        return this;
    }
    public Builder requestExpiryInterval(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestExpiryInterval = value;
        return this;
    }
    public Builder requestTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestTimeout = value;
        return this;
    }
    public Builder requestTimerResolution(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestTimerResolution = value;
        return this;
    }
    public Builder sendHangDetectionTime(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.sendHangDetectionTime = value;
        return this;
    }
    public Builder shutdownTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.shutdownTimeout = value;
        return this;
    }
    public Builder threadCount(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.threadCount = value;
        return this;
    }
    public Builder userAgent(final UserAgentContainer value) {
        checkNotNull(value, "expected non-null value");
        this.userAgent = value;
        return this;
    }
}
|
/**
 * Fluent builder of Direct TCP transport {@code Options}.
 *
 * Defaults come from {@code DEFAULT_OPTIONS}, which can be overridden at process start via the
 * {@code azure.cosmos.directTcp.defaultOptions} system property (inline JSON), a JSON file named
 * by the {@code azure.cosmos.directTcp.defaultOptionsFile} property, or a classpath resource
 * named {@code azure.cosmos.directTcp.defaultOptions.json}.
 */
class Builder {
    private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
    private static final Options DEFAULT_OPTIONS;
    static {
        Options options = null;
        try {
            // 1. Inline JSON supplied directly on the system property takes precedence.
            final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
            if (string != null) {
                try {
                    options = RntbdObjectMapper.readValue(string, Options.class);
                } catch (IOException error) {
                    logger.error("failed to parse default Direct TCP options {} due to ", string, error);
                }
            }
            // 2. Otherwise, a JSON file named by the "<property>File" system property.
            if (options == null) {
                final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
                if (path != null) {
                    try {
                        options = RntbdObjectMapper.readValue(new File(path), Options.class);
                    } catch (IOException error) {
                        logger.error("failed to load default Direct TCP options from {} due to ", path, error);
                    }
                }
            }
            // 3. Otherwise, a classpath resource bundled with the client, if present.
            if (options == null) {
                final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
                final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
                try (InputStream stream = loader.getResourceAsStream(name)) {
                    if (stream != null) {
                        options = RntbdObjectMapper.readValue(stream, Options.class);
                    }
                } catch (IOException error) {
                    logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
                }
            }
        } finally {
            // 4. Fall back to options derived from the default connection policy.
            if (options == null) {
                DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
            } else {
                logger.info("Updated default Direct TCP options from system property {}: {}",
                    DEFAULT_OPTIONS_PROPERTY_NAME,
                    options);
                DEFAULT_OPTIONS = options;
            }
        }
    }
    private int bufferPageSize;
    private Duration connectionAcquisitionTimeout;
    private Duration connectTimeout;
    private Duration idleChannelTimeout;
    private Duration idleEndpointTimeout;
    private int maxBufferCapacity;
    private int maxChannelsPerEndpoint;
    private int maxRequestsPerChannel;
    private Duration receiveHangDetectionTime;
    private Duration requestExpiryInterval;
    private Duration requestTimeout;
    private Duration requestTimerResolution;
    private Duration sendHangDetectionTime;
    private Duration shutdownTimeout;
    private int threadCount;
    private UserAgentContainer userAgent;
    /** Buffer page size; must be a power of 2 no smaller than 4096. */
    public Builder bufferPageSize(final int value) {
        checkArgument(value >= 4096 && (value & (value - 1)) == 0,
            "expected value to be a power of 2 >= 4096, not %s",
            value);
        this.bufferPageSize = value;
        return this;
    }
    public Options build() {
        checkState(this.bufferPageSize <= this.maxBufferCapacity,
            "expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
            this.bufferPageSize,
            this.maxBufferCapacity);
        return new Options(this);
    }
    /** Connection acquisition timeout; negative values are clamped to zero. */
    public Builder connectionAcquisitionTimeout(final Duration value) {
        checkNotNull(value, "expected non-null value");
        // BUG FIX: this previously assigned this.connectTimeout (copy/paste error),
        // silently discarding the requested acquisition timeout.
        this.connectionAcquisitionTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
        return this;
    }
    /** Connect timeout; null is accepted and leaves the timeout unset. */
    public Builder connectionTimeout(final Duration value) {
        checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.connectTimeout = value;
        return this;
    }
    public Builder idleChannelTimeout(final Duration value) {
        checkNotNull(value, "expected non-null value");
        this.idleChannelTimeout = value;
        return this;
    }
    public Builder idleEndpointTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.idleEndpointTimeout = value;
        return this;
    }
    /** Maximum buffer capacity; must be a positive power of 2. */
    public Builder maxBufferCapacity(final int value) {
        checkArgument(value > 0 && (value & (value - 1)) == 0,
            "expected positive value, not %s",
            value);
        this.maxBufferCapacity = value;
        return this;
    }
    public Builder maxChannelsPerEndpoint(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.maxChannelsPerEndpoint = value;
        return this;
    }
    public Builder maxRequestsPerChannel(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.maxRequestsPerChannel = value;
        return this;
    }
    public Builder receiveHangDetectionTime(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.receiveHangDetectionTime = value;
        return this;
    }
    public Builder requestExpiryInterval(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestExpiryInterval = value;
        return this;
    }
    public Builder requestTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestTimeout = value;
        return this;
    }
    public Builder requestTimerResolution(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.requestTimerResolution = value;
        return this;
    }
    public Builder sendHangDetectionTime(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.sendHangDetectionTime = value;
        return this;
    }
    public Builder shutdownTimeout(final Duration value) {
        checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
            "expected positive value, not %s",
            value);
        this.shutdownTimeout = value;
        return this;
    }
    public Builder threadCount(final int value) {
        checkArgument(value > 0, "expected positive value, not %s", value);
        this.threadCount = value;
        return this;
    }
    public Builder userAgent(final UserAgentContainer value) {
        checkNotNull(value, "expected non-null value");
        this.userAgent = value;
        return this;
    }
}
|
Nevermind, I see it just continued the trend from before.
|
/**
 * Bootstraps the SDK harness from environment values supplied by {@code environmentVarGetter}
 * (typically {@code System::getenv}): reads the harness id, pipeline options (optionally
 * overridden by a PIPELINE_OPTIONS_FILE) and the logging/control/status API service
 * descriptors, then delegates to the main overload that starts the harness.
 *
 * @param environmentVarGetter maps an environment variable name to its value, or null if unset
 * @throws Exception if the harness fails to start
 */
public static void main(Function<String, String> environmentVarGetter) throws Exception {
  JvmInitializers.runOnStartup();
  System.out.format("SDK Fn Harness started%n");
  // Read each environment value exactly once and reuse it below (the original
  // fetched HARNESS_ID and each descriptor twice).
  String id = environmentVarGetter.apply(HARNESS_ID);
  String loggingDescriptor = environmentVarGetter.apply(LOGGING_API_SERVICE_DESCRIPTOR);
  String controlDescriptor = environmentVarGetter.apply(CONTROL_API_SERVICE_DESCRIPTOR);
  String statusDescriptor = environmentVarGetter.apply(STATUS_API_SERVICE_DESCRIPTOR);
  System.out.format("Harness ID %s%n", id);
  System.out.format("Logging location %s%n", loggingDescriptor);
  System.out.format("Control location %s%n", controlDescriptor);
  System.out.format("Status location %s%n", statusDescriptor);
  String pipelineOptionsJson = environmentVarGetter.apply(PIPELINE_OPTIONS);
  try {
    String pipelineOptionsPath = environmentVarGetter.apply(PIPELINE_OPTIONS_FILE);
    System.out.format("Pipeline Options File %s%n", pipelineOptionsPath);
    if (pipelineOptionsPath != null) {
      Path filePath = Paths.get(pipelineOptionsPath);
      if (Files.exists(filePath)) {
        System.out.format(
            "Pipeline Options File %s exists. Overriding existing options.%n",
            pipelineOptionsPath);
        pipelineOptionsJson = new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);
      }
    }
  } catch (Exception e) {
    // Best effort: fall back to the PIPELINE_OPTIONS environment value.
    System.out.format("Problem loading pipeline options from file: %s%n", e.getMessage());
  }
  System.out.format("Pipeline options %s%n", pipelineOptionsJson);
  PipelineOptions options = PipelineOptionsTranslation.fromJson(pipelineOptionsJson);
  Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor =
      getApiServiceDescriptor(loggingDescriptor);
  Endpoints.ApiServiceDescriptor controlApiServiceDescriptor =
      getApiServiceDescriptor(controlDescriptor);
  Endpoints.ApiServiceDescriptor statusApiServiceDescriptor =
      statusDescriptor == null ? null : getApiServiceDescriptor(statusDescriptor);
  String runnerCapabilitesOrNull = environmentVarGetter.apply(RUNNER_CAPABILITIES);
  Set<String> runnerCapabilites =
      runnerCapabilitesOrNull == null
          ? Collections.emptySet()
          : ImmutableSet.copyOf(runnerCapabilitesOrNull.split("\\s+"));
  main(
      id,
      options,
      runnerCapabilites,
      loggingApiServiceDescriptor,
      controlApiServiceDescriptor,
      statusApiServiceDescriptor);
}
|
System.out.format("Pipeline options %s%n", pipelineOptionsJson);
|
/**
 * Bootstraps the SDK harness from environment values supplied by {@code environmentVarGetter}
 * (typically {@code System::getenv}): reads the harness id, pipeline options (optionally
 * overridden by a PIPELINE_OPTIONS_FILE) and the logging/control/status API service
 * descriptors, then delegates to the main overload that starts the harness.
 *
 * @param environmentVarGetter maps an environment variable name to its value, or null if unset
 * @throws Exception if the harness fails to start
 */
public static void main(Function<String, String> environmentVarGetter) throws Exception {
  JvmInitializers.runOnStartup();
  System.out.format("SDK Fn Harness started%n");
  // Read each environment value exactly once and reuse it below (the original
  // fetched HARNESS_ID and each descriptor twice).
  String id = environmentVarGetter.apply(HARNESS_ID);
  String loggingDescriptor = environmentVarGetter.apply(LOGGING_API_SERVICE_DESCRIPTOR);
  String controlDescriptor = environmentVarGetter.apply(CONTROL_API_SERVICE_DESCRIPTOR);
  String statusDescriptor = environmentVarGetter.apply(STATUS_API_SERVICE_DESCRIPTOR);
  System.out.format("Harness ID %s%n", id);
  System.out.format("Logging location %s%n", loggingDescriptor);
  System.out.format("Control location %s%n", controlDescriptor);
  System.out.format("Status location %s%n", statusDescriptor);
  String pipelineOptionsJson = environmentVarGetter.apply(PIPELINE_OPTIONS);
  try {
    String pipelineOptionsPath = environmentVarGetter.apply(PIPELINE_OPTIONS_FILE);
    System.out.format("Pipeline Options File %s%n", pipelineOptionsPath);
    if (pipelineOptionsPath != null) {
      Path filePath = Paths.get(pipelineOptionsPath);
      if (Files.exists(filePath)) {
        System.out.format(
            "Pipeline Options File %s exists. Overriding existing options.%n",
            pipelineOptionsPath);
        pipelineOptionsJson = new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);
      }
    }
  } catch (Exception e) {
    // Best effort: fall back to the PIPELINE_OPTIONS environment value.
    System.out.format("Problem loading pipeline options from file: %s%n", e.getMessage());
  }
  System.out.format("Pipeline options %s%n", pipelineOptionsJson);
  PipelineOptions options = PipelineOptionsTranslation.fromJson(pipelineOptionsJson);
  Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor =
      getApiServiceDescriptor(loggingDescriptor);
  Endpoints.ApiServiceDescriptor controlApiServiceDescriptor =
      getApiServiceDescriptor(controlDescriptor);
  Endpoints.ApiServiceDescriptor statusApiServiceDescriptor =
      statusDescriptor == null ? null : getApiServiceDescriptor(statusDescriptor);
  String runnerCapabilitesOrNull = environmentVarGetter.apply(RUNNER_CAPABILITIES);
  Set<String> runnerCapabilites =
      runnerCapabilitesOrNull == null
          ? Collections.emptySet()
          : ImmutableSet.copyOf(runnerCapabilitesOrNull.split("\\s+"));
  main(
      id,
      options,
      runnerCapabilites,
      loggingApiServiceDescriptor,
      controlApiServiceDescriptor,
      statusApiServiceDescriptor);
}
|
/**
 * Main entry point into the Beam SDK harness for Java.
 *
 * <p>Configuration (harness id, pipeline options and the logging/control/status API service
 * descriptors) is read from environment variables. The harness connects to the runner's
 * control service and processes instructions until either the control or logging client
 * terminates.
 */
class FnHarness {
  private static final String HARNESS_ID = "HARNESS_ID";
  private static final String CONTROL_API_SERVICE_DESCRIPTOR = "CONTROL_API_SERVICE_DESCRIPTOR";
  private static final String LOGGING_API_SERVICE_DESCRIPTOR = "LOGGING_API_SERVICE_DESCRIPTOR";
  private static final String STATUS_API_SERVICE_DESCRIPTOR = "STATUS_API_SERVICE_DESCRIPTOR";
  private static final String PIPELINE_OPTIONS_FILE = "PIPELINE_OPTIONS_FILE";
  private static final String PIPELINE_OPTIONS = "PIPELINE_OPTIONS";
  private static final String RUNNER_CAPABILITIES = "RUNNER_CAPABILITIES";
  private static final String ENABLE_DATA_SAMPLING_EXPERIMENT = "enable_data_sampling";
  private static final Logger LOG = LoggerFactory.getLogger(FnHarness.class);

  /** Parses a text-format {@code ApiServiceDescriptor} proto from {@code descriptor}. */
  private static Endpoints.ApiServiceDescriptor getApiServiceDescriptor(String descriptor)
      throws TextFormat.ParseException {
    Endpoints.ApiServiceDescriptor.Builder apiServiceDescriptorBuilder =
        Endpoints.ApiServiceDescriptor.newBuilder();
    TextFormat.merge(descriptor, apiServiceDescriptorBuilder);
    return apiServiceDescriptorBuilder.build();
  }

  public static void main(String[] args) throws Exception {
    main(System::getenv);
  }

  /**
   * Run a FnHarness with the given id and options that attaches to the specified logging and
   * control API service descriptors.
   *
   * @param id Harness ID
   * @param options The options for this pipeline
   * @param runnerCapabilites capabilities advertised by the runner
   * @param loggingApiServiceDescriptor descriptor of the logging service
   * @param controlApiServiceDescriptor descriptor of the control service
   * @param statusApiServiceDescriptor descriptor of the status service, or null to disable it
   * @throws Exception if the harness fails to start
   */
  // FIX: the Javadoc comment must precede annotations; previously @VisibleForTesting was
  // placed first, leaving the doc comment dangling between annotation and declaration.
  @VisibleForTesting
  public static void main(
      String id,
      PipelineOptions options,
      Set<String> runnerCapabilites,
      Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor,
      Endpoints.ApiServiceDescriptor controlApiServiceDescriptor,
      @Nullable Endpoints.ApiServiceDescriptor statusApiServiceDescriptor)
      throws Exception {
    ManagedChannelFactory channelFactory;
    if (ExperimentalOptions.hasExperiment(options, "beam_fn_api_epoll")) {
      channelFactory = ManagedChannelFactory.createEpoll();
    } else {
      channelFactory = ManagedChannelFactory.createDefault();
    }
    OutboundObserverFactory outboundObserverFactory =
        HarnessStreamObserverFactories.fromOptions(options);
    main(
        id,
        options,
        runnerCapabilites,
        loggingApiServiceDescriptor,
        controlApiServiceDescriptor,
        statusApiServiceDescriptor,
        channelFactory,
        outboundObserverFactory,
        Caches.fromOptions(options));
  }

  /**
   * Run a FnHarness with the given id and options that attaches to the specified logging and
   * control API service descriptors using the given channel factory and outbound observer factory.
   *
   * @param id Harness ID
   * @param options The options for this pipeline
   * @param runnerCapabilites capabilities advertised by the runner
   * @param loggingApiServiceDescriptor descriptor of the logging service
   * @param controlApiServiceDescriptor descriptor of the control service
   * @param statusApiServiceDescriptor descriptor of the status service, or null to disable it
   * @param channelFactory factory for gRPC channels to the runner services
   * @param outboundObserverFactory factory for outbound stream observers
   * @param processWideCache process-wide cache shared across bundles
   * @throws Exception if the harness fails to start
   */
  public static void main(
      String id,
      PipelineOptions options,
      Set<String> runnerCapabilites,
      Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor,
      Endpoints.ApiServiceDescriptor controlApiServiceDescriptor,
      @Nullable Endpoints.ApiServiceDescriptor statusApiServiceDescriptor,
      ManagedChannelFactory channelFactory,
      OutboundObserverFactory outboundObserverFactory,
      Cache<Object, Object> processWideCache)
      throws Exception {
    channelFactory =
        channelFactory.withInterceptors(ImmutableList.of(AddHarnessIdInterceptor.create(id)));
    IdGenerator idGenerator = IdGenerators.decrementingLongs();
    ShortIdMap metricsShortIds = new ShortIdMap();
    ExecutorService executorService =
        options.as(ExecutorOptions.class).getScheduledExecutorService();
    ExecutionStateSampler executionStateSampler =
        new ExecutionStateSampler(options, System::currentTimeMillis);
    final DataSampler dataSampler = new DataSampler();
    // The logging client is closed by try-with-resources before the catch/finally run.
    try (BeamFnLoggingClient logging =
        BeamFnLoggingClient.createAndStart(
            options, loggingApiServiceDescriptor, channelFactory::forDescriptor)) {
      LOG.info("Fn Harness started");
      FileSystems.setDefaultPipelineOptions(options);
      EnumMap<
              BeamFnApi.InstructionRequest.RequestCase,
              ThrowingFunction<InstructionRequest, BeamFnApi.InstructionResponse.Builder>>
          handlers = new EnumMap<>(BeamFnApi.InstructionRequest.RequestCase.class);
      ManagedChannel channel = channelFactory.forDescriptor(controlApiServiceDescriptor);
      BeamFnControlGrpc.BeamFnControlStub controlStub = BeamFnControlGrpc.newStub(channel);
      BeamFnControlGrpc.BeamFnControlBlockingStub blockingControlStub =
          BeamFnControlGrpc.newBlockingStub(channel);
      BeamFnDataGrpcClient beamFnDataMultiplexer =
          new BeamFnDataGrpcClient(options, channelFactory::forDescriptor, outboundObserverFactory);
      BeamFnStateGrpcClientCache beamFnStateGrpcClientCache =
          new BeamFnStateGrpcClientCache(idGenerator, channelFactory, outboundObserverFactory);
      FinalizeBundleHandler finalizeBundleHandler = new FinalizeBundleHandler(executorService);
      boolean shouldSample =
          ExperimentalOptions.hasExperiment(options, ENABLE_DATA_SAMPLING_EXPERIMENT);
      // Process bundle descriptors are fetched from the runner on demand and cached.
      Function<String, BeamFnApi.ProcessBundleDescriptor> getProcessBundleDescriptor =
          new Function<String, ProcessBundleDescriptor>() {
            private static final String PROCESS_BUNDLE_DESCRIPTORS = "ProcessBundleDescriptors";
            private final Cache<String, BeamFnApi.ProcessBundleDescriptor> cache =
                Caches.subCache(processWideCache, PROCESS_BUNDLE_DESCRIPTORS);

            @Override
            public BeamFnApi.ProcessBundleDescriptor apply(String id) {
              return cache.computeIfAbsent(id, this::loadDescriptor);
            }

            private BeamFnApi.ProcessBundleDescriptor loadDescriptor(String id) {
              return blockingControlStub.getProcessBundleDescriptor(
                  BeamFnApi.GetProcessBundleDescriptorRequest.newBuilder()
                      .setProcessBundleDescriptorId(id)
                      .build());
            }
          };
      MetricsEnvironment.setProcessWideContainer(MetricsContainerImpl.createProcessWideContainer());
      ProcessBundleHandler processBundleHandler =
          new ProcessBundleHandler(
              options,
              runnerCapabilites,
              getProcessBundleDescriptor,
              beamFnDataMultiplexer,
              beamFnStateGrpcClientCache,
              finalizeBundleHandler,
              metricsShortIds,
              executionStateSampler,
              processWideCache,
              shouldSample ? dataSampler : null);
      logging.setProcessBundleHandler(processBundleHandler);
      BeamFnStatusClient beamFnStatusClient = null;
      if (statusApiServiceDescriptor != null) {
        beamFnStatusClient =
            new BeamFnStatusClient(
                statusApiServiceDescriptor,
                channelFactory::forDescriptor,
                processBundleHandler.getBundleProcessorCache(),
                options,
                processWideCache);
      }
      // Register one handler per instruction request type.
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.REGISTER,
          request ->
              BeamFnApi.InstructionResponse.newBuilder()
                  .setRegister(BeamFnApi.RegisterResponse.getDefaultInstance()));
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.FINALIZE_BUNDLE,
          finalizeBundleHandler::finalizeBundle);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE,
          processBundleHandler::processBundle);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE_PROGRESS,
          processBundleHandler::progress);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE_SPLIT,
          processBundleHandler::trySplit);
      handlers.put(
          InstructionRequest.RequestCase.MONITORING_INFOS,
          request ->
              BeamFnApi.InstructionResponse.newBuilder()
                  .setMonitoringInfos(
                      BeamFnApi.MonitoringInfosMetadataResponse.newBuilder()
                          .putAllMonitoringInfo(
                              metricsShortIds.get(
                                  request.getMonitoringInfos().getMonitoringInfoIdList()))));
      HarnessMonitoringInfosInstructionHandler processWideHandler =
          new HarnessMonitoringInfosInstructionHandler(metricsShortIds);
      handlers.put(
          InstructionRequest.RequestCase.HARNESS_MONITORING_INFOS,
          processWideHandler::harnessMonitoringInfos);
      handlers.put(
          InstructionRequest.RequestCase.SAMPLE_DATA, dataSampler::handleDataSampleRequest);
      JvmInitializers.runBeforeProcessing(options);
      LOG.info("Entering instruction processing loop");
      BeamFnControlClient control =
          new BeamFnControlClient(
              controlStub.withExecutor(MoreExecutors.directExecutor()),
              outboundObserverFactory,
              executorService,
              handlers);
      // Block until either the control or logging client terminates.
      CompletableFuture.anyOf(control.terminationFuture(), logging.terminationFuture()).get();
      if (beamFnStatusClient != null) {
        beamFnStatusClient.close();
      }
      processBundleHandler.shutdown();
    } catch (Exception e) {
      // NOTE(review): the exception is deliberately swallowed here; the logging client has
      // already been closed by try-with-resources, so stdout is the only reliable channel.
      System.out.println("Shutting down harness due to exception: " + e.toString());
    } finally {
      System.out.println("Shutting SDK harness down.");
      executionStateSampler.stop();
      executorService.shutdown();
    }
  }
}
|
/**
 * Main entry point into the Beam SDK Fn Harness for Java.
 *
 * <p>The harness connects back to a runner over the Fn API: it attaches a logging client, opens a
 * control channel, registers handlers for each {@code InstructionRequest} type, and then blocks
 * until either the control or logging client terminates.
 *
 * <p>Configuration is conveyed either via environment variables (see the {@code *_SERVICE_DESCRIPTOR}
 * constants below) or programmatically via the overloaded {@code main} methods.
 */
class FnHarness {
  // Environment variable names used to configure the harness when launched by a runner.
  private static final String HARNESS_ID = "HARNESS_ID";
  private static final String CONTROL_API_SERVICE_DESCRIPTOR = "CONTROL_API_SERVICE_DESCRIPTOR";
  private static final String LOGGING_API_SERVICE_DESCRIPTOR = "LOGGING_API_SERVICE_DESCRIPTOR";
  private static final String STATUS_API_SERVICE_DESCRIPTOR = "STATUS_API_SERVICE_DESCRIPTOR";
  private static final String PIPELINE_OPTIONS_FILE = "PIPELINE_OPTIONS_FILE";
  private static final String PIPELINE_OPTIONS = "PIPELINE_OPTIONS";
  private static final String RUNNER_CAPABILITIES = "RUNNER_CAPABILITIES";
  // Experiment flag enabling the DataSampler; see shouldSample below.
  private static final String ENABLE_DATA_SAMPLING_EXPERIMENT = "enable_data_sampling";
  private static final Logger LOG = LoggerFactory.getLogger(FnHarness.class);

  /**
   * Parses a text-format {@code ApiServiceDescriptor} proto (as passed via environment variables)
   * into its message form.
   */
  private static Endpoints.ApiServiceDescriptor getApiServiceDescriptor(String descriptor)
      throws TextFormat.ParseException {
    Endpoints.ApiServiceDescriptor.Builder apiServiceDescriptorBuilder =
        Endpoints.ApiServiceDescriptor.newBuilder();
    TextFormat.merge(descriptor, apiServiceDescriptorBuilder);
    return apiServiceDescriptorBuilder.build();
  }

  public static void main(String[] args) throws Exception {
    // Delegates to an overload that reads configuration from environment variables
    // (that overload is defined elsewhere in this file — not shown in this excerpt).
    main(System::getenv);
  }

  /**
   * Run a FnHarness with the given id and options that attaches to the specified logging and
   * control API service descriptors.
   *
   * @param id Harness ID
   * @param options The options for this pipeline
   * @param runnerCapabilites
   * @param loggingApiServiceDescriptor
   * @param controlApiServiceDescriptor
   * @param statusApiServiceDescriptor
   * @throws Exception
   */
  @VisibleForTesting
  public static void main(
      String id,
      PipelineOptions options,
      Set<String> runnerCapabilites,
      Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor,
      Endpoints.ApiServiceDescriptor controlApiServiceDescriptor,
      @Nullable Endpoints.ApiServiceDescriptor statusApiServiceDescriptor)
      throws Exception {
    // Choose the gRPC channel implementation; epoll is opt-in via experiment flag.
    ManagedChannelFactory channelFactory;
    if (ExperimentalOptions.hasExperiment(options, "beam_fn_api_epoll")) {
      channelFactory = ManagedChannelFactory.createEpoll();
    } else {
      channelFactory = ManagedChannelFactory.createDefault();
    }
    OutboundObserverFactory outboundObserverFactory =
        HarnessStreamObserverFactories.fromOptions(options);

    main(
        id,
        options,
        runnerCapabilites,
        loggingApiServiceDescriptor,
        controlApiServiceDescriptor,
        statusApiServiceDescriptor,
        channelFactory,
        outboundObserverFactory,
        Caches.fromOptions(options));
  }

  /**
   * Run a FnHarness with the given id and options that attaches to the specified logging and
   * control API service descriptors using the given channel factory and outbound observer factory.
   *
   * @param id Harness ID
   * @param options The options for this pipeline
   * @param runnerCapabilites
   * @param loggingApiServiceDescriptor
   * @param controlApiServiceDescriptor
   * @param statusApiServiceDescriptor
   * @param channelFactory
   * @param outboundObserverFactory
   * @param processWideCache
   * @throws Exception
   */
  public static void main(
      String id,
      PipelineOptions options,
      Set<String> runnerCapabilites,
      Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor,
      Endpoints.ApiServiceDescriptor controlApiServiceDescriptor,
      Endpoints.ApiServiceDescriptor statusApiServiceDescriptor,
      ManagedChannelFactory channelFactory,
      OutboundObserverFactory outboundObserverFactory,
      Cache<Object, Object> processWideCache)
      throws Exception {
    // Tag every outgoing gRPC call with this harness's id so the runner can route responses.
    channelFactory =
        channelFactory.withInterceptors(ImmutableList.of(AddHarnessIdInterceptor.create(id)));
    IdGenerator idGenerator = IdGenerators.decrementingLongs();
    ShortIdMap metricsShortIds = new ShortIdMap();
    ExecutorService executorService =
        options.as(ExecutorOptions.class).getScheduledExecutorService();
    ExecutionStateSampler executionStateSampler =
        new ExecutionStateSampler(options, System::currentTimeMillis);
    final DataSampler dataSampler = new DataSampler();

    // The logging client is started first (try-with-resources) so that failures in the rest of
    // the setup are reported to the runner; it is closed last on the way out.
    try (BeamFnLoggingClient logging =
        BeamFnLoggingClient.createAndStart(
            options, loggingApiServiceDescriptor, channelFactory::forDescriptor)) {
      LOG.info("Fn Harness started");
      FileSystems.setDefaultPipelineOptions(options);
      // Dispatch table mapping each instruction request type to its handler.
      EnumMap<
              BeamFnApi.InstructionRequest.RequestCase,
              ThrowingFunction<InstructionRequest, BeamFnApi.InstructionResponse.Builder>>
          handlers = new EnumMap<>(BeamFnApi.InstructionRequest.RequestCase.class);

      ManagedChannel channel = channelFactory.forDescriptor(controlApiServiceDescriptor);
      BeamFnControlGrpc.BeamFnControlStub controlStub = BeamFnControlGrpc.newStub(channel);
      BeamFnControlGrpc.BeamFnControlBlockingStub blockingControlStub =
          BeamFnControlGrpc.newBlockingStub(channel);

      BeamFnDataGrpcClient beamFnDataMultiplexer =
          new BeamFnDataGrpcClient(options, channelFactory::forDescriptor, outboundObserverFactory);

      BeamFnStateGrpcClientCache beamFnStateGrpcClientCache =
          new BeamFnStateGrpcClientCache(idGenerator, channelFactory, outboundObserverFactory);

      FinalizeBundleHandler finalizeBundleHandler = new FinalizeBundleHandler(executorService);

      // Data sampling is only wired into the ProcessBundleHandler when the experiment is on.
      boolean shouldSample =
          ExperimentalOptions.hasExperiment(options, ENABLE_DATA_SAMPLING_EXPERIMENT);

      // Process-bundle descriptors are fetched lazily from the runner and memoized in the
      // process-wide cache; repeated bundles for the same descriptor skip the RPC.
      Function<String, BeamFnApi.ProcessBundleDescriptor> getProcessBundleDescriptor =
          new Function<String, ProcessBundleDescriptor>() {
            private static final String PROCESS_BUNDLE_DESCRIPTORS = "ProcessBundleDescriptors";
            private final Cache<String, BeamFnApi.ProcessBundleDescriptor> cache =
                Caches.subCache(processWideCache, PROCESS_BUNDLE_DESCRIPTORS);

            @Override
            public BeamFnApi.ProcessBundleDescriptor apply(String id) {
              return cache.computeIfAbsent(id, this::loadDescriptor);
            }

            private BeamFnApi.ProcessBundleDescriptor loadDescriptor(String id) {
              return blockingControlStub.getProcessBundleDescriptor(
                  BeamFnApi.GetProcessBundleDescriptorRequest.newBuilder()
                      .setProcessBundleDescriptorId(id)
                      .build());
            }
          };

      MetricsEnvironment.setProcessWideContainer(MetricsContainerImpl.createProcessWideContainer());

      ProcessBundleHandler processBundleHandler =
          new ProcessBundleHandler(
              options,
              runnerCapabilites,
              getProcessBundleDescriptor,
              beamFnDataMultiplexer,
              beamFnStateGrpcClientCache,
              finalizeBundleHandler,
              metricsShortIds,
              executionStateSampler,
              processWideCache,
              shouldSample ? dataSampler : null);

      logging.setProcessBundleHandler(processBundleHandler);

      BeamFnStatusClient beamFnStatusClient = null;
      if (statusApiServiceDescriptor != null) {
        beamFnStatusClient =
            new BeamFnStatusClient(
                statusApiServiceDescriptor,
                channelFactory::forDescriptor,
                processBundleHandler.getBundleProcessorCache(),
                options,
                processWideCache);
      }

      // Register one handler per instruction request type.
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.REGISTER,
          request ->
              BeamFnApi.InstructionResponse.newBuilder()
                  .setRegister(BeamFnApi.RegisterResponse.getDefaultInstance()));
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.FINALIZE_BUNDLE,
          finalizeBundleHandler::finalizeBundle);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE,
          processBundleHandler::processBundle);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE_PROGRESS,
          processBundleHandler::progress);
      handlers.put(
          BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE_SPLIT,
          processBundleHandler::trySplit);
      handlers.put(
          InstructionRequest.RequestCase.MONITORING_INFOS,
          request ->
              BeamFnApi.InstructionResponse.newBuilder()
                  .setMonitoringInfos(
                      BeamFnApi.MonitoringInfosMetadataResponse.newBuilder()
                          .putAllMonitoringInfo(
                              metricsShortIds.get(
                                  request.getMonitoringInfos().getMonitoringInfoIdList()))));

      HarnessMonitoringInfosInstructionHandler processWideHandler =
          new HarnessMonitoringInfosInstructionHandler(metricsShortIds);
      handlers.put(
          InstructionRequest.RequestCase.HARNESS_MONITORING_INFOS,
          processWideHandler::harnessMonitoringInfos);

      handlers.put(
          InstructionRequest.RequestCase.SAMPLE_DATA, dataSampler::handleDataSampleRequest);

      JvmInitializers.runBeforeProcessing(options);

      LOG.info("Entering instruction processing loop");

      BeamFnControlClient control =
          new BeamFnControlClient(
              controlStub.withExecutor(MoreExecutors.directExecutor()),
              outboundObserverFactory,
              executorService,
              handlers);

      // Block until either the control channel or the logging channel terminates.
      CompletableFuture.anyOf(control.terminationFuture(), logging.terminationFuture()).get();
      if (beamFnStatusClient != null) {
        beamFnStatusClient.close();
      }
      processBundleHandler.shutdown();
    } catch (Exception e) {
      // System.out is used deliberately here: the logging client may itself be the thing that
      // failed, so LOG could be unusable at this point.
      System.out.println("Shutting down harness due to exception: " + e.toString());
    } finally {
      System.out.println("Shutting SDK harness down.");
      executionStateSampler.stop();
      executorService.shutdown();
    }
  }
}
|
The only qualm I have with verifying the version is that it creates a dependency on an available cluster. When `withBackendVersion` is set explicitly, I had hoped to keep `DocToBulk` entirely free of I/O and of any dependency on a running cluster. I could do the version check in `ElasticsearchIO.Write` instead. Thoughts?
|
/**
 * Opens the REST client and issues the initial search request, returning {@code true} if the
 * first document of the first batch is available.
 */
public boolean start() throws IOException {
  restClient = source.spec.getConnectionConfiguration().createClient();

  // Resolve the user-supplied query, defaulting to match_all when none is configured.
  String searchQuery = null;
  if (source.spec.getQuery() != null) {
    searchQuery = source.spec.getQuery().get();
  }
  if (searchQuery == null) {
    searchQuery = "{\"query\": { \"match_all\": {} }}";
  }

  // On ES 5+ with multiple slices, inject a slice clause so each reader scans its own slice.
  boolean useSlicing =
      (source.backendVersion >= 5) && source.numSlices != null && source.numSlices > 1;
  if (useSlicing) {
    String sliceClause =
        String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices);
    searchQuery = searchQuery.replaceFirst("\\{", "{" + sliceClause + ",");
  }

  String searchEndpoint =
      String.format(
          "/%s/%s/_search",
          source.spec.getConnectionConfiguration().getIndex(),
          source.spec.getConnectionConfiguration().getType());

  Map<String, String> requestParams = new HashMap<>();
  requestParams.put("scroll", source.spec.getScrollKeepalive());
  if (source.backendVersion == 2) {
    // ES 2.x has no slice API, so batch size and shard preference are used instead.
    requestParams.put("size", String.valueOf(source.spec.getBatchSize()));
    if (source.shardPreference != null) {
      requestParams.put("preference", "_shards:" + source.shardPreference);
    }
  }

  Request searchRequest = new Request("GET", searchEndpoint);
  searchRequest.addParameters(requestParams);
  searchRequest.setEntity(new NStringEntity(searchQuery, ContentType.APPLICATION_JSON));

  JsonNode searchResult = parseResponse(restClient.performRequest(searchRequest).getEntity());
  updateScrollId(searchResult);
  return readNextBatchAndReturnFirstDocument(searchResult);
}
/** Remembers the scroll cursor from a search response so later batches can continue it. */
private void updateScrollId(JsonNode searchResult) {
  JsonNode scrollNode = searchResult.path("_scroll_id");
  scrollId = scrollNode.asText();
}
/**
 * Advances to the next document, either from the in-memory batch or by fetching the next page
 * through the scroll API.
 */
@Override
public boolean advance() throws IOException {
  // Serve from the current batch while it lasts.
  if (batchIterator.hasNext()) {
    current = batchIterator.next();
    return true;
  }

  // Batch exhausted: ask Elasticsearch for the next page of the scroll.
  String scrollBody =
      String.format(
          "{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}",
          source.spec.getScrollKeepalive(), scrollId);
  Request scrollRequest = new Request("GET", "/_search/scroll");
  scrollRequest.addParameters(Collections.emptyMap());
  scrollRequest.setEntity(new NStringEntity(scrollBody, ContentType.APPLICATION_JSON));

  Response scrollResponse = restClient.performRequest(scrollRequest);
  JsonNode searchResult = parseResponse(scrollResponse.getEntity());
  updateScrollId(searchResult);
  return readNextBatchAndReturnFirstDocument(searchResult);
}
/**
 * Loads the hits of a search/scroll response into a new batch and positions {@code current} on
 * the first document. Returns {@code false} when the response carried no hits (scroll drained).
 */
private boolean readNextBatchAndReturnFirstDocument(JsonNode searchResult) {
  JsonNode hits = searchResult.path("hits").path("hits");
  if (hits.size() == 0) {
    // No more results: reset reader state so getCurrent() throws.
    current = null;
    batchIterator = null;
    return false;
  }

  boolean withMetadata = source.spec.isWithMetadata();
  List<String> batch = new ArrayList<>();
  for (JsonNode hit : hits) {
    // With metadata: keep the whole hit; otherwise only the _source document body.
    batch.add(withMetadata ? hit.toString() : hit.path("_source").toString());
  }

  batchIterator = batch.listIterator();
  current = batchIterator.next();
  return true;
}
/** Returns the current document, or throws if the reader is not positioned on one. */
@Override
public String getCurrent() throws NoSuchElementException {
  String document = current;
  if (document == null) {
    throw new NoSuchElementException();
  }
  return document;
}
/**
 * Clears the server-side scroll context (if one was created) and closes the REST client.
 *
 * <p>The clear-scroll request is skipped when no scroll id was ever obtained — e.g. when
 * {@link #start()} failed before the initial search completed — since sending the literal
 * string {@code "null"} as a scroll id would be rejected by the cluster. The client is always
 * closed in the {@code finally} block regardless of the outcome of the clear request.
 */
@Override
public void close() throws IOException {
  try {
    if (scrollId != null && !scrollId.isEmpty()) {
      String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId);
      HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
      Request request = new Request("DELETE", "/_search/scroll");
      request.addParameters(Collections.emptyMap());
      request.setEntity(entity);
      restClient.performRequest(request);
    }
  } finally {
    if (restClient != null) {
      restClient.close();
    }
  }
}
/** Returns the {@code BoundedSource} this reader was created from. */
@Override
public BoundedSource<String> getCurrentSource() {
  return source;
}
}
/**
* A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry
* will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for
* 429 TOO_MANY_REQUESTS error.
*/
@AutoValue
public abstract static class RetryConfiguration implements Serializable {
  @VisibleForTesting
  static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate();

  abstract int getMaxAttempts();

  abstract Duration getMaxDuration();

  abstract RetryPredicate getRetryPredicate();

  abstract Builder builder();

  /** Builder for {@link RetryConfiguration}; instances are created by AutoValue. */
  @AutoValue.Builder
  abstract static class Builder {
    abstract ElasticsearchIO.RetryConfiguration.Builder setMaxAttempts(int maxAttempts);

    abstract ElasticsearchIO.RetryConfiguration.Builder setMaxDuration(Duration maxDuration);

    abstract ElasticsearchIO.RetryConfiguration.Builder setRetryPredicate(
        RetryPredicate retryPredicate);

    abstract ElasticsearchIO.RetryConfiguration build();
  }

  /**
   * Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts,
   * maxDurations and exponential backoff based retries.
   *
   * @param maxAttempts max number of attempts.
   * @param maxDuration maximum duration for retries.
   * @return {@link RetryConfiguration} object with provided settings.
   */
  public static RetryConfiguration create(int maxAttempts, Duration maxDuration) {
    checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0");
    checkArgument(
        maxDuration != null && maxDuration.isLongerThan(Duration.ZERO),
        "maxDuration must be greater than 0");
    return new AutoValue_ElasticsearchIO_RetryConfiguration.Builder()
        .setMaxAttempts(maxAttempts)
        .setMaxDuration(maxDuration)
        .setRetryPredicate(DEFAULT_RETRY_PREDICATE)
        .build();
  }

  /** Returns a copy of this configuration with the given predicate; test hook only. */
  @VisibleForTesting
  RetryConfiguration withRetryPredicate(RetryPredicate predicate) {
    checkArgument(predicate != null, "predicate must be provided");
    return builder().setRetryPredicate(predicate).build();
  }

  /**
   * An interface used to control if we retry the Elasticsearch call when a {@link Response} is
   * obtained. If the predicate returns true, the write transform retries
   * the requests to the Elasticsearch server if the {@link RetryConfiguration} permits it.
   */
  @FunctionalInterface
  interface RetryPredicate extends Predicate<HttpEntity>, Serializable {}

  /**
   * This is the default predicate used to test if a failed ES operation should be retried. A
   * retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes
   * first, for TOO_MANY_REQUESTS(429) error.
   */
  @VisibleForTesting
  static class DefaultRetryPredicate implements RetryPredicate {
    // Immutable: the HTTP status code considered retryable (429 by default).
    private final int errorCode;

    DefaultRetryPredicate(int code) {
      this.errorCode = code;
    }

    DefaultRetryPredicate() {
      this(429);
    }

    /** Returns true if the response has the error code for any mutation. */
    private static boolean errorCodePresent(HttpEntity responseEntity, int errorCode) {
      try {
        JsonNode json = parseResponse(responseEntity);
        // A bulk response sets "errors" when any item failed; scan items for the target code.
        if (json.path("errors").asBoolean()) {
          for (JsonNode item : json.path("items")) {
            if (item.findValue("status").asInt() == errorCode) {
              return true;
            }
          }
        }
      } catch (IOException e) {
        // Best effort: an unparseable response is treated as non-retryable rather than failing.
        LOG.warn("Could not extract error codes from responseEntity {}", responseEntity);
      }
      return false;
    }

    @Override
    public boolean test(HttpEntity responseEntity) {
      return errorCodePresent(responseEntity, errorCode);
    }
  }
}
/** A {@link PTransform} converting docs to their Bulk API counterparts. */
@AutoValue
public abstract static class DocToBulk
    extends PTransform<PCollection<String>, PCollection<String>> {

  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
  // retry_on_conflict value used for partial/scripted updates.
  private static final int DEFAULT_RETRY_ON_CONFLICT = 5;

  static {
    // Register the serializer that renders DocumentMetadata as Bulk API action metadata JSON.
    SimpleModule module = new SimpleModule();
    module.addSerializer(DocumentMetadata.class, new DocumentMetadataSerializer());
    OBJECT_MAPPER.registerModule(module);
  }

  abstract @Nullable ConnectionConfiguration getConnectionConfiguration();

  abstract Write.@Nullable FieldValueExtractFn getIdFn();

  abstract Write.@Nullable FieldValueExtractFn getIndexFn();

  abstract Write.@Nullable FieldValueExtractFn getRoutingFn();

  abstract Write.@Nullable FieldValueExtractFn getTypeFn();

  abstract Write.@Nullable FieldValueExtractFn getDocVersionFn();

  abstract @Nullable String getDocVersionType();

  abstract @Nullable String getUpsertScript();

  abstract @Nullable Boolean getUsePartialUpdate();

  abstract Write.@Nullable BooleanFieldValueExtractFn getIsDeleteFn();

  abstract @Nullable Integer getBackendVersion();

  abstract Builder builder();

  /** Builder for {@link DocToBulk}; instances are created by AutoValue. */
  @AutoValue.Builder
  abstract static class Builder {
    abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);

    abstract Builder setIdFn(Write.FieldValueExtractFn idFunction);

    abstract Builder setIndexFn(Write.FieldValueExtractFn indexFn);

    abstract Builder setRoutingFn(Write.FieldValueExtractFn routingFunction);

    abstract Builder setTypeFn(Write.FieldValueExtractFn typeFn);

    abstract Builder setDocVersionFn(Write.FieldValueExtractFn docVersionFn);

    abstract Builder setDocVersionType(String docVersionType);

    abstract Builder setIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn);

    abstract Builder setUsePartialUpdate(Boolean usePartialUpdate);

    abstract Builder setUpsertScript(String source);

    abstract Builder setBackendVersion(Integer assumedBackendVersion);

    abstract DocToBulk build();
  }

  /**
   * Provide the Elasticsearch connection configuration object. Only required if
   * withBackendVersion was not used i.e. getBackendVersion() returns null.
   *
   * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
   * @return the {@link DocToBulk} with connection configuration set
   */
  public DocToBulk withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
    checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
    return builder().setConnectionConfiguration(connectionConfiguration).build();
  }

  /**
   * Provide a function to extract the id from the document. This id will be used as the document
   * id in Elasticsearch. Should the function throw an Exception then the batch will fail and the
   * exception propagated.
   *
   * @param idFn to extract the document ID
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withIdFn(Write.FieldValueExtractFn idFn) {
    checkArgument(idFn != null, "idFn must not be null");
    return builder().setIdFn(idFn).build();
  }

  /**
   * Provide a function to extract the target index from the document allowing for dynamic
   * document routing. Should the function throw an Exception then the batch will fail and the
   * exception propagated.
   *
   * @param indexFn to extract the destination index from
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withIndexFn(Write.FieldValueExtractFn indexFn) {
    checkArgument(indexFn != null, "indexFn must not be null");
    return builder().setIndexFn(indexFn).build();
  }

  /**
   * Provide a function to extract the target routing from the document allowing for dynamic
   * document routing. Should the function throw an Exception then the batch will fail and the
   * exception propagated.
   *
   * @param routingFn to extract the destination index from
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withRoutingFn(Write.FieldValueExtractFn routingFn) {
    checkArgument(routingFn != null, "routingFn must not be null");
    return builder().setRoutingFn(routingFn).build();
  }

  /**
   * Provide a function to extract the target type from the document allowing for dynamic document
   * routing. Should the function throw an Exception then the batch will fail and the exception
   * propagated. Users are encouraged to consider carefully if multiple types are a sensible
   * model, as discussed in the Elasticsearch blog post on the removal of mapping types.
   *
   * @param typeFn to extract the destination index from
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withTypeFn(Write.FieldValueExtractFn typeFn) {
    checkArgument(typeFn != null, "typeFn must not be null");
    return builder().setTypeFn(typeFn).build();
  }

  /**
   * Provide an instruction to control whether partial updates or inserts (default) are issued to
   * Elasticsearch.
   *
   * @param usePartialUpdate set to true to issue partial updates
   * @return the {@link DocToBulk} with the partial update control set
   */
  public DocToBulk withUsePartialUpdate(boolean usePartialUpdate) {
    return builder().setUsePartialUpdate(usePartialUpdate).build();
  }

  /**
   * Whether to use scripted updates and what script to use.
   *
   * @param source set to the value of the script source, painless lang
   * @return the {@link DocToBulk} with the scripted updates set
   */
  public DocToBulk withUpsertScript(String source) {
    // Scripted upserts and partial updates are mutually exclusive; force the latter off.
    return builder().setUsePartialUpdate(false).setUpsertScript(source).build();
  }

  /**
   * Provide a function to extract the doc version from the document. This version number will be
   * used as the document version in Elasticsearch. Should the function throw an Exception then
   * the batch will fail and the exception propagated. Incompatible with update operations and
   * should only be used with withUsePartialUpdate(false)
   *
   * @param docVersionFn to extract the document version
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withDocVersionFn(Write.FieldValueExtractFn docVersionFn) {
    checkArgument(docVersionFn != null, "docVersionFn must not be null");
    return builder().setDocVersionFn(docVersionFn).build();
  }

  /**
   * Provide a function to extract the target operation either upsert or delete from the document
   * fields allowing dynamic bulk operation decision. While using withIsDeleteFn, it should be
   * taken care that the document's id extraction is defined using the withIdFn function or else
   * IllegalArgumentException is thrown. Should the function throw an Exception then the batch
   * will fail and the exception propagated.
   *
   * @param isDeleteFn set to true for deleting the specific document
   * @return the {@link DocToBulk} with the function set
   */
  public DocToBulk withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
    checkArgument(isDeleteFn != null, "deleteFn is required");
    return builder().setIsDeleteFn(isDeleteFn).build();
  }

  /**
   * Provide the version type with which documents will be indexed, e.g. "external". Incompatible
   * with update operations and should only be used with withUsePartialUpdate(false).
   *
   * @param docVersionType the version type to use, one of the values in {@code VERSION_TYPES}
   * @return the {@link DocToBulk} with the doc version type set
   */
  public DocToBulk withDocVersionType(String docVersionType) {
    checkArgument(
        VERSION_TYPES.contains(docVersionType),
        "docVersionType must be one of " + "%s",
        String.join(", ", VERSION_TYPES));
    return builder().setDocVersionType(docVersionType).build();
  }

  /**
   * Use to set explicitly which version of Elasticsearch the destination cluster is running.
   * Providing this hint means there is no need for setting {@link
   * DocToBulk#withConnectionConfiguration}.
   *
   * @param backendVersion the major version number of the version of Elasticsearch being run in
   *     the cluster where documents will be indexed.
   * @return the {@link DocToBulk} with the Elasticsearch major version number set
   */
  public DocToBulk withBackendVersion(int backendVersion) {
    // Report VALID_CLUSTER_VERSIONS in the message (previously VERSION_TYPES was joined here,
    // which listed version-type strings rather than the supported cluster versions).
    checkArgument(
        VALID_CLUSTER_VERSIONS.contains(backendVersion),
        "Backend version may only be one of " + "%s",
        VALID_CLUSTER_VERSIONS);
    return builder().setBackendVersion(backendVersion).build();
  }

  @Override
  public PCollection<String> expand(PCollection<String> docs) {
    ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
    Integer backendVersion = getBackendVersion();
    Write.FieldValueExtractFn idFn = getIdFn();
    Write.BooleanFieldValueExtractFn isDeleteFn = getIsDeleteFn();
    // The backend version is either declared up front or discovered at DoFn setup time; the
    // latter requires a connection configuration, so at least one of the two must be present.
    checkState(
        (backendVersion != null || connectionConfiguration != null),
        "withBackendVersion() or withConnectionConfiguration() is required");
    checkArgument(
        isDeleteFn == null || idFn != null,
        "Id needs to be specified by withIdFn for delete operation");

    return docs.apply(ParDo.of(new DocToBulkFn(this)));
  }

  /** Per-document addressing fields serialized as Bulk API action metadata. */
  private static class DocumentMetadata implements Serializable {
    final String index;
    final String type;
    final String id;
    final Integer retryOnConflict;
    final String routing;
    final Integer backendVersion;
    final String version;
    final String versionType;

    DocumentMetadata(
        String index,
        String type,
        String id,
        Integer retryOnConflict,
        String routing,
        Integer backendVersion,
        String version,
        String versionType) {
      this.index = index;
      this.id = id;
      this.type = type;
      this.retryOnConflict = retryOnConflict;
      this.routing = routing;
      this.backendVersion = backendVersion;
      this.version = version;
      this.versionType = versionType;
    }
  }

  /** Serializes {@link DocumentMetadata} into the JSON shape the Bulk API expects. */
  private static class DocumentMetadataSerializer extends StdSerializer<DocumentMetadata> {
    private DocumentMetadataSerializer() {
      super(DocumentMetadata.class);
    }

    @Override
    public void serialize(DocumentMetadata value, JsonGenerator gen, SerializerProvider provider)
        throws IOException {
      gen.writeStartObject();
      // Only emit fields that were provided; absent fields fall back to cluster defaults.
      if (value.index != null) {
        gen.writeStringField("_index", value.index);
      }
      if (value.type != null) {
        gen.writeStringField("_type", value.type);
      }
      if (value.id != null) {
        gen.writeStringField("_id", value.id);
      }
      if (value.routing != null) {
        gen.writeStringField("routing", value.routing);
      }
      // The field name for retry-on-conflict lost its leading underscore in ES 7.
      if (value.retryOnConflict != null && value.backendVersion <= 6) {
        gen.writeNumberField("_retry_on_conflict", value.retryOnConflict);
      }
      if (value.retryOnConflict != null && value.backendVersion >= 7) {
        gen.writeNumberField("retry_on_conflict", value.retryOnConflict);
      }
      if (value.version != null) {
        gen.writeStringField("version", value.version);
      }
      if (value.versionType != null) {
        gen.writeStringField("version_type", value.versionType);
      }
      gen.writeEndObject();
    }
  }

  /**
   * Builds the Bulk API line(s) for a single document: an action metadata line followed, for
   * non-delete operations, by the document (or update/script) payload.
   */
  @VisibleForTesting
  static String createBulkApiEntity(DocToBulk spec, String document, int backendVersion)
      throws IOException {
    String documentMetadata = "{}";
    boolean isDelete = false;
    if (spec.getIndexFn() != null || spec.getTypeFn() != null || spec.getIdFn() != null) {
      // Parse the document only when some extract function actually needs its fields.
      JsonNode parsedDocument = OBJECT_MAPPER.readTree(document);
      documentMetadata = getDocumentMetadata(spec, parsedDocument, backendVersion);
      if (spec.getIsDeleteFn() != null) {
        isDelete = spec.getIsDeleteFn().apply(parsedDocument);
      }
    }

    if (isDelete) {
      return String.format("{ \"delete\" : %s }%n", documentMetadata);
    } else {
      // getUsePartialUpdate() is a @Nullable Boolean: treat "unset" as false instead of
      // throwing an NPE on auto-unboxing.
      if (Boolean.TRUE.equals(spec.getUsePartialUpdate())) {
        return String.format(
            "{ \"update\" : %s }%n{ \"doc\" : %s, " + "\"doc_as_upsert\" : true }%n",
            documentMetadata, document);
      } else if (spec.getUpsertScript() != null) {
        return String.format(
            "{ \"update\" : %s }%n{ \"script\" : {\"source\": \"%s\", "
                + "\"params\": %s}, \"upsert\" : %s }%n",
            documentMetadata, spec.getUpsertScript(), document, document);
      } else {
        return String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document);
      }
    }
  }

  // NOTE(review): uses the default locale; presumably index names are ASCII so this is
  // equivalent to Locale.ROOT — confirm before relying on it in locale-sensitive environments.
  private static String lowerCaseOrNull(String input) {
    return input == null ? null : input.toLowerCase();
  }

  /**
   * Extracts the components that comprise the document address from the document using the {@link
   * Write.FieldValueExtractFn} configured. This allows any or all of the index, type and document
   * id to be controlled on a per document basis. If none are provided then an empty default of
   * {@code {}} is returned. Sanitization of the index is performed, automatically lower-casing
   * the value as required by Elasticsearch.
   *
   * @param parsedDocument the json from which the index, type and id may be extracted
   * @return the document address as JSON or the default
   * @throws IOException if the document cannot be parsed as JSON
   */
  private static String getDocumentMetadata(
      DocToBulk spec, JsonNode parsedDocument, int backendVersion) throws IOException {
    DocumentMetadata metadata =
        new DocumentMetadata(
            spec.getIndexFn() != null
                ? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument))
                : null,
            spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null,
            spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null,
            // Null-safe unboxing of the @Nullable Boolean; unset behaves like false.
            (Boolean.TRUE.equals(spec.getUsePartialUpdate())
                    || (spec.getUpsertScript() != null && !spec.getUpsertScript().isEmpty()))
                ? DEFAULT_RETRY_ON_CONFLICT
                : null,
            spec.getRoutingFn() != null ? spec.getRoutingFn().apply(parsedDocument) : null,
            backendVersion,
            spec.getDocVersionFn() != null ? spec.getDocVersionFn().apply(parsedDocument) : null,
            spec.getDocVersionType());
    return OBJECT_MAPPER.writeValueAsString(metadata);
  }

  /** {@link DoFn} to for the {@link DocToBulk} transform. */
  @VisibleForTesting
  static class DocToBulkFn extends DoFn<String, String> {
    private final DocToBulk spec;
    // Resolved once at setup: either the declared version or one discovered from the cluster.
    private int backendVersion;

    public DocToBulkFn(DocToBulk spec) {
      this.spec = spec;
    }

    @Setup
    public void setup() throws IOException {
      ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
      if (spec.getBackendVersion() == null) {
        backendVersion = ElasticsearchIO.getBackendVersion(connectionConfiguration);
      } else {
        backendVersion = spec.getBackendVersion();
      }
    }

    @ProcessElement
    public void processElement(ProcessContext c) throws IOException {
      c.output(createBulkApiEntity(spec, c.element(), backendVersion));
    }
  }
}
/**
* A {@link PTransform} convenience wrapper for doing both document to bulk API serialization as
* well as batching those Bulk API entities and writing them to an Elasticsearch cluster. This
* class is effectively a thin proxy for DocToBulk->BulkIO all-in-one for convenience and backward
* compatibility.
*/
@AutoValue
public abstract static class Write extends PTransform<PCollection<String>, PDone> {
public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {}
public interface BooleanFieldValueExtractFn extends SerializableFunction<JsonNode, Boolean> {}
public abstract DocToBulk getDocToBulk();
public abstract BulkIO getBulkIO();
abstract Builder writeBuilder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setDocToBulk(DocToBulk docToBulk);
abstract Builder setBulkIO(BulkIO bulkIO);
abstract Write build();
}
/** Refer to {@link DocToBulk
public Write withIdFn(FieldValueExtractFn idFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIdFn(idFn)).build();
}
/** Refer to {@link DocToBulk
public Write withIndexFn(FieldValueExtractFn indexFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIndexFn(indexFn)).build();
}
/** Refer to {@link DocToBulk
public Write withRoutingFn(FieldValueExtractFn routingFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withRoutingFn(routingFn)).build();
}
/** Refer to {@link DocToBulk
public Write withTypeFn(FieldValueExtractFn typeFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withTypeFn(typeFn)).build();
}
/** Refer to {@link DocToBulk
public Write withDocVersionFn(FieldValueExtractFn docVersionFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withDocVersionFn(docVersionFn)).build();
}
/** Refer to {@link DocToBulk
public Write withDocVersionType(String docVersionType) {
return writeBuilder().setDocToBulk(getDocToBulk().withDocVersionType(docVersionType)).build();
}
/** Refer to {@link DocToBulk
public Write withUsePartialUpdate(boolean usePartialUpdate) {
return writeBuilder()
.setDocToBulk(getDocToBulk().withUsePartialUpdate(usePartialUpdate))
.build();
}
/** Refer to {@link DocToBulk
public Write withUpsertScript(String source) {
return writeBuilder().setDocToBulk(getDocToBulk().withUpsertScript(source)).build();
}
/** Refer to {@link DocToBulk
public Write withBackendVersion(int backendVersion) {
return writeBuilder().setDocToBulk(getDocToBulk().withBackendVersion(backendVersion)).build();
}
/** Refer to {@link DocToBulk
public Write withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIsDeleteFn(isDeleteFn)).build();
}
    /**
     * Refer to {@link DocToBulk#withConnectionConfiguration} and {@link
     * BulkIO#withConnectionConfiguration}.
     */
    public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
      checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
      return writeBuilder()
          .setDocToBulk(getDocToBulk().withConnectionConfiguration(connectionConfiguration))
          .setBulkIO(getBulkIO().withConnectionConfiguration(connectionConfiguration))
          .build();
    }
    /** Refer to {@link BulkIO#withMaxBatchSize}. */
    public Write withMaxBatchSize(long batchSize) {
      return writeBuilder().setBulkIO(getBulkIO().withMaxBatchSize(batchSize)).build();
    }
    /** Refer to {@link BulkIO#withMaxBatchSizeBytes}. */
    public Write withMaxBatchSizeBytes(long batchSizeBytes) {
      return writeBuilder().setBulkIO(getBulkIO().withMaxBatchSizeBytes(batchSizeBytes)).build();
    }
    /** Refer to {@link BulkIO#withRetryConfiguration}. */
    public Write withRetryConfiguration(RetryConfiguration retryConfiguration) {
      return writeBuilder()
          .setBulkIO(getBulkIO().withRetryConfiguration(retryConfiguration))
          .build();
    }
    /** Refer to {@link BulkIO#withIgnoreVersionConflicts}. */
    public Write withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
      return writeBuilder()
          .setBulkIO(getBulkIO().withIgnoreVersionConflicts(ignoreVersionConflicts))
          .build();
    }
    /** Refer to {@link BulkIO#withUseStatefulBatches}. */
    public Write withUseStatefulBatches(boolean useStatefulBatches) {
      return writeBuilder()
          .setBulkIO(getBulkIO().withUseStatefulBatches(useStatefulBatches))
          .build();
    }
    /** Refer to {@link BulkIO#withMaxBufferingDuration}. */
    public Write withMaxBufferingDuration(Duration maxBufferingDuration) {
      return writeBuilder()
          .setBulkIO(getBulkIO().withMaxBufferingDuration(maxBufferingDuration))
          .build();
    }
/** Refer to {@link BulkIO
public Write withMaxParallelRquestsPerWindow(int maxParallelRquestsPerWindow) {
return writeBuilder()
.setBulkIO(getBulkIO().withMaxParallelRequestsPerWindow(maxParallelRquestsPerWindow))
.build();
}
/** Refer to {@link BulkIO
public Write withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrors) {
if (allowableResponseErrors == null) {
allowableResponseErrors = new HashSet<>();
}
return writeBuilder()
.setBulkIO(getBulkIO().withAllowableResponseErrors(allowableResponseErrors))
.build();
}
    @Override
    public PDone expand(PCollection<String> input) {
      // Write is a composite: DocToBulk serializes each document into a Bulk API entity,
      // then BulkIO batches entities and issues the Bulk requests.
      input.apply(getDocToBulk()).apply(getBulkIO());
      return PDone.in(input.getPipeline());
    }
}
/** A {@link PTransform} writing data to Elasticsearch. */
@AutoValue
public abstract static class BulkIO extends PTransform<PCollection<String>, PDone> {
@VisibleForTesting
static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]";
@VisibleForTesting
static final String RETRY_FAILED_LOG =
"Error writing to ES after %d attempt(s). No more attempts allowed";
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract long getMaxBatchSize();
abstract long getMaxBatchSizeBytes();
abstract @Nullable Duration getMaxBufferingDuration();
abstract boolean getUseStatefulBatches();
abstract int getMaxParallelRequestsPerWindow();
abstract @Nullable RetryConfiguration getRetryConfiguration();
abstract @Nullable Set<String> getAllowedResponseErrors();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setMaxBatchSize(long maxBatchSize);
abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes);
abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration);
abstract Builder setAllowedResponseErrors(Set<String> allowedResponseErrors);
abstract Builder setMaxBufferingDuration(Duration maxBufferingDuration);
abstract Builder setUseStatefulBatches(boolean useStatefulBatches);
abstract Builder setMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow);
abstract BulkIO build();
}
/**
* Provide the Elasticsearch connection configuration object.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link BulkIO} with connection configuration set
*/
public BulkIO withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
    /**
     * Provide a maximum size in number of documents for the batch, see the Bulk API
     * (https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). Default is
     * 1000 docs (like Elasticsearch bulk size advice). See
     * https://www.elastic.co/guide/en/elasticsearch/guide/current/bulk.html — depending on the
     * execution engine, size of bundles may vary, this sets the maximum size. Change this if you
     * need to have smaller ElasticSearch bulks.
     *
     * @param batchSize maximum batch size in number of documents
     * @return the {@link BulkIO} with connection batch size set
     */
    public BulkIO withMaxBatchSize(long batchSize) {
      checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize);
      return builder().setMaxBatchSize(batchSize).build();
    }
    /**
     * Provide a maximum size in bytes for the batch, see the Bulk API
     * (https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). Default is
     * 5MB (like Elasticsearch bulk size advice). See
     * https://www.elastic.co/guide/en/elasticsearch/guide/current/bulk.html — depending on the
     * execution engine, size of bundles may vary, this sets the maximum size. Change this if you
     * need to have smaller ElasticSearch bulks.
     *
     * @param batchSizeBytes maximum batch size in bytes
     * @return the {@link BulkIO} with connection batch size in bytes set
     */
    public BulkIO withMaxBatchSizeBytes(long batchSizeBytes) {
      checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes);
      return builder().setMaxBatchSizeBytes(batchSizeBytes).build();
    }
/**
* Provides configuration to retry a failed batch call to Elasticsearch. A batch is considered
* as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one
* or more of the items in the {@link Response}. Users should consider that retrying might
* compound the underlying problem which caused the initial failure. Users should also be aware
* that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then
* opt to retry the current bundle in entirety or abort if the max number of retries of the
* runner is completed. Retrying uses an exponential backoff algorithm, with minimum backoff of
* 5 seconds and then surfacing the error once the maximum number of retries or maximum
* configuration duration is exceeded.
*
* <p>Example use:
*
* <pre>{@code
* ElasticsearchIO.write()
* .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3))
* ...
* }</pre>
*
* @param retryConfiguration the rules which govern the retry behavior
* @return the {@link BulkIO} with retrying configured
*/
public BulkIO withRetryConfiguration(RetryConfiguration retryConfiguration) {
checkArgument(retryConfiguration != null, "retryConfiguration is required");
return builder().setRetryConfiguration(retryConfiguration).build();
}
/**
* Whether or not to suppress version conflict errors in a Bulk API response. This can be useful
* if your use case involves using external version types.
*
* @param ignoreVersionConflicts true to suppress version conflicts, false to surface version
* conflict errors.
* @return the {@link BulkIO} with version conflict handling configured
*/
public BulkIO withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
Set<String> allowedResponseErrors = getAllowedResponseErrors();
if (allowedResponseErrors == null) {
allowedResponseErrors = new HashSet<>();
}
if (ignoreVersionConflicts) {
allowedResponseErrors.add(VERSION_CONFLICT_ERROR);
}
return builder().setAllowedResponseErrors(allowedResponseErrors).build();
}
    /**
     * Provide a set of textual error types which can be contained in Bulk API response
     * items[].error.type field. Any element in @param allowableResponseErrorTypes will suppress
     * errors of the same type in Bulk responses.
     *
     * <p>See also
     * https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html#bulk-api-response-body
     *
     * @param allowableResponseErrorTypes error types to ignore in Bulk responses; null means none
     * @return the {@link BulkIO} with allowable response errors set
     */
    public BulkIO withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrorTypes) {
      if (allowableResponseErrorTypes == null) {
        allowableResponseErrorTypes = new HashSet<>();
      }
      return builder().setAllowedResponseErrors(allowableResponseErrorTypes).build();
    }
/**
* If using {@link BulkIO
* time before buffered elements are emitted to Elasticsearch as a Bulk API request. If this
* config is not set, Bulk requests will not be issued until {@link BulkIO
* number of documents have been buffered. This may result in higher latency in particular if
* your max batch size is set to a large value and your pipeline input is low volume.
*
* @param maxBufferingDuration the maximum duration to wait before sending any buffered
* documents to Elasticsearch, regardless of maxBatchSize.
* @return the {@link BulkIO} with maximum buffering duration set
*/
public BulkIO withMaxBufferingDuration(Duration maxBufferingDuration) {
LOG.warn(
"Use of withMaxBufferingDuration requires withUseStatefulBatches(true). "
+ "Setting that automatically.");
return builder()
.setUseStatefulBatches(true)
.setMaxBufferingDuration(maxBufferingDuration)
.build();
}
/**
* Whether or not to use Stateful Processing to ensure bulk requests have the desired number of
* entities i.e. as close to the maxBatchSize as possible. By default without this feature
* enabled, Bulk requests will not contain more than maxBatchSize entities, but the lower bound
* of batch size is determined by Beam Runner bundle sizes, which may be as few as 1.
*
* @param useStatefulBatches true enables the use of Stateful Processing to ensure that batches
* are as close to the maxBatchSize as possible.
* @return the {@link BulkIO} with Stateful Processing enabled or disabled
*/
public BulkIO withUseStatefulBatches(boolean useStatefulBatches) {
return builder().setUseStatefulBatches(useStatefulBatches).build();
}
/**
* When using {@link BulkIO
* batches are maintained per-key-per-window. If data is globally windowed and this
* configuration is set to 1, there will only ever be 1 request in flight. Having only a single
* request in flight can be beneficial for ensuring an Elasticsearch cluster is not overwhelmed
* by parallel requests, but may not work for all use cases. If this number is less than the
* number of maximum workers in your pipeline, the IO work may not be distributed across all
* workers.
*
* @param maxParallelRequestsPerWindow the maximum number of parallel bulk requests for a window
* of data
* @return the {@link BulkIO} with maximum parallel bulk requests per window set
*/
public BulkIO withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
checkArgument(
maxParallelRequestsPerWindow > 0, "parameter value must be positive " + "a integer");
return builder().setMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow).build();
}
    @Override
    public PDone expand(PCollection<String> input) {
      ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
      checkState(connectionConfiguration != null, "withConnectionConfiguration() is required");
      if (getUseStatefulBatches()) {
        // Stateful path: assign each element to one of N shards, then use GroupIntoBatches so
        // batches are as close to getMaxBatchSize() as possible regardless of bundle sizes.
        GroupIntoBatches<Integer, String> groupIntoBatches =
            GroupIntoBatches.ofSize(getMaxBatchSize());
        if (getMaxBufferingDuration() != null) {
          groupIntoBatches = groupIntoBatches.withMaxBufferingDuration(getMaxBufferingDuration());
        }
        input
            .apply(ParDo.of(new AssignShardFn<>(getMaxParallelRequestsPerWindow())))
            .apply(groupIntoBatches)
            .apply(
                "Remove key no longer needed",
                MapElements.into(TypeDescriptors.iterables(TypeDescriptors.strings()))
                    .via(KV::getValue))
            .apply(ParDo.of(new BulkIOFn(this)));
      } else {
        // Stateless path: each element becomes a singleton iterable; batch sizes are bounded
        // above by getMaxBatchSize() but the lower bound depends on the runner's bundle sizes.
        input
            .apply(
                "Make elements iterable",
                MapElements.into(TypeDescriptors.iterables(TypeDescriptors.strings()))
                    .via(Collections::singletonList))
            .apply(ParDo.of(new BulkIOFn(this)));
      }
      return PDone.in(input.getPipeline());
    }
/** {@link DoFn} to for the {@link BulkIO} transform. */
@VisibleForTesting
static class BulkIOFn extends DoFn<Iterable<String>, Void> {
private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5);
private transient FluentBackoff retryBackoff;
protected BulkIO spec;
private transient RestClient restClient;
protected ArrayList<String> batch;
long currentBatchSizeBytes;
@VisibleForTesting
BulkIOFn(BulkIO bulkSpec) {
this.spec = bulkSpec;
}
@Setup
public void setup() throws IOException {
ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
restClient = connectionConfiguration.createClient();
retryBackoff =
FluentBackoff.DEFAULT.withMaxRetries(0).withInitialBackoff(RETRY_INITIAL_BACKOFF);
if (spec.getRetryConfiguration() != null) {
retryBackoff =
FluentBackoff.DEFAULT
.withInitialBackoff(RETRY_INITIAL_BACKOFF)
.withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1)
.withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration());
}
}
      @StartBundle
      public void startBundle(StartBundleContext context) {
        // Reset the per-bundle buffer of serialized Bulk API entities.
        batch = new ArrayList<>();
        currentBatchSizeBytes = 0;
      }
      @FinishBundle
      public void finishBundle(FinishBundleContext context)
          throws IOException, InterruptedException {
        // Flush whatever remains in the buffer at the end of the bundle.
        flushBatch();
      }
      @ProcessElement
      public void processElement(@Element @NonNull Iterable<String> bulkApiEntities)
          throws Exception {
        // Buffer each pre-serialized Bulk API entity, flushing when a size threshold is hit.
        for (String bulkApiEntity : bulkApiEntities) {
          addAndMaybeFlush(bulkApiEntity);
        }
      }
      // Adds one Bulk API entity to the buffer and flushes when either the document-count or
      // the byte-size threshold is reached.
      protected void addAndMaybeFlush(String bulkApiEntity)
          throws IOException, InterruptedException {
        batch.add(bulkApiEntity);
        currentBatchSizeBytes += bulkApiEntity.getBytes(StandardCharsets.UTF_8).length;
        if (batch.size() >= spec.getMaxBatchSize()
            || currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) {
          flushBatch();
        }
      }
private void flushBatch() throws IOException, InterruptedException {
if (batch.isEmpty()) {
return;
}
LOG.info(
"ElasticsearchIO batch size: {}, batch size bytes: {}",
batch.size(),
currentBatchSizeBytes);
StringBuilder bulkRequest = new StringBuilder();
for (String json : batch) {
bulkRequest.append(json);
}
batch.clear();
currentBatchSizeBytes = 0L;
Response response = null;
HttpEntity responseEntity = null;
String endPoint = spec.getConnectionConfiguration().getBulkEndPoint();
HttpEntity requestBody =
new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON);
try {
Request request = new Request("POST", endPoint);
request.addParameters(Collections.emptyMap());
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
if (spec.getRetryConfiguration() == null) {
throw ex;
}
LOG.error("Caught ES timeout, retrying", ex);
}
if (spec.getRetryConfiguration() != null
&& (response == null
|| responseEntity == null
|| spec.getRetryConfiguration().getRetryPredicate().test(responseEntity))) {
if (responseEntity != null
&& spec.getRetryConfiguration().getRetryPredicate().test(responseEntity)) {
LOG.warn("ES Cluster is responding with HTP 429 - TOO_MANY_REQUESTS.");
}
responseEntity = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody);
}
checkForErrors(responseEntity, spec.getAllowedResponseErrors());
}
/** retry request based on retry configuration policy. */
private HttpEntity handleRetry(
String method, String endpoint, Map<String, String> params, HttpEntity requestBody)
throws IOException, InterruptedException {
Response response;
HttpEntity responseEntity;
Sleeper sleeper = Sleeper.DEFAULT;
BackOff backoff = retryBackoff.backoff();
int attempt = 0;
while (BackOffUtils.next(sleeper, backoff)) {
LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt));
try {
Request request = new Request(method, endpoint);
request.addParameters(params);
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
LOG.error("Caught ES timeout, retrying", ex);
continue;
}
if (!Objects.requireNonNull(spec.getRetryConfiguration())
.getRetryPredicate()
.test(responseEntity)) {
return responseEntity;
} else {
LOG.warn("ES Cluster is responding with HTP 429 - TOO_MANY_REQUESTS.");
}
}
throw new IOException(String.format(RETRY_FAILED_LOG, attempt));
}
      @Teardown
      public void closeClient() throws IOException {
        // Release the underlying HTTP connections held by the REST client.
        if (restClient != null) {
          restClient.close();
        }
      }
}
}
  /**
   * Queries the cluster root endpoint and returns the Elasticsearch major version number,
   * validating that it is one of the supported versions.
   *
   * @throws IllegalArgumentException if the cluster cannot be reached or runs an unsupported
   *     version
   */
  static int getBackendVersion(ConnectionConfiguration connectionConfiguration) {
    try (RestClient restClient = connectionConfiguration.createClient()) {
      Request request = new Request("GET", "");
      Response response = restClient.performRequest(request);
      JsonNode jsonNode = parseResponse(response.getEntity());
      // Major version is the first character of version.number (e.g. "7" from "7.10.2").
      int backendVersion =
          Integer.parseInt(jsonNode.path("version").path("number").asText().substring(0, 1));
      checkArgument(
          (VALID_CLUSTER_VERSIONS.contains(backendVersion)),
          "The Elasticsearch version to connect to is %s.x. "
              + "This version of the ElasticsearchIO is only compatible with "
              + "Elasticsearch v7.x, v6.x, v5.x and v2.x",
          backendVersion);
      return backendVersion;
    } catch (IOException e) {
      throw new IllegalArgumentException("Cannot get Elasticsearch version", e);
    }
  }
}
|
}
|
    /**
     * Opens the REST client and issues the initial scroll query, returning true if a first
     * document is available.
     */
    public boolean start() throws IOException {
      restClient = source.spec.getConnectionConfiguration().createClient();
      String query = source.spec.getQuery() != null ? source.spec.getQuery().get() : null;
      if (query == null) {
        query = "{\"query\": { \"match_all\": {} }}";
      }
      // For ES 5+ with parallel reads, inject a "slice" clause into the query so each source
      // instance reads a disjoint slice of the scroll.
      if ((source.backendVersion >= 5) && source.numSlices != null && source.numSlices > 1) {
        String sliceQuery =
            String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices);
        query = query.replaceFirst("\\{", "{" + sliceQuery + ",");
      }
      String endPoint =
          String.format(
              "/%s/%s/_search",
              source.spec.getConnectionConfiguration().getIndex(),
              source.spec.getConnectionConfiguration().getType());
      Map<String, String> params = new HashMap<>();
      params.put("scroll", source.spec.getScrollKeepalive());
      // ES 2.x has no slice API; reads are parallelized by targeting individual shards via the
      // "preference" parameter instead.
      if (source.backendVersion == 2) {
        params.put("size", String.valueOf(source.spec.getBatchSize()));
        if (source.shardPreference != null) {
          params.put("preference", "_shards:" + source.shardPreference);
        }
      }
      HttpEntity queryEntity = new NStringEntity(query, ContentType.APPLICATION_JSON);
      Request request = new Request("GET", endPoint);
      request.addParameters(params);
      request.setEntity(queryEntity);
      Response response = restClient.performRequest(request);
      JsonNode searchResult = parseResponse(response.getEntity());
      updateScrollId(searchResult);
      return readNextBatchAndReturnFirstDocument(searchResult);
    }
    // Scroll ids may change between pages, so always record the latest one from the response.
    private void updateScrollId(JsonNode searchResult) {
      scrollId = searchResult.path("_scroll_id").asText();
    }
    @Override
    public boolean advance() throws IOException {
      // Serve the next document from the current batch if possible; otherwise continue the
      // server-side scroll to fetch the next page.
      if (batchIterator.hasNext()) {
        current = batchIterator.next();
        return true;
      } else {
        String requestBody =
            String.format(
                "{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}",
                source.spec.getScrollKeepalive(), scrollId);
        HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
        Request request = new Request("GET", "/_search/scroll");
        request.addParameters(Collections.emptyMap());
        request.setEntity(scrollEntity);
        Response response = restClient.performRequest(request);
        JsonNode searchResult = parseResponse(response.getEntity());
        updateScrollId(searchResult);
        return readNextBatchAndReturnFirstDocument(searchResult);
      }
    }
private boolean readNextBatchAndReturnFirstDocument(JsonNode searchResult) {
JsonNode hits = searchResult.path("hits").path("hits");
if (hits.size() == 0) {
current = null;
batchIterator = null;
return false;
}
List<String> batch = new ArrayList<>();
boolean withMetadata = source.spec.isWithMetadata();
for (JsonNode hit : hits) {
if (withMetadata) {
batch.add(hit.toString());
} else {
String document = hit.path("_source").toString();
batch.add(document);
}
}
batchIterator = batch.listIterator();
current = batchIterator.next();
return true;
}
@Override
public String getCurrent() throws NoSuchElementException {
if (current == null) {
throw new NoSuchElementException();
}
return current;
}
    @Override
    public void close() throws IOException {
      // Best-effort: release the server-side scroll context first, then always close the
      // REST client even if the delete request fails.
      String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId);
      HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
      try {
        Request request = new Request("DELETE", "/_search/scroll");
        request.addParameters(Collections.emptyMap());
        request.setEntity(entity);
        restClient.performRequest(request);
      } finally {
        if (restClient != null) {
          restClient.close();
        }
      }
    }
    @Override
    public BoundedSource<String> getCurrentSource() {
      // Returns the source this reader was created from.
      return source;
    }
}
/**
* A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry
* will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for
* 429 TOO_MANY_REQUESTS error.
*/
@AutoValue
public abstract static class RetryConfiguration implements Serializable {
@VisibleForTesting
static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate();
abstract int getMaxAttempts();
abstract Duration getMaxDuration();
abstract RetryPredicate getRetryPredicate();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxAttempts(int maxAttempts);
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxDuration(Duration maxDuration);
abstract ElasticsearchIO.RetryConfiguration.Builder setRetryPredicate(
RetryPredicate retryPredicate);
abstract ElasticsearchIO.RetryConfiguration build();
}
/**
* Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts,
* maxDurations and exponential backoff based retries.
*
* @param maxAttempts max number of attempts.
* @param maxDuration maximum duration for retries.
* @return {@link RetryConfiguration} object with provided settings.
*/
public static RetryConfiguration create(int maxAttempts, Duration maxDuration) {
checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0");
checkArgument(
maxDuration != null && maxDuration.isLongerThan(Duration.ZERO),
"maxDuration must be greater than 0");
return new AutoValue_ElasticsearchIO_RetryConfiguration.Builder()
.setMaxAttempts(maxAttempts)
.setMaxDuration(maxDuration)
.setRetryPredicate(DEFAULT_RETRY_PREDICATE)
.build();
}
@VisibleForTesting
RetryConfiguration withRetryPredicate(RetryPredicate predicate) {
checkArgument(predicate != null, "predicate must be provided");
return builder().setRetryPredicate(predicate).build();
}
/**
* An interface used to control if we retry the Elasticsearch call when a {@link Response} is
* obtained. If {@link RetryPredicate
* the requests to the Elasticsearch server if the {@link RetryConfiguration} permits it.
*/
@FunctionalInterface
interface RetryPredicate extends Predicate<HttpEntity>, Serializable {}
/**
* This is the default predicate used to test if a failed ES operation should be retried. A
* retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes
* first, for TOO_MANY_REQUESTS(429) error.
*/
@VisibleForTesting
static class DefaultRetryPredicate implements RetryPredicate {
private int errorCode;
DefaultRetryPredicate(int code) {
this.errorCode = code;
}
DefaultRetryPredicate() {
this(429);
}
/** Returns true if the response has the error code for any mutation. */
private static boolean errorCodePresent(HttpEntity responseEntity, int errorCode) {
try {
JsonNode json = parseResponse(responseEntity);
if (json.path("errors").asBoolean()) {
for (JsonNode item : json.path("items")) {
if (item.findValue("status").asInt() == errorCode) {
return true;
}
}
}
} catch (IOException e) {
LOG.warn("Could not extract error codes from responseEntity {}", responseEntity);
}
return false;
}
@Override
public boolean test(HttpEntity responseEntity) {
return errorCodePresent(responseEntity, errorCode);
}
}
}
/** A {@link PTransform} converting docs to their Bulk API counterparts. */
@AutoValue
public abstract static class DocToBulk
extends PTransform<PCollection<String>, PCollection<String>> {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final int DEFAULT_RETRY_ON_CONFLICT = 5;
static {
SimpleModule module = new SimpleModule();
module.addSerializer(DocumentMetadata.class, new DocumentMetadataSerializer());
OBJECT_MAPPER.registerModule(module);
}
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract Write.@Nullable FieldValueExtractFn getIdFn();
abstract Write.@Nullable FieldValueExtractFn getIndexFn();
abstract Write.@Nullable FieldValueExtractFn getRoutingFn();
abstract Write.@Nullable FieldValueExtractFn getTypeFn();
abstract Write.@Nullable FieldValueExtractFn getDocVersionFn();
abstract @Nullable String getDocVersionType();
abstract @Nullable String getUpsertScript();
abstract @Nullable Boolean getUsePartialUpdate();
abstract Write.@Nullable BooleanFieldValueExtractFn getIsDeleteFn();
abstract @Nullable Integer getBackendVersion();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setIdFn(Write.FieldValueExtractFn idFunction);
abstract Builder setIndexFn(Write.FieldValueExtractFn indexFn);
abstract Builder setRoutingFn(Write.FieldValueExtractFn routingFunction);
abstract Builder setTypeFn(Write.FieldValueExtractFn typeFn);
abstract Builder setDocVersionFn(Write.FieldValueExtractFn docVersionFn);
abstract Builder setDocVersionType(String docVersionType);
abstract Builder setIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn);
abstract Builder setUsePartialUpdate(Boolean usePartialUpdate);
abstract Builder setUpsertScript(String source);
abstract Builder setBackendVersion(Integer assumedBackendVersion);
abstract DocToBulk build();
}
/**
* Provide the Elasticsearch connection configuration object. Only required if
* withBackendVersion was not used i.e. getBackendVersion() returns null.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link DocToBulk} with connection configuration set
*/
public DocToBulk withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
/**
* Provide a function to extract the id from the document. This id will be used as the document
* id in Elasticsearch. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param idFn to extract the document ID
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIdFn(Write.FieldValueExtractFn idFn) {
checkArgument(idFn != null, "idFn must not be null");
return builder().setIdFn(idFn).build();
}
/**
* Provide a function to extract the target index from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param indexFn to extract the destination index from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIndexFn(Write.FieldValueExtractFn indexFn) {
checkArgument(indexFn != null, "indexFn must not be null");
return builder().setIndexFn(indexFn).build();
}
/**
* Provide a function to extract the target routing from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param routingFn to extract the destination index from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withRoutingFn(Write.FieldValueExtractFn routingFn) {
checkArgument(routingFn != null, "routingFn must not be null");
return builder().setRoutingFn(routingFn).build();
}
/**
* Provide a function to extract the target type from the document allowing for dynamic document
* routing. Should the function throw an Exception then the batch will fail and the exception
* propagated. Users are encouraged to consider carefully if multipe types are a sensible model
* <a
* href="https:
* discussed in this blog</a>.
*
* @param typeFn to extract the destination index from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withTypeFn(Write.FieldValueExtractFn typeFn) {
checkArgument(typeFn != null, "typeFn must not be null");
return builder().setTypeFn(typeFn).build();
}
/**
* Provide an instruction to control whether partial updates or inserts (default) are issued to
* Elasticsearch.
*
* @param usePartialUpdate set to true to issue partial updates
* @return the {@link DocToBulk} with the partial update control set
*/
public DocToBulk withUsePartialUpdate(boolean usePartialUpdate) {
return builder().setUsePartialUpdate(usePartialUpdate).build();
}
/**
* Whether to use scripted updates and what script to use.
*
* @param source set to the value of the script source, painless lang
* @return the {@link DocToBulk} with the scripted updates set
*/
public DocToBulk withUpsertScript(String source) {
if (getBackendVersion() == null || getBackendVersion() == 2) {
LOG.warn("Painless scripts are not supported on Elasticsearch clusters before version 5.0");
}
return builder().setUsePartialUpdate(false).setUpsertScript(source).build();
}
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionFn to extract the document version
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withDocVersionFn(Write.FieldValueExtractFn docVersionFn) {
checkArgument(docVersionFn != null, "docVersionFn must not be null");
return builder().setDocVersionFn(docVersionFn).build();
}
/**
* Provide a function to extract the target operation either upsert or delete from the document
* fields allowing dynamic bulk operation decision. While using withIsDeleteFn, it should be
* taken care that the document's id extraction is defined using the withIdFn function or else
* IllegalArgumentException is thrown. Should the function throw an Exception then the batch
* will fail and the exception propagated.
*
* @param isDeleteFn set to true for deleting the specific document
* @return the {@link Write} with the function set
*/
public DocToBulk withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
checkArgument(isDeleteFn != null, "deleteFn is required");
return builder().setIsDeleteFn(isDeleteFn).build();
}
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionType the version type to use, one of {@value VERSION_TYPES}
* @return the {@link DocToBulk} with the doc version type set
*/
public DocToBulk withDocVersionType(String docVersionType) {
checkArgument(
VERSION_TYPES.contains(docVersionType),
"docVersionType must be one of " + "%s",
String.join(", ", VERSION_TYPES));
return builder().setDocVersionType(docVersionType).build();
}
/**
* Use to set explicitly which version of Elasticsearch the destination cluster is running.
* Providing this hint means there is no need for setting {@link
* DocToBulk
*
* <p>Note: if the value of @param backendVersion differs from the version the destination
* cluster is running, behavior is undefined and likely to yield errors.
*
* @param backendVersion the major version number of the version of Elasticsearch being run in
* the cluster where documents will be indexed.
* @return the {@link DocToBulk} with the Elasticsearch major version number set
*/
public DocToBulk withBackendVersion(int backendVersion) {
checkArgument(
VALID_CLUSTER_VERSIONS.contains(backendVersion),
"Backend version may only be one of " + "%s",
String.join(", ", VERSION_TYPES));
return builder().setBackendVersion(backendVersion).build();
}
@Override
public PCollection<String> expand(PCollection<String> docs) {
  // A backend version must be resolvable: either declared explicitly via withBackendVersion()
  // or discoverable at runtime through a configured connection.
  checkState(
      getBackendVersion() != null || getConnectionConfiguration() != null,
      "withBackendVersion() or withConnectionConfiguration() is required");
  // Deletions are addressed by document id, so an id extractor is mandatory whenever a
  // delete-decision function has been supplied.
  checkArgument(
      getIsDeleteFn() == null || getIdFn() != null,
      "Id needs to be specified by withIdFn for delete operation");
  return docs.apply(ParDo.of(new DocToBulkFn(this)));
}
/**
 * Immutable holder for the per-document metadata that gets serialized as the action line of a
 * Bulk API entry (index, type, id, routing, versioning). Serializable so it can be carried by
 * the {@link DoFn} across workers.
 */
private static class DocumentMetadata implements Serializable {
  final String index;
  final String type;
  final String id;
  // How many times Elasticsearch should retry an update on version conflict; only populated
  // for partial updates / scripted upserts (see getDocumentMetadata).
  final Integer retryOnConflict;
  final String routing;
  // Major version of the destination cluster; drives version-specific field naming during
  // serialization (see DocumentMetadataSerializer).
  final Integer backendVersion;
  final String version;
  final String versionType;
  DocumentMetadata(
      String index,
      String type,
      String id,
      Integer retryOnConflict,
      String routing,
      Integer backendVersion,
      String version,
      String versionType) {
    this.index = index;
    this.id = id;
    this.type = type;
    this.retryOnConflict = retryOnConflict;
    this.routing = routing;
    this.backendVersion = backendVersion;
    this.version = version;
    this.versionType = versionType;
  }
}
/**
 * Jackson serializer emitting {@link DocumentMetadata} as the JSON metadata object of a Bulk
 * API action line. Null fields are omitted entirely so the bulk request stays minimal.
 */
private static class DocumentMetadataSerializer extends StdSerializer<DocumentMetadata> {
  private DocumentMetadataSerializer() {
    super(DocumentMetadata.class);
  }
  @Override
  public void serialize(DocumentMetadata value, JsonGenerator gen, SerializerProvider provider)
      throws IOException {
    gen.writeStartObject();
    if (value.index != null) {
      gen.writeStringField("_index", value.index);
    }
    if (value.type != null) {
      gen.writeStringField("_type", value.type);
    }
    if (value.id != null) {
      gen.writeStringField("_id", value.id);
    }
    if (value.routing != null) {
      gen.writeStringField("routing", value.routing);
    }
    // Elasticsearch renamed this field between major versions: "_retry_on_conflict" up to and
    // including 6.x, "retry_on_conflict" from 7.x onwards.
    if (value.retryOnConflict != null && value.backendVersion <= 6) {
      gen.writeNumberField("_retry_on_conflict", value.retryOnConflict);
    }
    if (value.retryOnConflict != null && value.backendVersion >= 7) {
      gen.writeNumberField("retry_on_conflict", value.retryOnConflict);
    }
    if (value.version != null) {
      gen.writeStringField("version", value.version);
    }
    if (value.versionType != null) {
      gen.writeStringField("version_type", value.versionType);
    }
    gen.writeEndObject();
  }
}
/**
 * Converts a single JSON document into its Bulk API entity: an action/metadata line followed by
 * the document payload (for index/update actions), or just a delete action line.
 *
 * <p>The document is only parsed when at least one metadata extractor is configured; otherwise
 * an empty metadata object {@code {}} is used and the delete function is never consulted.
 */
@VisibleForTesting
static String createBulkApiEntity(DocToBulk spec, String document, int backendVersion)
    throws IOException {
  String documentMetadata = "{}";
  boolean isDelete = false;
  if (spec.getIndexFn() != null
      || spec.getTypeFn() != null
      || spec.getIdFn() != null
      || spec.getRoutingFn() != null) {
    // Parse once and share the tree across all extractor functions.
    JsonNode parsedDocument = OBJECT_MAPPER.readTree(document);
    documentMetadata = getDocumentMetadata(spec, parsedDocument, backendVersion);
    if (spec.getIsDeleteFn() != null) {
      isDelete = spec.getIsDeleteFn().apply(parsedDocument);
    }
  }
  if (isDelete) {
    // Delete actions have no payload line, only the action metadata.
    return String.format("{ \"delete\" : %s }%n", documentMetadata);
  } else {
    if (spec.getUsePartialUpdate()) {
      // Partial update: upsert the document if absent, merge fields if present.
      return String.format(
          "{ \"update\" : %s }%n{ \"doc\" : %s, " + "\"doc_as_upsert\" : true }%n",
          documentMetadata, document);
    } else if (spec.getUpsertScript() != null) {
      // Scripted upsert: the document doubles as both script params and the upsert body.
      return String.format(
          "{ \"update\" : %s }%n{ \"script\" : {\"source\": \"%s\", "
              + "\"params\": %s}, \"upsert\" : %s, \"scripted_upsert\": true}%n",
          documentMetadata, spec.getUpsertScript(), document, document);
    } else {
      // Plain index action: overwrite-or-create semantics.
      return String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document);
    }
  }
}
/**
 * Lower-cases {@code input}, passing {@code null} through unchanged. Used to sanitize index
 * names, which Elasticsearch requires to be lowercase.
 */
private static String lowerCaseOrNull(String input) {
  // Robustness fix: use a fixed locale so lower-casing does not depend on the JVM default
  // locale (e.g. under the Turkish locale 'I' maps to dotless 'ı', yielding an index name the
  // cluster would reject).
  return input == null ? null : input.toLowerCase(java.util.Locale.ROOT);
}
/**
 * Extracts the components that comprise the document address from the document using the {@link
 * Write.FieldValueExtractFn} configured. This allows any or all of the index, type and document
 * id to be controlled on a per document basis. If none are provided then an empty default of
 * {@code {}} is returned. Sanitization of the index is performed, automatically lower-casing
 * the value as required by Elasticsearch.
 *
 * @param parsedDocument the json from which the index, type and id may be extracted
 * @return the document address as JSON or the default
 * @throws IOException if the document cannot be parsed as JSON
 */
private static String getDocumentMetadata(
    DocToBulk spec, JsonNode parsedDocument, int backendVersion) throws IOException {
  DocumentMetadata metadata =
      new DocumentMetadata(
          // Index names must be lowercase, so sanitize the extracted value.
          spec.getIndexFn() != null
              ? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument))
              : null,
          spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null,
          spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null,
          // retry_on_conflict only applies to update-style actions (partial update or
          // scripted upsert); plain index/delete actions leave it unset.
          (spec.getUsePartialUpdate()
                  || (spec.getUpsertScript() != null && !spec.getUpsertScript().isEmpty()))
              ? DEFAULT_RETRY_ON_CONFLICT
              : null,
          spec.getRoutingFn() != null ? spec.getRoutingFn().apply(parsedDocument) : null,
          backendVersion,
          spec.getDocVersionFn() != null ? spec.getDocVersionFn().apply(parsedDocument) : null,
          spec.getDocVersionType());
  return OBJECT_MAPPER.writeValueAsString(metadata);
}
/** {@link DoFn} for the {@link DocToBulk} transform: maps each JSON document to its Bulk API entity. */
@VisibleForTesting
static class DocToBulkFn extends DoFn<String, String> {
  private final DocToBulk spec;
  // Resolved once per DoFn instance: either the explicitly declared version or the version
  // probed from the cluster at setup time.
  private int backendVersion;
  public DocToBulkFn(DocToBulk spec) {
    this.spec = spec;
  }
  @Setup
  public void setup() throws IOException {
    if (spec.getBackendVersion() != null) {
      backendVersion = spec.getBackendVersion();
    } else {
      backendVersion = ElasticsearchIO.getBackendVersion(spec.getConnectionConfiguration());
    }
  }
  @ProcessElement
  public void processElement(ProcessContext c) throws IOException {
    c.output(createBulkApiEntity(spec, c.element(), backendVersion));
  }
}
}
/**
 * A {@link PTransform} writing data to Elasticsearch.
 *
 * <p>This {@link PTransform} acts as a convenience wrapper for doing both document to bulk API
 * serialization as well as batching those Bulk API entities and writing them to an Elasticsearch
 * cluster. This class is effectively a thin proxy for DocToBulk->BulkIO all-in-one for
 * convenience and backward compatibility.
 */
public static class Write extends PTransform<PCollection<String>, PDone> {
  /** Extracts a String-valued field (e.g. id, index, type, routing) from a JSON document. */
  public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {}
  /** Extracts a boolean decision (e.g. should this document be deleted?) from a JSON document. */
  public interface BooleanFieldValueExtractFn extends SerializableFunction<JsonNode, Boolean> {}
  // Serialization half: converts raw JSON documents into Bulk API entities.
  private DocToBulk docToBulk =
      new AutoValue_ElasticsearchIO_DocToBulk.Builder()
          .setUsePartialUpdate(false)
          .build();
  // Write half: batches Bulk API entities and issues the HTTP bulk requests.
  private BulkIO bulkIO =
      new AutoValue_ElasticsearchIO_BulkIO.Builder()
          .setMaxBatchSize(1000L)
          .setMaxBatchSizeBytes(5L * 1024L * 1024L)
          .setUseStatefulBatches(false)
          .setMaxParallelRequestsPerWindow(1)
          .build();
  /** Returns the serialization half of this composite for advanced/standalone use. */
  public DocToBulk getDocToBulk() {
    return docToBulk;
  }
  /** Returns the batching/writing half of this composite for advanced/standalone use. */
  public BulkIO getBulkIO() {
    return bulkIO;
  }
  /** Refer to {@link DocToBulk#withIdFn}. */
  public Write withIdFn(FieldValueExtractFn idFn) {
    docToBulk = docToBulk.withIdFn(idFn);
    return this;
  }
  /** Refer to {@link DocToBulk#withIndexFn}. */
  public Write withIndexFn(FieldValueExtractFn indexFn) {
    docToBulk = docToBulk.withIndexFn(indexFn);
    return this;
  }
  /** Refer to {@link DocToBulk#withRoutingFn}. */
  public Write withRoutingFn(FieldValueExtractFn routingFn) {
    docToBulk = docToBulk.withRoutingFn(routingFn);
    return this;
  }
  /** Refer to {@link DocToBulk#withTypeFn}. */
  public Write withTypeFn(FieldValueExtractFn typeFn) {
    docToBulk = docToBulk.withTypeFn(typeFn);
    return this;
  }
  /** Refer to {@link DocToBulk#withDocVersionFn}. */
  public Write withDocVersionFn(FieldValueExtractFn docVersionFn) {
    docToBulk = docToBulk.withDocVersionFn(docVersionFn);
    return this;
  }
  /** Refer to {@link DocToBulk#withDocVersionType}. */
  public Write withDocVersionType(String docVersionType) {
    docToBulk = docToBulk.withDocVersionType(docVersionType);
    return this;
  }
  /** Refer to {@link DocToBulk#withUsePartialUpdate}. */
  public Write withUsePartialUpdate(boolean usePartialUpdate) {
    docToBulk = docToBulk.withUsePartialUpdate(usePartialUpdate);
    return this;
  }
  /** Refer to {@link DocToBulk#withUpsertScript}. */
  public Write withUpsertScript(String source) {
    docToBulk = docToBulk.withUpsertScript(source);
    return this;
  }
  /** Refer to {@link DocToBulk#withBackendVersion}. */
  public Write withBackendVersion(int backendVersion) {
    docToBulk = docToBulk.withBackendVersion(backendVersion);
    return this;
  }
  /** Refer to {@link DocToBulk#withIsDeleteFn}. */
  public Write withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
    docToBulk = docToBulk.withIsDeleteFn(isDeleteFn);
    return this;
  }
  /** Refer to {@link BulkIO#withConnectionConfiguration}. Applied to both halves. */
  public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
    checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
    // Both halves need the connection: DocToBulk may probe the cluster version, BulkIO writes.
    docToBulk = docToBulk.withConnectionConfiguration(connectionConfiguration);
    bulkIO = bulkIO.withConnectionConfiguration(connectionConfiguration);
    return this;
  }
  /** Refer to {@link BulkIO#withMaxBatchSize}. */
  public Write withMaxBatchSize(long batchSize) {
    bulkIO = bulkIO.withMaxBatchSize(batchSize);
    return this;
  }
  /** Refer to {@link BulkIO#withMaxBatchSizeBytes}. */
  public Write withMaxBatchSizeBytes(long batchSizeBytes) {
    bulkIO = bulkIO.withMaxBatchSizeBytes(batchSizeBytes);
    return this;
  }
  /** Refer to {@link BulkIO#withRetryConfiguration}. */
  public Write withRetryConfiguration(RetryConfiguration retryConfiguration) {
    bulkIO = bulkIO.withRetryConfiguration(retryConfiguration);
    return this;
  }
  /** Refer to {@link BulkIO#withIgnoreVersionConflicts}. */
  public Write withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
    bulkIO = bulkIO.withIgnoreVersionConflicts(ignoreVersionConflicts);
    return this;
  }
  /** Refer to {@link BulkIO#withUseStatefulBatches}. */
  public Write withUseStatefulBatches(boolean useStatefulBatches) {
    bulkIO = bulkIO.withUseStatefulBatches(useStatefulBatches);
    return this;
  }
  /** Refer to {@link BulkIO#withMaxBufferingDuration}. */
  public Write withMaxBufferingDuration(Duration maxBufferingDuration) {
    bulkIO = bulkIO.withMaxBufferingDuration(maxBufferingDuration);
    return this;
  }
  /** Refer to {@link BulkIO#withMaxParallelRequestsPerWindow}. */
  public Write withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
    bulkIO = bulkIO.withMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow);
    return this;
  }
  /** Refer to {@link BulkIO#withAllowableResponseErrors}. */
  public Write withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrors) {
    if (allowableResponseErrors == null) {
      allowableResponseErrors = new HashSet<>();
    }
    bulkIO = bulkIO.withAllowableResponseErrors(allowableResponseErrors);
    return this;
  }
  @Override
  public PDone expand(PCollection<String> input) {
    return input.apply(docToBulk).apply(bulkIO);
  }
}
/**
 * A {@link PTransform} writing Bulk API entities created by {@link ElasticsearchIO.DocToBulk} to
 * an Elasticsearch cluster. Typically, using {@link ElasticsearchIO.Write} is preferred, whereas
 * using {@link ElasticsearchIO.DocToBulk} and BulkIO separately is for advanced use cases such as
 * mirroring data to multiple clusters or data lakes without recomputation.
 */
@AutoValue
public abstract static class BulkIO extends PTransform<PCollection<String>, PDone> {
  @VisibleForTesting
  static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]";
  @VisibleForTesting
  static final String RETRY_FAILED_LOG =
      "Error writing to ES after %d attempt(s). No more attempts allowed";
  abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
  abstract long getMaxBatchSize();
  abstract long getMaxBatchSizeBytes();
  abstract @Nullable Duration getMaxBufferingDuration();
  abstract boolean getUseStatefulBatches();
  abstract int getMaxParallelRequestsPerWindow();
  abstract @Nullable RetryConfiguration getRetryConfiguration();
  abstract @Nullable Set<String> getAllowedResponseErrors();
  abstract Builder builder();
  @AutoValue.Builder
  abstract static class Builder {
    abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
    abstract Builder setMaxBatchSize(long maxBatchSize);
    abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes);
    abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration);
    abstract Builder setAllowedResponseErrors(Set<String> allowedResponseErrors);
    abstract Builder setMaxBufferingDuration(Duration maxBufferingDuration);
    abstract Builder setUseStatefulBatches(boolean useStatefulBatches);
    abstract Builder setMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow);
    abstract BulkIO build();
  }
  /**
   * Provide the Elasticsearch connection configuration object.
   *
   * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
   * @return the {@link BulkIO} with connection configuration set
   */
  public BulkIO withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
    checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
    return builder().setConnectionConfiguration(connectionConfiguration).build();
  }
  /**
   * Provide a maximum size in number of documents for each bulk request (see the Elasticsearch
   * Bulk API documentation). Default is 1000 docs, in line with Elasticsearch bulk sizing
   * advice. Depending on the execution engine, bundle sizes may vary; this sets the maximum
   * size. Change this if you need to have smaller Elasticsearch bulks.
   *
   * @param batchSize maximum batch size in number of documents
   * @return the {@link BulkIO} with connection batch size set
   */
  public BulkIO withMaxBatchSize(long batchSize) {
    checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize);
    return builder().setMaxBatchSize(batchSize).build();
  }
  /**
   * Provide a maximum size in bytes for each bulk request (see the Elasticsearch Bulk API
   * documentation). Default is 5MB, in line with Elasticsearch bulk sizing advice. Depending on
   * the execution engine, bundle sizes may vary; this sets the maximum size. Change this if you
   * need to have smaller Elasticsearch bulks.
   *
   * @param batchSizeBytes maximum batch size in bytes
   * @return the {@link BulkIO} with connection batch size in bytes set
   */
  public BulkIO withMaxBatchSizeBytes(long batchSizeBytes) {
    checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes);
    return builder().setMaxBatchSizeBytes(batchSizeBytes).build();
  }
  /**
   * Provides configuration to retry a failed batch call to Elasticsearch. A batch is considered
   * as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one
   * or more of the items in the {@link Response}. Users should consider that retrying might
   * compound the underlying problem which caused the initial failure. Users should also be aware
   * that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then
   * opt to retry the current bundle in entirety or abort if the max number of retries of the
   * runner is completed. Retrying uses an exponential backoff algorithm, with minimum backoff of
   * 5 seconds and then surfacing the error once the maximum number of retries or maximum
   * configuration duration is exceeded.
   *
   * <p>Example use:
   *
   * <pre>{@code
   * ElasticsearchIO.write()
   *   .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3))
   *   ...
   * }</pre>
   *
   * @param retryConfiguration the rules which govern the retry behavior
   * @return the {@link BulkIO} with retrying configured
   */
  public BulkIO withRetryConfiguration(RetryConfiguration retryConfiguration) {
    checkArgument(retryConfiguration != null, "retryConfiguration is required");
    return builder().setRetryConfiguration(retryConfiguration).build();
  }
  /**
   * Whether or not to suppress version conflict errors in a Bulk API response. This can be useful
   * if your use case involves using external version types.
   *
   * @param ignoreVersionConflicts true to suppress version conflicts, false to surface version
   *     conflict errors.
   * @return the {@link BulkIO} with version conflict handling configured
   */
  public BulkIO withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
    // Fix: copy instead of mutating in place. The previous implementation added to the
    // existing set instance, which also retroactively mutated any earlier BulkIO spec that
    // shared the same set.
    Set<String> allowedResponseErrors = new HashSet<>();
    if (getAllowedResponseErrors() != null) {
      allowedResponseErrors.addAll(getAllowedResponseErrors());
    }
    if (ignoreVersionConflicts) {
      allowedResponseErrors.add(VERSION_CONFLICT_ERROR);
    }
    return builder().setAllowedResponseErrors(allowedResponseErrors).build();
  }
  /**
   * Provide a set of textual error types which can be contained in Bulk API response
   * items[].error.type field. Any element in @param allowableResponseErrorTypes will suppress
   * errors of the same type in Bulk responses.
   *
   * <p>See the Elasticsearch Bulk API response documentation for the set of possible error
   * types.
   *
   * @param allowableResponseErrorTypes error types to tolerate in bulk responses
   * @return the {@link BulkIO} with allowable response errors set
   */
  public BulkIO withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrorTypes) {
    if (allowableResponseErrorTypes == null) {
      allowableResponseErrorTypes = new HashSet<>();
    }
    return builder().setAllowedResponseErrors(allowableResponseErrorTypes).build();
  }
  /**
   * If using {@link BulkIO#withUseStatefulBatches}, this can be used to set a maximum elapsed
   * time before buffered elements are emitted to Elasticsearch as a Bulk API request. If this
   * config is not set, Bulk requests will not be issued until {@link BulkIO#getMaxBatchSize}
   * number of documents have been buffered. This may result in higher latency in particular if
   * your max batch size is set to a large value and your pipeline input is low volume.
   *
   * @param maxBufferingDuration the maximum duration to wait before sending any buffered
   *     documents to Elasticsearch, regardless of maxBatchSize.
   * @return the {@link BulkIO} with maximum buffering duration set
   */
  public BulkIO withMaxBufferingDuration(Duration maxBufferingDuration) {
    LOG.warn(
        "Use of withMaxBufferingDuration requires withUseStatefulBatches(true). "
            + "Setting that automatically.");
    return builder()
        .setUseStatefulBatches(true)
        .setMaxBufferingDuration(maxBufferingDuration)
        .build();
  }
  /**
   * Whether or not to use Stateful Processing to ensure bulk requests have the desired number of
   * entities i.e. as close to the maxBatchSize as possible. By default without this feature
   * enabled, Bulk requests will not contain more than maxBatchSize entities, but the lower bound
   * of batch size is determined by Beam Runner bundle sizes, which may be as few as 1.
   *
   * @param useStatefulBatches true enables the use of Stateful Processing to ensure that batches
   *     are as close to the maxBatchSize as possible.
   * @return the {@link BulkIO} with Stateful Processing enabled or disabled
   */
  public BulkIO withUseStatefulBatches(boolean useStatefulBatches) {
    return builder().setUseStatefulBatches(useStatefulBatches).build();
  }
  /**
   * When using {@link BulkIO#withUseStatefulBatches} Stateful Processing, states and therefore
   * batches are maintained per-key-per-window. BE AWARE that low values for @param
   * maxParallelRequestsPerWindow, in particular if the input data has a finite number of windows,
   * can reduce parallelism greatly. If data is globally windowed and @param
   * maxParallelRequestsPerWindow is set to 1, there will only ever be 1 request in flight. Having
   * only a single request in flight can be beneficial for ensuring an Elasticsearch cluster is
   * not overwhelmed by parallel requests, but may not work for all use cases. If this number is
   * less than the number of maximum workers in your pipeline, the IO work will result in a
   * sub-distribution of the last write step with most of the runners.
   *
   * @param maxParallelRequestsPerWindow the maximum number of parallel bulk requests for a window
   *     of data
   * @return the {@link BulkIO} with maximum parallel bulk requests per window set
   */
  public BulkIO withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
    // Message fix: previous wording was ungrammatical ("must be positive a integer").
    checkArgument(
        maxParallelRequestsPerWindow > 0, "parameter value must be a positive integer");
    return builder().setMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow).build();
  }
  /**
   * Creates batches of documents using Stateful Processing based on user configurable settings of
   * withMaxBufferingDuration and withMaxParallelRequestsPerWindow.
   *
   * <p>Mostly exists for testability of withMaxParallelRequestsPerWindow.
   */
  @VisibleForTesting
  static class StatefulBatching
      extends PTransform<PCollection<String>, PCollection<KV<Integer, Iterable<String>>>> {
    final BulkIO spec;
    private StatefulBatching(BulkIO bulkSpec) {
      spec = bulkSpec;
    }
    public static StatefulBatching fromSpec(BulkIO spec) {
      return new StatefulBatching(spec);
    }
    @Override
    public PCollection<KV<Integer, Iterable<String>>> expand(PCollection<String> input) {
      GroupIntoBatches<Integer, String> groupIntoBatches =
          GroupIntoBatches.ofSize(spec.getMaxBatchSize());
      if (spec.getMaxBufferingDuration() != null) {
        groupIntoBatches =
            groupIntoBatches.withMaxBufferingDuration(spec.getMaxBufferingDuration());
      }
      // Shard keys bound the number of concurrent bulk requests per window.
      return input
          .apply(ParDo.of(new Reshuffle.AssignShardFn<>(spec.getMaxParallelRequestsPerWindow())))
          .apply(groupIntoBatches);
    }
  }
  @Override
  public PDone expand(PCollection<String> input) {
    ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
    checkState(connectionConfiguration != null, "withConnectionConfiguration() is required");
    if (getUseStatefulBatches()) {
      input.apply(StatefulBatching.fromSpec(this)).apply(ParDo.of(new BulkIOStatefulFn(this)));
    } else {
      input.apply(ParDo.of(new BulkIOBundleFn(this)));
    }
    return PDone.in(input.getPipeline());
  }
  /** Per-bundle variant: batches are bounded by runner bundle boundaries. */
  static class BulkIOBundleFn extends BulkIOBaseFn<String> {
    @VisibleForTesting
    BulkIOBundleFn(BulkIO bulkSpec) {
      super(bulkSpec);
    }
    @ProcessElement
    public void processElement(ProcessContext context) throws Exception {
      String bulkApiEntity = context.element();
      addAndMaybeFlush(bulkApiEntity);
    }
  }
  /*
  Intended for use in conjunction with {@link GroupIntoBatches}
  */
  static class BulkIOStatefulFn extends BulkIOBaseFn<KV<Integer, Iterable<String>>> {
    @VisibleForTesting
    BulkIOStatefulFn(BulkIO bulkSpec) {
      super(bulkSpec);
    }
    @ProcessElement
    public void processElement(ProcessContext context) throws Exception {
      Iterable<String> bulkApiEntities = context.element().getValue();
      for (String bulkApiEntity : bulkApiEntities) {
        addAndMaybeFlush(bulkApiEntity);
      }
    }
  }
  /** {@link DoFn} holding the shared buffering, flushing and retry logic for {@link BulkIO}. */
  @VisibleForTesting
  private abstract static class BulkIOBaseFn<T> extends DoFn<T, Void> {
    private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5);
    private transient FluentBackoff retryBackoff;
    private BulkIO spec;
    private transient RestClient restClient;
    // Bulk API entities buffered since the last flush, plus their cumulative UTF-8 byte size.
    private ArrayList<String> batch;
    long currentBatchSizeBytes;
    protected BulkIOBaseFn(BulkIO bulkSpec) {
      this.spec = bulkSpec;
    }
    @Setup
    public void setup() throws IOException {
      ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
      restClient = connectionConfiguration.createClient();
      // Default: no retries at all; replaced below when a retry configuration is supplied.
      retryBackoff =
          FluentBackoff.DEFAULT.withMaxRetries(0).withInitialBackoff(RETRY_INITIAL_BACKOFF);
      if (spec.getRetryConfiguration() != null) {
        retryBackoff =
            FluentBackoff.DEFAULT
                .withInitialBackoff(RETRY_INITIAL_BACKOFF)
                .withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1)
                .withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration());
      }
    }
    @StartBundle
    public void startBundle(StartBundleContext context) {
      batch = new ArrayList<>();
      currentBatchSizeBytes = 0;
    }
    @FinishBundle
    public void finishBundle(FinishBundleContext context)
        throws IOException, InterruptedException {
      // Flush whatever remains so no buffered entity outlives the bundle.
      flushBatch();
    }
    protected void addAndMaybeFlush(String bulkApiEntity)
        throws IOException, InterruptedException {
      batch.add(bulkApiEntity);
      currentBatchSizeBytes += bulkApiEntity.getBytes(StandardCharsets.UTF_8).length;
      if (batch.size() >= spec.getMaxBatchSize()
          || currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) {
        flushBatch();
      }
    }
    private boolean isRetryableClientException(Throwable t) {
      // Only transient connectivity failures are considered retryable.
      return t.getCause() instanceof ConnectTimeoutException
          || t.getCause() instanceof SocketTimeoutException
          || t.getCause() instanceof ConnectionClosedException
          || t.getCause() instanceof ConnectException;
    }
    private void flushBatch() throws IOException, InterruptedException {
      if (batch.isEmpty()) {
        return;
      }
      LOG.info(
          "ElasticsearchIO batch size: {}, batch size bytes: {}",
          batch.size(),
          currentBatchSizeBytes);
      StringBuilder bulkRequest = new StringBuilder();
      for (String json : batch) {
        bulkRequest.append(json);
      }
      batch.clear();
      currentBatchSizeBytes = 0L;
      Response response = null;
      HttpEntity responseEntity = null;
      String endPoint = spec.getConnectionConfiguration().getBulkEndPoint();
      HttpEntity requestBody =
          new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON);
      try {
        Request request = new Request("POST", endPoint);
        request.addParameters(Collections.emptyMap());
        request.setEntity(requestBody);
        response = restClient.performRequest(request);
        responseEntity = new BufferedHttpEntity(response.getEntity());
      } catch (java.io.IOException ex) {
        // Only swallow the exception when it is transient AND retrying is configured;
        // otherwise surface it to the runner.
        if (spec.getRetryConfiguration() == null || !isRetryableClientException(ex)) {
          throw ex;
        }
        LOG.error("Caught ES timeout, retrying", ex);
      }
      if (spec.getRetryConfiguration() != null
          && (response == null
              || responseEntity == null
              || spec.getRetryConfiguration().getRetryPredicate().test(responseEntity))) {
        if (responseEntity != null
            && spec.getRetryConfiguration().getRetryPredicate().test(responseEntity)) {
          LOG.warn("ES Cluster is responding with HTTP 429 - TOO_MANY_REQUESTS.");
        }
        responseEntity = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody);
      }
      checkForErrors(responseEntity, spec.getAllowedResponseErrors());
    }
    /** Retries the request per the configured retry policy until success or exhaustion. */
    private HttpEntity handleRetry(
        String method, String endpoint, Map<String, String> params, HttpEntity requestBody)
        throws IOException, InterruptedException {
      Response response;
      HttpEntity responseEntity = null;
      Sleeper sleeper = Sleeper.DEFAULT;
      BackOff backoff = retryBackoff.backoff();
      int attempt = 0;
      while (BackOffUtils.next(sleeper, backoff)) {
        LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt));
        try {
          Request request = new Request(method, endpoint);
          request.addParameters(params);
          request.setEntity(requestBody);
          response = restClient.performRequest(request);
          responseEntity = new BufferedHttpEntity(response.getEntity());
        } catch (java.io.IOException ex) {
          if (!isRetryableClientException(ex)) {
            // Bug fix: non-retryable IOExceptions were previously swallowed here, letting the
            // loop fall through and evaluate the retry predicate against a null (or stale)
            // response entity. Surface them to the caller instead, mirroring flushBatch().
            throw ex;
          }
          LOG.error("Caught ES timeout, retrying", ex);
          continue;
        }
        if (!Objects.requireNonNull(spec.getRetryConfiguration())
            .getRetryPredicate()
            .test(responseEntity)) {
          return responseEntity;
        } else {
          LOG.warn("ES Cluster is responding with HTTP 429 - TOO_MANY_REQUESTS.");
        }
      }
      throw new IOException(String.format(RETRY_FAILED_LOG, attempt));
    }
    @Teardown
    public void closeClient() throws IOException {
      if (restClient != null) {
        restClient.close();
      }
    }
  }
}
/**
 * Probes the cluster root endpoint and returns its major version number, failing fast when the
 * version is not one this IO supports.
 *
 * @throws IllegalArgumentException if the cluster cannot be reached or runs an unsupported
 *     version
 */
static int getBackendVersion(ConnectionConfiguration connectionConfiguration) {
  try (RestClient restClient = connectionConfiguration.createClient()) {
    Request request = new Request("GET", "");
    Response response = restClient.performRequest(request);
    JsonNode jsonNode = parseResponse(response.getEntity());
    // Fix: parse everything before the first '.' rather than only the first character, so a
    // two-digit major version (e.g. "10.0.0") is reported accurately instead of as "1".
    String versionNumber = jsonNode.path("version").path("number").asText();
    int backendVersion = Integer.parseInt(versionNumber.split("\\.")[0]);
    checkArgument(
        (VALID_CLUSTER_VERSIONS.contains(backendVersion)),
        "The Elasticsearch version to connect to is %s.x. "
            + "This version of the ElasticsearchIO is only compatible with "
            + "Elasticsearch v7.x, v6.x, v5.x and v2.x",
        backendVersion);
    return backendVersion;
  } catch (IOException e) {
    throw new IllegalArgumentException("Cannot get Elasticsearch version", e);
  }
}
}
|
class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> {
private final BoundedElasticsearchSource source;
private RestClient restClient;
private String current;
private String scrollId;
private ListIterator<String> batchIterator;
private BoundedElasticsearchReader(BoundedElasticsearchSource source) {
this.source = source;
}
@Override
|
class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> {
private final BoundedElasticsearchSource source;
private RestClient restClient;
private String current;
private String scrollId;
private ListIterator<String> batchIterator;
private BoundedElasticsearchReader(BoundedElasticsearchSource source) {
this.source = source;
}
@Override
|
Both the JDK client and the Netty client have to implement the same logic for converting a response byte array to a string. It would be better to put this in Core utils somewhere to reduce duplication; then, if there are any fixes or updates to this logic, we won't need to make them in two places.
|
/**
 * Aggregates the response body and decodes it to a String. A leading byte-order mark
 * (UTF-8, UTF-16BE or UTF-16LE) takes precedence; otherwise the charset is taken from the
 * 'Content-Type' header, falling back to UTF-8.
 */
public Mono<String> getBodyAsString() {
    return getBodyAsByteArray().map(bytes -> {
        if (bytes.length >= 3 && bytes[0] == (byte) 239 && bytes[1] == (byte) 187 && bytes[2] == (byte) 191) { // UTF-8 BOM (EF BB BF)
            return new String(bytes, 3, bytes.length - 3, StandardCharsets.UTF_8);
        } else if (bytes.length >= 2 && bytes[0] == (byte) 254 && bytes[1] == (byte) 255) { // UTF-16BE BOM (FE FF)
            return new String(bytes, 2, bytes.length - 2, StandardCharsets.UTF_16BE);
        } else if (bytes.length >= 2 && bytes[0] == (byte) 255 && bytes[1] == (byte) 254) { // UTF-16LE BOM (FF FE)
            return new String(bytes, 2, bytes.length - 2, StandardCharsets.UTF_16LE);
        } else {
            /*
             * Attempt to retrieve the charset from the 'Content-Type' header (the previous
             * comment incorrectly said 'Content-Encoding'); if the value isn't present or is
             * invalid, fall back to 'UTF-8' as the default charset.
             */
            try {
                String contentType = reactorNettyResponse.responseHeaders()
                    .get("Content-Type", "charset=UTF-8");
                Matcher charsetMatcher = CHARSET_PATTERN.matcher(contentType);
                if (charsetMatcher.find()) {
                    return new String(bytes, Charset.forName(charsetMatcher.group(1)));
                } else {
                    return new String(bytes, StandardCharsets.UTF_8);
                }
            } catch (IllegalCharsetNameException | UnsupportedCharsetException ex) {
                return new String(bytes, StandardCharsets.UTF_8);
            }
        }
    });
}
|
return getBodyAsByteArray().map(bytes -> {
|
/**
 * Aggregates the response body and decodes it to a String via
 * {@code CoreUtils.bomAwareToString}, which centralizes the BOM-aware decoding logic
 * previously duplicated across HTTP client implementations.
 */
public Mono<String> getBodyAsString() {
    return getBodyAsByteArray().map(bytes ->
        CoreUtils.bomAwareToString(bytes, reactorNettyResponse.responseHeaders().get("Content-Type")));
}
|
/**
 * An {@code HttpResponse} implementation backed by a Reactor Netty {@code HttpClientResponse},
 * disposing the underlying connection once the body has been fully consumed or the response is
 * closed.
 */
class ReactorNettyHttpResponse extends HttpResponse {
    private final HttpClientResponse reactorNettyResponse;
    private final Connection reactorNettyConnection;
    // When true, body buffers are exposed as views over Netty's buffers instead of deep copies.
    private final boolean disableBufferCopy;

    ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection,
        HttpRequest httpRequest, boolean disableBufferCopy) {
        super(httpRequest);
        this.reactorNettyResponse = reactorNettyResponse;
        this.reactorNettyConnection = reactorNettyConnection;
        this.disableBufferCopy = disableBufferCopy;
    }

    @Override
    public int getStatusCode() {
        return reactorNettyResponse.status().code();
    }

    @Override
    public String getHeaderValue(String name) {
        return reactorNettyResponse.responseHeaders().get(name);
    }

    @Override
    public HttpHeaders getHeaders() {
        HttpHeaders headers = new HttpHeaders();
        reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue()));
        return headers;
    }

    @Override
    public Flux<ByteBuffer> getBody() {
        // Dispose the connection on the event loop once the stream terminates.
        return bodyIntern().doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        }).map(byteBuf -> this.disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf));
    }

    @Override
    public Mono<byte[]> getBodyAsByteArray() {
        return bodyIntern().aggregate().asByteArray().doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        });
    }

    // Bug fix: a stray duplicated @Override annotation here (left behind when the no-arg
    // getBodyAsString() override was removed/moved) made this class fail to compile.
    // NOTE(review): confirm whether a no-arg getBodyAsString() override is still required by
    // the HttpResponse base class or has a suitable default.
    @Override
    public Mono<String> getBodyAsString(Charset charset) {
        return bodyIntern().aggregate().asString(charset).doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        });
    }

    @Override
    public void close() {
        if (!reactorNettyConnection.isDisposed()) {
            reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
        }
    }

    private ByteBufFlux bodyIntern() {
        return reactorNettyConnection.inbound().receive();
    }

    Connection internConnection() {
        return reactorNettyConnection;
    }

    private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) {
        ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes());
        byteBuf.readBytes(buffer);
        buffer.rewind();
        return buffer;
    }
}
|
/**
 * An {@code HttpResponse} implementation backed by a Reactor Netty {@code HttpClientResponse},
 * disposing the underlying connection once the body has been fully consumed or the response is
 * closed.
 */
class ReactorNettyHttpResponse extends HttpResponse {
    private final HttpClientResponse reactorNettyResponse;
    private final Connection reactorNettyConnection;
    // When true, body buffers are exposed as views over Netty's buffers instead of deep copies.
    private final boolean disableBufferCopy;

    ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection,
        HttpRequest httpRequest, boolean disableBufferCopy) {
        super(httpRequest);
        this.reactorNettyResponse = reactorNettyResponse;
        this.reactorNettyConnection = reactorNettyConnection;
        this.disableBufferCopy = disableBufferCopy;
    }

    @Override
    public int getStatusCode() {
        return reactorNettyResponse.status().code();
    }

    @Override
    public String getHeaderValue(String name) {
        return reactorNettyResponse.responseHeaders().get(name);
    }

    @Override
    public HttpHeaders getHeaders() {
        HttpHeaders headers = new HttpHeaders();
        reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue()));
        return headers;
    }

    @Override
    public Flux<ByteBuffer> getBody() {
        // Dispose the connection on the event loop once the stream terminates.
        return bodyIntern().doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        }).map(byteBuf -> this.disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf));
    }

    @Override
    public Mono<byte[]> getBodyAsByteArray() {
        return bodyIntern().aggregate().asByteArray().doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        });
    }

    // Bug fix: a stray duplicated @Override annotation here (left behind when the no-arg
    // getBodyAsString() override was removed/moved) made this class fail to compile.
    // NOTE(review): confirm whether a no-arg getBodyAsString() override is still required by
    // the HttpResponse base class or has a suitable default.
    @Override
    public Mono<String> getBodyAsString(Charset charset) {
        return bodyIntern().aggregate().asString(charset).doFinally(s -> {
            if (!reactorNettyConnection.isDisposed()) {
                reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
            }
        });
    }

    @Override
    public void close() {
        if (!reactorNettyConnection.isDisposed()) {
            reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose);
        }
    }

    private ByteBufFlux bodyIntern() {
        return reactorNettyConnection.inbound().receive();
    }

    Connection internConnection() {
        return reactorNettyConnection;
    }

    private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) {
        ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes());
        byteBuf.readBytes(buffer);
        buffer.rewind();
        return buffer;
    }
}
|
Yes, sorry (I changed it but then accidentally rolled it back together with the type changes for the other comment).
|
public void testMetadataOperationLogged() throws IOException {
TestingStateChangelogWriter writer = new TestingStateChangelogWriter();
InternalKeyContextImpl<String> keyContext =
new InternalKeyContextImpl<>(KeyGroupRange.of(1, 1000), 1000);
StateChangeLogger<String, Namespace> logger = getLogger(writer, keyContext);
List<Tuple2<Integer, StateChangeOperation>> expectedAppends = new ArrayList<>();
expectedAppends.add(
Tuple2.of(-1 /* see AbstractStateChangeLogger
int numOpTypes = StateChangeOperation.values().length;
for (int i = 0; i < numOpTypes * 7; i++) {
String element = Integer.toString(i);
StateChangeOperation operation = StateChangeOperation.byCode((byte) (i % numOpTypes));
log(operation, element, logger, keyContext).ifPresent(expectedAppends::add);
}
assertEquals(expectedAppends, writer.appends);
}
|
Tuple2.of(-1 /* see AbstractStateChangeLogger
|
// Verifies that the metadata record is appended on first state access, followed
// by one (keyGroup, op) append per logged operation.
public void testMetadataOperationLogged() throws IOException {
TestingStateChangelogWriter writer = new TestingStateChangelogWriter();
InternalKeyContextImpl<String> keyContext =
new InternalKeyContextImpl<>(KeyGroupRange.of(1, 1000), 1000);
StateChangeLogger<String, Namespace> logger = getLogger(writer, keyContext);
List<Tuple2<Integer, StateChangeOperation>> expectedAppends = new ArrayList<>();
// First state access is expected to append a METADATA record in the common key group.
expectedAppends.add(Tuple2.of(COMMON_KEY_GROUP, METADATA));
int numOpTypes = StateChangeOperation.values().length;
// Cycle through every operation type several times.
for (int i = 0; i < numOpTypes * 7; i++) {
String element = Integer.toString(i);
StateChangeOperation operation = StateChangeOperation.byCode((byte) (i % numOpTypes));
log(operation, element, logger, keyContext).ifPresent(expectedAppends::add);
}
assertEquals(expectedAppends, writer.appends);
}
|
/**
 * Base class for {@link StateChangeLogger} tests. Subclasses provide the concrete
 * logger under test and the namespace to use for a given element.
 *
 * <p>Fixes: the class declares abstract methods, so it must itself be declared
 * abstract; also removed a stray {@code @Test} annotation (and its orphaned doc
 * comment) that were left dangling on the abstract factory method — JUnit test
 * methods may not be abstract.
 */
abstract class StateChangeLoggerTestBase<Namespace> {

    /** Creates the logger under test, writing through the given writer. */
    protected abstract StateChangeLogger<String, Namespace> getLogger(
            TestingStateChangelogWriter writer, InternalKeyContextImpl<String> keyContext);

    /**
     * Applies {@code op} to the logger for {@code element} and returns the
     * (keyGroup, op) pair expected to be appended, or empty for operations that
     * log nothing.
     */
    protected Optional<Tuple2<Integer, StateChangeOperation>> log(
            StateChangeOperation op,
            String element,
            StateChangeLogger<String, Namespace> logger,
            InternalKeyContextImpl<String> keyContext)
            throws IOException {
        keyContext.setCurrentKey(element);
        Namespace namespace = getNamespace(element);
        switch (op) {
            case ADD:
                logger.valueAdded(element, namespace);
                break;
            case ADD_ELEMENT:
                logger.valueElementAdded(w -> {}, namespace);
                break;
            case REMOVE_ELEMENT:
                logger.valueElementRemoved(w -> {}, namespace);
                break;
            case CLEAR:
                logger.valueCleared(namespace);
                break;
            case SET:
                logger.valueUpdated(element, namespace);
                break;
            case SET_INTERNAL:
                logger.valueUpdatedInternal(element, namespace);
                break;
            case ADD_OR_UPDATE_ELEMENT:
                logger.valueElementAddedOrUpdated(w -> {}, namespace);
                break;
            default:
                return Optional.empty();
        }
        return Optional.of(Tuple2.of(keyContext.getCurrentKeyGroupIndex(), op));
    }

    protected abstract Namespace getNamespace(String element);

    /** Records (keyGroup, operation) pairs for every append; persistence methods are unsupported. */
    @SuppressWarnings("rawtypes")
    protected static class TestingStateChangelogWriter implements StateChangelogWriter {
        private final List<Tuple2<Integer, StateChangeOperation>> appends = new ArrayList<>();

        @Override
        public void append(int keyGroup, byte[] value) {
            // The first byte of the serialized change encodes the operation.
            appends.add(Tuple2.of(keyGroup, StateChangeOperation.byCode(value[0])));
        }

        @Override
        public SequenceNumber lastAppendedSequenceNumber() {
            throw new UnsupportedOperationException();
        }

        @Override
        public CompletableFuture<?> persist(SequenceNumber from) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void truncate(SequenceNumber to) {}

        @Override
        public void confirm(SequenceNumber from, SequenceNumber to) {}

        @Override
        public void reset(SequenceNumber from, SequenceNumber to) {}

        @Override
        public void close() {}
    }
}
|
// Base class for StateChangeLogger tests; subclasses provide the logger under
// test and the namespace for a given element.
// NOTE(review): the class declares abstract methods but is not itself declared
// abstract, and the @Test below sits on an abstract method (its doc comment
// appears to belong to an excised test method) — confirm against the full file.
class StateChangeLoggerTestBase<Namespace> {
/** A basic test for appending the metadata on first state access. */
@Test
protected abstract StateChangeLogger<String, Namespace> getLogger(
TestingStateChangelogWriter writer, InternalKeyContextImpl<String> keyContext);
// Applies op to the logger and returns the (keyGroup, op) pair expected to be
// appended, or empty for operations that log nothing.
protected Optional<Tuple2<Integer, StateChangeOperation>> log(
StateChangeOperation op,
String element,
StateChangeLogger<String, Namespace> logger,
InternalKeyContextImpl<String> keyContext)
throws IOException {
keyContext.setCurrentKey(element);
Namespace namespace = getNamespace(element);
switch (op) {
case ADD:
logger.valueAdded(element, namespace);
break;
case ADD_ELEMENT:
logger.valueElementAdded(w -> {}, namespace);
break;
case REMOVE_ELEMENT:
logger.valueElementRemoved(w -> {}, namespace);
break;
case CLEAR:
logger.valueCleared(namespace);
break;
case SET:
logger.valueUpdated(element, namespace);
break;
case SET_INTERNAL:
logger.valueUpdatedInternal(element, namespace);
break;
case ADD_OR_UPDATE_ELEMENT:
logger.valueElementAddedOrUpdated(w -> {}, namespace);
break;
default:
return Optional.empty();
}
return Optional.of(Tuple2.of(keyContext.getCurrentKeyGroupIndex(), op));
}
protected abstract Namespace getNamespace(String element);
// Records (keyGroup, operation) pairs for every append; persistence methods are unsupported.
@SuppressWarnings("rawtypes")
protected static class TestingStateChangelogWriter implements StateChangelogWriter {
private final List<Tuple2<Integer, StateChangeOperation>> appends = new ArrayList<>();
@Override
public void append(int keyGroup, byte[] value) {
// The first byte of the serialized change encodes the operation.
appends.add(Tuple2.of(keyGroup, StateChangeOperation.byCode(value[0])));
}
@Override
public SequenceNumber lastAppendedSequenceNumber() {
throw new UnsupportedOperationException();
}
@Override
public CompletableFuture<?> persist(SequenceNumber from) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void truncate(SequenceNumber to) {}
@Override
public void confirm(SequenceNumber from, SequenceNumber to) {}
@Override
public void reset(SequenceNumber from, SequenceNumber to) {}
@Override
public void close() {}
}
}
|
:( Waiting for a brain reboot
|
// Best-effort pass: patches traffic share and BCP group info for every
// production deployment, counting failures rather than aborting on the first one.
protected double maintain() {
Exception lastException = null;
int attempts = 0;
int failures = 0;
var metrics = collectClusterMetrics();
for (var application : applications.asList()) {
for (var instance : application.instances().values()) {
for (var deployment : instance.productionDeployments().values()) {
if (shuttingDown()) return 0.0;
try {
attempts++;
var bcpGroups = BcpGroup.groupsFrom(instance, application.deploymentSpec());
var patch = new ApplicationPatch();
addTrafficShare(deployment, bcpGroups, patch);
addBcpGroupInfo(deployment.zone().region(), metrics.get(instance.id()), bcpGroups, patch);
nodeRepository.patchApplication(deployment.zone(), instance.id(), patch);
}
catch (Exception e) {
failures++;
lastException = e;
}
}
}
}
double successFactorDeviation = asSuccessFactorDeviation(attempts, failures);
// NOTE(review): these comparisons assume the deviation grows positive with the
// failure rate; if asSuccessFactorDeviation instead returns negative values on
// failure, neither branch ever fires — confirm the sign convention.
if ( successFactorDeviation == 1.0 )
log.log(Level.WARNING, "Could not update traffic share on any applications", lastException);
else if ( successFactorDeviation > 0.1 )
log.log(Level.FINE, "Could not update traffic share on all applications", lastException);
return successFactorDeviation;
}
|
log.log(Level.FINE, "Could not update traffic share on all applications", lastException);
|
// Best-effort pass: patches traffic share and BCP group info for every
// production deployment, counting failures rather than aborting on the first one.
protected double maintain() {
Exception lastException = null;
int attempts = 0;
int failures = 0;
var metrics = collectClusterMetrics();
for (var application : applications.asList()) {
for (var instance : application.instances().values()) {
for (var deployment : instance.productionDeployments().values()) {
if (shuttingDown()) return 0.0;
try {
attempts++;
var bcpGroups = BcpGroup.groupsFrom(instance, application.deploymentSpec());
var patch = new ApplicationPatch();
addTrafficShare(deployment, bcpGroups, patch);
addBcpGroupInfo(deployment.zone().region(), metrics.get(instance.id()), bcpGroups, patch);
nodeRepository.patchApplication(deployment.zone(), instance.id(), patch);
}
catch (Exception e) {
failures++;
lastException = e;
}
}
}
}
double successFactorDeviation = asSuccessFactorDeviation(attempts, failures);
// Per the comparisons below, the deviation goes negative as failures accumulate;
// -successFactorBaseline corresponds to every attempt having failed.
if ( successFactorDeviation == -successFactorBaseline )
log.log(Level.WARNING, "Could not update traffic share on any applications", lastException);
else if ( successFactorDeviation < -0.1 )
log.log(Level.FINE, "Could not update traffic share on all applications", lastException);
return successFactorDeviation;
}
|
/**
 * Computes and pushes traffic-share and BCP group information to the node
 * repository for every production deployment.
 *
 * <p>Fix: removed a dangling {@code @Override} annotation that was left in front
 * of the private {@code addTrafficShare} helper — private methods cannot
 * override anything, so the annotation is a compile error.
 */
class BcpGroupUpdater extends ControllerMaintainer {

    private final ApplicationController applications;
    private final NodeRepository nodeRepository;

    public BcpGroupUpdater(Controller controller, Duration duration, Double successFactorBaseline) {
        super(controller, duration, successFactorBaseline);
        this.applications = controller.applications();
        this.nodeRepository = controller.serviceRegistry().configServer().nodeRepository();
    }

    /** Adds deployment traffic share to the given patch. */
    private void addTrafficShare(Deployment deployment, List<BcpGroup> bcpGroups, ApplicationPatch patch) {
        double currentReadShare = 0;
        double maxReadShare = 0;
        for (BcpGroup group : bcpGroups) {
            if ( ! group.contains(deployment.zone().region())) continue;
            double deploymentQps = deployment.metrics().queriesPerSecond();
            double groupQps = group.totalQps();
            double fraction = group.fraction(deployment.zone().region());
            currentReadShare += groupQps == 0 ? 0 : fraction * deploymentQps / groupQps;
            // Singleton groups cannot take over traffic from anyone; otherwise add the
            // share this deployment would absorb if the busiest other member failed.
            maxReadShare += group.size() == 1
                            ? currentReadShare
                            : fraction * ( deploymentQps + group.maxQpsExcluding(deployment.zone().region()) / (group.size() - 1) ) / groupQps;
        }
        patch.currentReadShare = currentReadShare;
        patch.maxReadShare = maxReadShare;
    }

    /** Collects per-cluster deployment metrics for all production deployments. */
    private Map<ApplicationId, Map<ClusterSpec.Id, ClusterDeploymentMetrics>> collectClusterMetrics() {
        Map<ApplicationId, Map<ClusterSpec.Id, ClusterDeploymentMetrics>> metrics = new HashMap<>();
        for (var deploymentEntry : new HashMap<>(controller().applications().deploymentInfo()).entrySet()) {
            if ( ! deploymentEntry.getKey().zoneId().environment().isProduction()) continue;
            var appEntry = metrics.computeIfAbsent(deploymentEntry.getKey().applicationId(), __ -> new HashMap<>());
            for (var clusterEntry : deploymentEntry.getValue().clusters().entrySet()) {
                var clusterMetrics = appEntry.computeIfAbsent(clusterEntry.getKey(), __ -> new ClusterDeploymentMetrics());
                clusterMetrics.put(deploymentEntry.getKey().zoneId().region(),
                                   new DeploymentMetrics(clusterEntry.getValue().target().metrics().queryRate(),
                                                         clusterEntry.getValue().target().metrics().growthRateHeadroom(),
                                                         clusterEntry.getValue().target().metrics().cpuCostPerQuery()));
            }
        }
        return metrics;
    }

    /** Adds bcp group info to the given patch, for any clusters where we have information. */
    private void addBcpGroupInfo(RegionName regionToUpdate, Map<ClusterSpec.Id, ClusterDeploymentMetrics> metrics,
                                 List<BcpGroup> bcpGroups, ApplicationPatch patch) {
        if (metrics == null) return;
        for (var clusterEntry : metrics.entrySet()) {
            addClusterBcpGroupInfo(clusterEntry.getKey(), clusterEntry.getValue(), regionToUpdate, bcpGroups, patch);
        }
    }

    private void addClusterBcpGroupInfo(ClusterSpec.Id id, ClusterDeploymentMetrics metrics,
                                        RegionName regionToUpdate, List<BcpGroup> bcpGroups, ApplicationPatch patch) {
        var weightedSumOfMaxMetrics = DeploymentMetrics.empty();
        double sumOfCompleteMemberships = 0;
        for (BcpGroup bcpGroup : bcpGroups) {
            if ( ! bcpGroup.contains(regionToUpdate)) continue;
            var groupMetrics = metrics.subsetOf(bcpGroup);
            // Only groups with complete metrics for all other members contribute.
            if ( ! groupMetrics.isCompleteExcluding(regionToUpdate, bcpGroup)) continue;
            var max = groupMetrics.maxQueryRateExcluding(regionToUpdate, bcpGroup);
            if (max.isEmpty()) continue;
            weightedSumOfMaxMetrics = weightedSumOfMaxMetrics.add(max.get().multipliedBy(bcpGroup.fraction(regionToUpdate)));
            sumOfCompleteMemberships += bcpGroup.fraction(regionToUpdate);
        }
        if (sumOfCompleteMemberships > 0)
            patch.clusters.put(id.value(), weightedSumOfMaxMetrics.dividedBy(sumOfCompleteMemberships).asClusterPatch());
    }

    /**
     * A set of regions which will take over traffic from each other if one of them fails.
     * Each region will take an equal share (modulated by fraction) of the failing region's traffic.
     *
     * A region's membership in a group may be partial, represented by a fraction [0, 1],
     * in which case the other regions will collectively only take that fraction of the failing region's traffic,
     * and symmetrically, the region will only take its fraction of its share of traffic of any other failing region.
     */
    private static class BcpGroup {

        /** The instance which has this group. */
        private final Instance instance;

        /** Regions in this group, with their fractions. */
        private final Map<RegionName, Double> regions;

        /** Creates a group of a subset of the deployments in this instance. */
        private BcpGroup(Instance instance, Map<RegionName, Double> regions) {
            this.instance = instance;
            this.regions = regions;
        }

        /** Returns the sum of the fractional memberships of this. */
        double size() {
            return regions.values().stream().mapToDouble(f -> f).sum();
        }

        Set<RegionName> regions() { return regions.keySet(); }

        double fraction(RegionName region) {
            return regions.getOrDefault(region, 0.0);
        }

        boolean contains(RegionName region) {
            return regions.containsKey(region);
        }

        double totalQps() {
            return instance.productionDeployments().values().stream()
                           .mapToDouble(i -> i.metrics().queriesPerSecond()).sum();
        }

        double maxQpsExcluding(RegionName region) {
            return instance.productionDeployments().values().stream()
                           .filter(d -> ! d.zone().region().equals(region))
                           .mapToDouble(d -> d.metrics().queriesPerSecond() * fraction(d.zone().region()))
                           .max()
                           .orElse(0);
        }

        private static Bcp bcpOf(InstanceName instanceName, DeploymentSpec deploymentSpec) {
            var instanceSpec = deploymentSpec.instance(instanceName);
            if (instanceSpec.isEmpty()) return deploymentSpec.bcp();
            return instanceSpec.get().bcp().orElse(deploymentSpec.bcp());
        }

        private static Map<RegionName, Double> regionsFrom(Instance instance) {
            return instance.productionDeployments().values().stream()
                           .collect(Collectors.toMap(deployment -> deployment.zone().region(), __ -> 1.0));
        }

        private static Map<RegionName, Double> regionsFrom(Bcp.Group groupSpec) {
            return groupSpec.members().stream()
                            .collect(Collectors.toMap(member -> member.region(), member -> member.fraction()));
        }

        /** With no explicit bcp spec, all production regions form one implicit group. */
        static List<BcpGroup> groupsFrom(Instance instance, DeploymentSpec deploymentSpec) {
            Bcp bcp = bcpOf(instance.name(), deploymentSpec);
            if (bcp.isEmpty())
                return List.of(new BcpGroup(instance, regionsFrom(instance)));
            return bcp.groups().stream().map(groupSpec -> new BcpGroup(instance, regionsFrom(groupSpec))).toList();
        }

    }

    record ApplicationClusterKey(ApplicationId application, ClusterSpec.Id cluster) { }

    static class ClusterDeploymentMetrics {

        private final Map<RegionName, DeploymentMetrics> deploymentMetrics;

        public ClusterDeploymentMetrics() {
            this.deploymentMetrics = new ConcurrentHashMap<>();
        }

        public ClusterDeploymentMetrics(Map<RegionName, DeploymentMetrics> deploymentMetrics) {
            this.deploymentMetrics = new ConcurrentHashMap<>(deploymentMetrics);
        }

        void put(RegionName region, DeploymentMetrics metrics) {
            deploymentMetrics.put(region, metrics);
        }

        ClusterDeploymentMetrics subsetOf(BcpGroup group) {
            Map<RegionName, DeploymentMetrics> filteredMetrics = new HashMap<>();
            for (var entry : deploymentMetrics.entrySet()) {
                if (group.contains(entry.getKey()))
                    filteredMetrics.put(entry.getKey(), entry.getValue());
            }
            return new ClusterDeploymentMetrics(filteredMetrics);
        }

        /** Returns whether this has deployment metrics for each of the deployments in the given instance. */
        boolean isCompleteExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
            return regionsExcluding(regionToExclude, bcpGroup).allMatch(region -> deploymentMetrics.containsKey(region));
        }

        /** Returns the metrics with the max query rate among the given instance, if any. */
        Optional<DeploymentMetrics> maxQueryRateExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
            return regionsExcluding(regionToExclude, bcpGroup)
                           .map(region -> deploymentMetrics.get(region))
                           .max(Comparator.comparingDouble(m -> m.queryRate));
        }

        private Stream<RegionName> regionsExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
            return bcpGroup.regions().stream()
                           .filter(region -> ! region.equals(regionToExclude));
        }

    }

    /** Metrics for a given application, cluster and deployment. */
    record DeploymentMetrics(double queryRate, double growthRateHeadroom, double cpuCostPerQuery) {

        public ApplicationPatch.ClusterPatch asClusterPatch() {
            return new ApplicationPatch.ClusterPatch(new ApplicationPatch.BcpGroupInfo(queryRate, growthRateHeadroom, cpuCostPerQuery));
        }

        DeploymentMetrics dividedBy(double d) {
            return new DeploymentMetrics(queryRate / d, growthRateHeadroom / d, cpuCostPerQuery / d);
        }

        DeploymentMetrics multipliedBy(double m) {
            return new DeploymentMetrics(queryRate * m, growthRateHeadroom * m, cpuCostPerQuery * m);
        }

        DeploymentMetrics add(DeploymentMetrics other) {
            return new DeploymentMetrics(queryRate + other.queryRate,
                                         growthRateHeadroom + other.growthRateHeadroom,
                                         cpuCostPerQuery + other.cpuCostPerQuery);
        }

        public static DeploymentMetrics empty() { return new DeploymentMetrics(0, 0, 0); }

    }

}
|
class BcpGroupUpdater extends ControllerMaintainer {
private final ApplicationController applications;
private final NodeRepository nodeRepository;
private final Double successFactorBaseline;
public BcpGroupUpdater(Controller controller, Duration duration, Double successFactorBaseline) {
super(controller, duration, successFactorBaseline);
this.applications = controller.applications();
this.nodeRepository = controller.serviceRegistry().configServer().nodeRepository();
this.successFactorBaseline = successFactorBaseline;
}
@Override
/** Adds deployment traffic share to the given patch. */
private void addTrafficShare(Deployment deployment, List<BcpGroup> bcpGroups, ApplicationPatch patch) {
double currentReadShare = 0;
double maxReadShare = 0;
for (BcpGroup group : bcpGroups) {
if ( ! group.contains(deployment.zone().region())) continue;
double deploymentQps = deployment.metrics().queriesPerSecond();
double groupQps = group.totalQps();
double fraction = group.fraction(deployment.zone().region());
currentReadShare += groupQps == 0 ? 0 : fraction * deploymentQps / groupQps;
maxReadShare += group.size() == 1
? currentReadShare
: fraction * ( deploymentQps + group.maxQpsExcluding(deployment.zone().region()) / (group.size() - 1) ) / groupQps;
}
patch.currentReadShare = currentReadShare;
patch.maxReadShare = maxReadShare;
}
private Map<ApplicationId, Map<ClusterSpec.Id, ClusterDeploymentMetrics>> collectClusterMetrics() {
Map<ApplicationId, Map<ClusterSpec.Id, ClusterDeploymentMetrics>> metrics = new HashMap<>();
for (var deploymentEntry : new HashMap<>(controller().applications().deploymentInfo()).entrySet()) {
if ( ! deploymentEntry.getKey().zoneId().environment().isProduction()) continue;
var appEntry = metrics.computeIfAbsent(deploymentEntry.getKey().applicationId(), __ -> new HashMap<>());
for (var clusterEntry : deploymentEntry.getValue().clusters().entrySet()) {
var clusterMetrics = appEntry.computeIfAbsent(clusterEntry.getKey(), __ -> new ClusterDeploymentMetrics());
clusterMetrics.put(deploymentEntry.getKey().zoneId().region(),
new DeploymentMetrics(clusterEntry.getValue().target().metrics().queryRate(),
clusterEntry.getValue().target().metrics().growthRateHeadroom(),
clusterEntry.getValue().target().metrics().cpuCostPerQuery()));
}
}
return metrics;
}
/** Adds bcp group info to the given patch, for any clusters where we have information. */
private void addBcpGroupInfo(RegionName regionToUpdate, Map<ClusterSpec.Id, ClusterDeploymentMetrics> metrics,
List<BcpGroup> bcpGroups, ApplicationPatch patch) {
if (metrics == null) return;
for (var clusterEntry : metrics.entrySet()) {
addClusterBcpGroupInfo(clusterEntry.getKey(), clusterEntry.getValue(), regionToUpdate, bcpGroups, patch);
}
}
private void addClusterBcpGroupInfo(ClusterSpec.Id id, ClusterDeploymentMetrics metrics,
RegionName regionToUpdate, List<BcpGroup> bcpGroups, ApplicationPatch patch) {
var weightedSumOfMaxMetrics = DeploymentMetrics.empty();
double sumOfCompleteMemberships = 0;
for (BcpGroup bcpGroup : bcpGroups) {
if ( ! bcpGroup.contains(regionToUpdate)) continue;
var groupMetrics = metrics.subsetOf(bcpGroup);
if ( ! groupMetrics.isCompleteExcluding(regionToUpdate, bcpGroup)) continue;
var max = groupMetrics.maxQueryRateExcluding(regionToUpdate, bcpGroup);
if (max.isEmpty()) continue;
weightedSumOfMaxMetrics = weightedSumOfMaxMetrics.add(max.get().multipliedBy(bcpGroup.fraction(regionToUpdate)));
sumOfCompleteMemberships += bcpGroup.fraction(regionToUpdate);
}
if (sumOfCompleteMemberships > 0)
patch.clusters.put(id.value(), weightedSumOfMaxMetrics.dividedBy(sumOfCompleteMemberships).asClusterPatch());
}
/**
* A set of regions which will take over traffic from each other if one of them fails.
* Each region will take an equal share (modulated by fraction) of the failing region's traffic.
*
* A regions membership in a group may be partial, represented by a fraction [0, 1],
* in which case the other regions will collectively only take that fraction of the failing regions traffic,
* and symmetrically, the region will only take its fraction of its share of traffic of any other failing region.
*/
private static class BcpGroup {
/** The instance which has this group. */
private final Instance instance;
/** Regions in this group, with their fractions. */
private final Map<RegionName, Double> regions;
/** Creates a group of a subset of the deployments in this instance. */
private BcpGroup(Instance instance, Map<RegionName, Double> regions) {
this.instance = instance;
this.regions = regions;
}
/** Returns the sum of the fractional memberships of this. */
double size() {
return regions.values().stream().mapToDouble(f -> f).sum();
}
Set<RegionName> regions() { return regions.keySet(); }
double fraction(RegionName region) {
return regions.getOrDefault(region, 0.0);
}
boolean contains(RegionName region) {
return regions.containsKey(region);
}
double totalQps() {
return instance.productionDeployments().values().stream()
.mapToDouble(i -> i.metrics().queriesPerSecond()).sum();
}
double maxQpsExcluding(RegionName region) {
return instance.productionDeployments().values().stream()
.filter(d -> ! d.zone().region().equals(region))
.mapToDouble(d -> d.metrics().queriesPerSecond() * fraction(d.zone().region()))
.max()
.orElse(0);
}
private static Bcp bcpOf(InstanceName instanceName, DeploymentSpec deploymentSpec) {
var instanceSpec = deploymentSpec.instance(instanceName);
if (instanceSpec.isEmpty()) return deploymentSpec.bcp();
return instanceSpec.get().bcp().orElse(deploymentSpec.bcp());
}
private static Map<RegionName, Double> regionsFrom(Instance instance) {
return instance.productionDeployments().values().stream()
.collect(Collectors.toMap(deployment -> deployment.zone().region(), __ -> 1.0));
}
private static Map<RegionName, Double> regionsFrom(Bcp.Group groupSpec) {
return groupSpec.members().stream()
.collect(Collectors.toMap(member -> member.region(), member -> member.fraction()));
}
static List<BcpGroup> groupsFrom(Instance instance, DeploymentSpec deploymentSpec) {
Bcp bcp = bcpOf(instance.name(), deploymentSpec);
if (bcp.isEmpty())
return List.of(new BcpGroup(instance, regionsFrom(instance)));
return bcp.groups().stream().map(groupSpec -> new BcpGroup(instance, regionsFrom(groupSpec))).toList();
}
}
record ApplicationClusterKey(ApplicationId application, ClusterSpec.Id cluster) { }
static class ClusterDeploymentMetrics {
private final Map<RegionName, DeploymentMetrics> deploymentMetrics;
public ClusterDeploymentMetrics() {
this.deploymentMetrics = new ConcurrentHashMap<>();
}
public ClusterDeploymentMetrics(Map<RegionName, DeploymentMetrics> deploymentMetrics) {
this.deploymentMetrics = new ConcurrentHashMap<>(deploymentMetrics);
}
void put(RegionName region, DeploymentMetrics metrics) {
deploymentMetrics.put(region, metrics);
}
ClusterDeploymentMetrics subsetOf(BcpGroup group) {
Map<RegionName, DeploymentMetrics> filteredMetrics = new HashMap<>();
for (var entry : deploymentMetrics.entrySet()) {
if (group.contains(entry.getKey()))
filteredMetrics.put(entry.getKey(), entry.getValue());
}
return new ClusterDeploymentMetrics(filteredMetrics);
}
/** Returns whether this has deployment metrics for each of the deployments in the given instance. */
boolean isCompleteExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
return regionsExcluding(regionToExclude, bcpGroup).allMatch(region -> deploymentMetrics.containsKey(region));
}
/** Returns the metrics with the max query rate among the given instance, if any. */
Optional<DeploymentMetrics> maxQueryRateExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
return regionsExcluding(regionToExclude, bcpGroup)
.map(region -> deploymentMetrics.get(region))
.max(Comparator.comparingDouble(m -> m.queryRate));
}
private Stream<RegionName> regionsExcluding(RegionName regionToExclude, BcpGroup bcpGroup) {
return bcpGroup.regions().stream()
.filter(region -> ! region.equals(regionToExclude));
}
}
/** Metrics for a given application, cluster and deployment. */
record DeploymentMetrics(double queryRate, double growthRateHeadroom, double cpuCostPerQuery) {
public ApplicationPatch.ClusterPatch asClusterPatch() {
return new ApplicationPatch.ClusterPatch(new ApplicationPatch.BcpGroupInfo(queryRate, growthRateHeadroom, cpuCostPerQuery));
}
DeploymentMetrics dividedBy(double d) {
return new DeploymentMetrics(queryRate / d, growthRateHeadroom / d, cpuCostPerQuery / d);
}
DeploymentMetrics multipliedBy(double m) {
return new DeploymentMetrics(queryRate * m, growthRateHeadroom * m, cpuCostPerQuery * m);
}
DeploymentMetrics add(DeploymentMetrics other) {
return new DeploymentMetrics(queryRate + other.queryRate,
growthRateHeadroom + other.growthRateHeadroom,
cpuCostPerQuery + other.cpuCostPerQuery);
}
public static DeploymentMetrics empty() { return new DeploymentMetrics(0, 0, 0); }
}
}
|
By just looking at the code, I would say `newProducedType` should be initialized with only the physical columns. Not sure what the tests say to this. But `toSourceRowDataType` also includes computed columns, which makes no sense for the DynamicTableSource, as the source doesn't know this concept. After the specs are applied in the loop, the type changes to `PHYSICAL COLUMNS + METADATA COLUMNS`. In any case, the produced type should always contain what comes out of the source (not the scan).
|
// Lazily creates (and caches) the DynamicTableSource, then re-applies any
// serialized source abilities to restore the planner-optimized source.
private DynamicTableSource getTableSource(FlinkContext context, FlinkTypeFactory typeFactory) {
    if (tableSource == null) {
        DynamicTableSourceFactory factory =
                context.getModuleManager()
                        .getFactory(Module::getTableSourceFactory)
                        .orElse(null);
        if (factory == null) {
            // No module-provided factory: fall back to catalog-based discovery.
            Catalog catalog =
                    context.getCatalogManager()
                            .getCatalog(contextResolvedTable.getIdentifier().getCatalogName())
                            .orElse(null);
            factory =
                    FactoryUtil.getDynamicTableFactory(DynamicTableSourceFactory.class, catalog)
                            .orElse(null);
        }
        tableSource =
                FactoryUtil.createDynamicTableSource(
                        factory,
                        contextResolvedTable.getIdentifier(),
                        contextResolvedTable.getResolvedTable(),
                        loadOptionsFromCatalogTable(contextResolvedTable, context),
                        context.getTableConfig(),
                        context.getClassLoader(),
                        contextResolvedTable.isTemporary());
        if (sourceAbilities != null) {
            // Fix: the produced type must describe what the source emits, i.e. the
            // physical columns only. toSourceRowDataType also includes computed
            // columns — a concept the DynamicTableSource does not know about.
            RowType newProducedType =
                    (RowType)
                            contextResolvedTable
                                    .getResolvedSchema()
                                    .toPhysicalRowDataType()
                                    .getLogicalType();
            for (SourceAbilitySpec spec : sourceAbilities) {
                SourceAbilityContext sourceAbilityContext =
                        new SourceAbilityContext(context, typeFactory, newProducedType);
                spec.apply(tableSource, sourceAbilityContext);
                // Specs such as projection/metadata push-down change the produced type.
                if (spec.getProducedType().isPresent()) {
                    newProducedType = spec.getProducedType().get();
                }
            }
        }
    }
    return tableSource;
}
|
.getLogicalType();
|
// Lazily creates (and caches) the DynamicTableSource, then re-applies any
// serialized source abilities to restore the planner-optimized source.
private DynamicTableSource getTableSource(FlinkContext context, FlinkTypeFactory typeFactory) {
if (tableSource == null) {
DynamicTableSourceFactory factory =
context.getModuleManager()
.getFactory(Module::getTableSourceFactory)
.orElse(null);
// No module-provided factory: fall back to catalog-based discovery.
if (factory == null) {
Catalog catalog =
context.getCatalogManager()
.getCatalog(contextResolvedTable.getIdentifier().getCatalogName())
.orElse(null);
factory =
FactoryUtil.getDynamicTableFactory(DynamicTableSourceFactory.class, catalog)
.orElse(null);
}
tableSource =
FactoryUtil.createDynamicTableSource(
factory,
contextResolvedTable.getIdentifier(),
contextResolvedTable.getResolvedTable(),
loadOptionsFromCatalogTable(contextResolvedTable, context),
context.getTableConfig(),
context.getClassLoader(),
contextResolvedTable.isTemporary());
if (sourceAbilities != null) {
// Start from the physical columns only (what the source actually emits);
// applied specs may then change the produced type.
RowType newProducedType =
(RowType)
contextResolvedTable
.getResolvedSchema()
.toPhysicalRowDataType()
.getLogicalType();
for (SourceAbilitySpec spec : sourceAbilities) {
SourceAbilityContext sourceAbilityContext =
new SourceAbilityContext(context, typeFactory, newProducedType);
spec.apply(tableSource, sourceAbilityContext);
if (spec.getProducedType().isPresent()) {
newProducedType = spec.getProducedType().get();
}
}
}
}
return tableSource;
}
|
// JSON-serializable description of a DynamicTableSource: the resolved table plus
// the source abilities applied by the planner.
class DynamicTableSourceSpec extends DynamicTableSpecBase {
public static final String FIELD_NAME_CATALOG_TABLE = "table";
public static final String FIELD_NAME_SOURCE_ABILITIES = "abilities";
private final ContextResolvedTable contextResolvedTable;
private final @Nullable List<SourceAbilitySpec> sourceAbilities;
// Lazily created / externally injected; not serialized.
// NOTE(review): this mutable field participates in equals/hashCode below —
// confirm instances are never used as hash keys before the source is set.
private DynamicTableSource tableSource;
@JsonCreator
public DynamicTableSourceSpec(
@JsonProperty(FIELD_NAME_CATALOG_TABLE) ContextResolvedTable contextResolvedTable,
@Nullable @JsonProperty(FIELD_NAME_SOURCE_ABILITIES)
List<SourceAbilitySpec> sourceAbilities) {
this.contextResolvedTable = contextResolvedTable;
this.sourceAbilities = sourceAbilities;
}
// Returns the source as a ScanTableSource, failing if it is of another kind.
public ScanTableSource getScanTableSource(FlinkContext context, FlinkTypeFactory typeFactory) {
DynamicTableSource tableSource = getTableSource(context, typeFactory);
if (tableSource instanceof ScanTableSource) {
return (ScanTableSource) tableSource;
} else {
throw new TableException(
String.format(
"%s is not a ScanTableSource.\nPlease check it.",
tableSource.getClass().getName()));
}
}
// Returns the source as a LookupTableSource, failing if it is of another kind.
public LookupTableSource getLookupTableSource(
FlinkContext context, FlinkTypeFactory typeFactory) {
DynamicTableSource tableSource = getTableSource(context, typeFactory);
if (tableSource instanceof LookupTableSource) {
return (LookupTableSource) tableSource;
} else {
throw new TableException(
String.format(
"%s is not a LookupTableSource.\nPlease check it.",
tableSource.getClass().getName()));
}
}
@JsonGetter(FIELD_NAME_CATALOG_TABLE)
public ContextResolvedTable getContextResolvedTable() {
return contextResolvedTable;
}
@JsonGetter(FIELD_NAME_SOURCE_ABILITIES)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@Nullable
public List<SourceAbilitySpec> getSourceAbilities() {
return sourceAbilities;
}
public void setTableSource(DynamicTableSource tableSource) {
this.tableSource = tableSource;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DynamicTableSourceSpec that = (DynamicTableSourceSpec) o;
return Objects.equals(contextResolvedTable, that.contextResolvedTable)
&& Objects.equals(sourceAbilities, that.sourceAbilities)
&& Objects.equals(tableSource, that.tableSource);
}
@Override
public int hashCode() {
return Objects.hash(contextResolvedTable, sourceAbilities, tableSource);
}
@Override
public String toString() {
return "DynamicTableSourceSpec{"
+ "contextResolvedTable="
+ contextResolvedTable
+ ", sourceAbilities="
+ sourceAbilities
+ ", tableSource="
+ tableSource
+ '}';
}
}
|
/**
 * Serializable description of a {@link DynamicTableSource}, combining the resolved catalog table
 * with the source abilities pushed into it during planning.
 */
class DynamicTableSourceSpec extends DynamicTableSpecBase {
    public static final String FIELD_NAME_CATALOG_TABLE = "table";
    public static final String FIELD_NAME_SOURCE_ABILITIES = "abilities";
    private final ContextResolvedTable contextResolvedTable;
    private final @Nullable List<SourceAbilitySpec> sourceAbilities;
    private DynamicTableSource tableSource;
    @JsonCreator
    public DynamicTableSourceSpec(
            @JsonProperty(FIELD_NAME_CATALOG_TABLE) ContextResolvedTable contextResolvedTable,
            @Nullable @JsonProperty(FIELD_NAME_SOURCE_ABILITIES)
                    List<SourceAbilitySpec> sourceAbilities) {
        this.contextResolvedTable = contextResolvedTable;
        this.sourceAbilities = sourceAbilities;
    }
    /** Returns the materialized source as a {@link ScanTableSource}, failing fast otherwise. */
    public ScanTableSource getScanTableSource(FlinkContext context, FlinkTypeFactory typeFactory) {
        DynamicTableSource source = getTableSource(context, typeFactory);
        if (!(source instanceof ScanTableSource)) {
            throw new TableException(
                    String.format(
                            "%s is not a ScanTableSource.\nPlease check it.",
                            source.getClass().getName()));
        }
        return (ScanTableSource) source;
    }
    /** Returns the materialized source as a {@link LookupTableSource}, failing fast otherwise. */
    public LookupTableSource getLookupTableSource(
            FlinkContext context, FlinkTypeFactory typeFactory) {
        DynamicTableSource source = getTableSource(context, typeFactory);
        if (!(source instanceof LookupTableSource)) {
            throw new TableException(
                    String.format(
                            "%s is not a LookupTableSource.\nPlease check it.",
                            source.getClass().getName()));
        }
        return (LookupTableSource) source;
    }
    @JsonGetter(FIELD_NAME_CATALOG_TABLE)
    public ContextResolvedTable getContextResolvedTable() {
        return contextResolvedTable;
    }
    @JsonGetter(FIELD_NAME_SOURCE_ABILITIES)
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    @Nullable
    public List<SourceAbilitySpec> getSourceAbilities() {
        return sourceAbilities;
    }
    public void setTableSource(DynamicTableSource tableSource) {
        this.tableSource = tableSource;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        DynamicTableSourceSpec other = (DynamicTableSourceSpec) o;
        return Objects.equals(contextResolvedTable, other.contextResolvedTable)
                && Objects.equals(sourceAbilities, other.sourceAbilities)
                && Objects.equals(tableSource, other.tableSource);
    }
    @Override
    public int hashCode() {
        return Objects.hash(contextResolvedTable, sourceAbilities, tableSource);
    }
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("DynamicTableSourceSpec{");
        sb.append("contextResolvedTable=").append(contextResolvedTable);
        sb.append(", sourceAbilities=").append(sourceAbilities);
        sb.append(", tableSource=").append(tableSource);
        return sb.append('}').toString();
    }
}
|
@mssfang please revert the changes where you got rid of the static import such as here. Once you've done that, please let me know what CheckStyle tests fail and we can work to modify that.
|
/**
 * Drains all pending deliveries on the sender link: for each one, notifies {@code msgSender}
 * of the completed send and then settles the delivery.
 *
 * @param event proton event carrying the first delivery to process
 */
public void onDelivery(Event event) {
    Delivery delivery = event.getDelivery();
    while (delivery != null) {
        Sender sender = (Sender) delivery.getLink();
        if (TRACE_LOGGER.isTraceEnabled()) {
            TRACE_LOGGER.trace(
                "onDelivery linkName[" + sender.getName()
                    + "], unsettled[" + sender.getUnsettled() + "], credit[" + sender.getRemoteCredit() + "], deliveryState[" + delivery.getRemoteState()
                    + "], delivery.isBuffered[" + delivery.isBuffered() + "], delivery.id[" + new String(delivery.getTag(), StandardCharsets.UTF_8) + "]");
        }
        // Notify the sender of completion before settling the delivery.
        msgSender.onSendComplete(delivery);
        delivery.settle();
        // sender.current() presumably yields the next unprocessed delivery, or null — TODO confirm.
        delivery = sender.current();
    }
}
|
+ "], delivery.isBuffered[" + delivery.isBuffered() + "], delivery.id[" + new String(delivery.getTag(), StandardCharsets.UTF_8) + "]");
|
/**
 * Processes every pending delivery on the sender link, reporting each completed send to
 * {@code msgSender} and settling the delivery afterwards.
 *
 * @param event proton event carrying the first delivery to process
 */
public void onDelivery(Event event) {
    for (Delivery delivery = event.getDelivery(); delivery != null; delivery = ((Sender) delivery.getLink()).current()) {
        Sender sender = (Sender) delivery.getLink();
        if (TRACE_LOGGER.isTraceEnabled()) {
            TRACE_LOGGER.trace(
                "onDelivery linkName[" + sender.getName()
                    + "], unsettled[" + sender.getUnsettled() + "], credit[" + sender.getRemoteCredit() + "], deliveryState[" + delivery.getRemoteState()
                    + "], delivery.isBuffered[" + delivery.isBuffered() + "], delivery.id[" + new String(delivery.getTag(), UTF_8) + "]");
        }
        msgSender.onSendComplete(delivery);
        delivery.settle();
    }
}
|
/**
 * Proton-J link handler for AMQP send links. Completes the sender's open handshake either when
 * the remote opens the link or on the first credit flow, and forwards subsequent flow credit to
 * the {@link AmqpSender}.
 */
class SendLinkHandler extends BaseLinkHandler {
    private static final Logger TRACE_LOGGER = LoggerFactory.getLogger(SendLinkHandler.class);
    private final AmqpSender msgSender;
    // Guards the one-time open-completion performed on the first flow event.
    private final Object firstFlow;
    // NOTE(review): read outside the lock in onLinkFlow without volatile; assumed to be confined
    // to a single reactor thread — confirm handlers never run concurrently.
    private boolean isFirstFlow;
    public SendLinkHandler(final AmqpSender sender) {
        super(sender);
        this.msgSender = sender;
        this.firstFlow = new Object();
        this.isFirstFlow = true;
    }
    @Override
    public void onLinkLocalOpen(Event event) {
        Link link = event.getLink();
        if (link instanceof Sender) {
            Sender sender = (Sender) link;
            if (TRACE_LOGGER.isInfoEnabled()) {
                TRACE_LOGGER.info(String.format("onLinkLocalOpen linkName[%s], localTarget[%s]", sender.getName(), sender.getTarget()));
            }
        }
    }
    @Override
    public void onLinkRemoteOpen(Event event) {
        Link link = event.getLink();
        if (link instanceof Sender) {
            Sender sender = (Sender) link;
            if (link.getRemoteTarget() != null) {
                if (TRACE_LOGGER.isInfoEnabled()) {
                    TRACE_LOGGER.info(String.format(Locale.US, "onLinkRemoteOpen linkName[%s], remoteTarget[%s]", sender.getName(), link.getRemoteTarget()));
                }
                // Remote target present: the link is usable, complete the open handshake.
                synchronized (this.firstFlow) {
                    this.isFirstFlow = false;
                    this.msgSender.onOpenComplete(null);
                }
            } else {
                if (TRACE_LOGGER.isInfoEnabled()) {
                    TRACE_LOGGER.info(
                            String.format(Locale.US, "onLinkRemoteOpen linkName[%s], remoteTarget[null], remoteSource[null], action[waitingForError]", sender.getName()));
                }
            }
        }
    }
    // Fix: removed a duplicated @Override annotation here — repeating @Override on the same
    // method declaration is a compile error.
    @Override
    public void onLinkFlow(Event event) {
        // Double-checked: complete the open handshake once, on the very first flow.
        if (this.isFirstFlow) {
            synchronized (this.firstFlow) {
                if (this.isFirstFlow) {
                    this.msgSender.onOpenComplete(null);
                    this.isFirstFlow = false;
                }
            }
        }
        Sender sender = event.getSender();
        this.msgSender.onFlow(sender.getRemoteCredit());
        if (TRACE_LOGGER.isDebugEnabled()) {
            TRACE_LOGGER.debug("onLinkFlow linkName[" + sender.getName() + "], unsettled[" + sender.getUnsettled() + "], credit[" + sender.getCredit() + "]");
        }
    }
}
|
/**
 * Proton-J link handler for AMQP send links. Completes the sender's open handshake either when
 * the remote opens the link or on the first credit flow, and forwards subsequent flow credit to
 * the {@link AmqpSender}.
 */
class SendLinkHandler extends BaseLinkHandler {
    private static final Logger TRACE_LOGGER = LoggerFactory.getLogger(SendLinkHandler.class);
    private final AmqpSender msgSender;
    // Guards the one-time open-completion performed on the first flow event.
    private final Object firstFlow;
    // NOTE(review): read outside the lock in onLinkFlow without volatile; assumed to be confined
    // to a single reactor thread — confirm handlers never run concurrently.
    private boolean isFirstFlow;
    public SendLinkHandler(final AmqpSender sender) {
        super(sender);
        this.msgSender = sender;
        this.firstFlow = new Object();
        this.isFirstFlow = true;
    }
    @Override
    public void onLinkLocalOpen(Event event) {
        Link link = event.getLink();
        if (link instanceof Sender) {
            Sender sender = (Sender) link;
            if (TRACE_LOGGER.isInfoEnabled()) {
                TRACE_LOGGER.info(String.format("onLinkLocalOpen linkName[%s], localTarget[%s]", sender.getName(), sender.getTarget()));
            }
        }
    }
    @Override
    public void onLinkRemoteOpen(Event event) {
        Link link = event.getLink();
        if (link instanceof Sender) {
            Sender sender = (Sender) link;
            if (link.getRemoteTarget() != null) {
                if (TRACE_LOGGER.isInfoEnabled()) {
                    TRACE_LOGGER.info(String.format(Locale.US, "onLinkRemoteOpen linkName[%s], remoteTarget[%s]", sender.getName(), link.getRemoteTarget()));
                }
                // Remote target present: the link is usable, complete the open handshake.
                synchronized (this.firstFlow) {
                    this.isFirstFlow = false;
                    this.msgSender.onOpenComplete(null);
                }
            } else {
                if (TRACE_LOGGER.isInfoEnabled()) {
                    TRACE_LOGGER.info(
                            String.format(Locale.US, "onLinkRemoteOpen linkName[%s], remoteTarget[null], remoteSource[null], action[waitingForError]", sender.getName()));
                }
            }
        }
    }
    // Fix: removed a duplicated @Override annotation here — repeating @Override on the same
    // method declaration is a compile error.
    @Override
    public void onLinkFlow(Event event) {
        // Double-checked: complete the open handshake once, on the very first flow.
        if (this.isFirstFlow) {
            synchronized (this.firstFlow) {
                if (this.isFirstFlow) {
                    this.msgSender.onOpenComplete(null);
                    this.isFirstFlow = false;
                }
            }
        }
        Sender sender = event.getSender();
        this.msgSender.onFlow(sender.getRemoteCredit());
        if (TRACE_LOGGER.isDebugEnabled()) {
            TRACE_LOGGER.debug("onLinkFlow linkName[" + sender.getName() + "], unsettled[" + sender.getUnsettled() + "], credit[" + sender.getCredit() + "]");
        }
    }
}
|
@radcortez could you have a look at this? I wonder if it's not a side effect of one of your patches and might be undesired?
|
/**
 * Builds the {@link OidcClient} once the token endpoint URI has been resolved, either from
 * static configuration or via discovery.
 *
 * @param tokenRequestUri resolved token endpoint, or {@code null} when discovery yielded none
 * @param t discovery failure, if any
 * @throws OidcClientException if discovery failed
 * @throws ConfigurationException if no token endpoint could be determined
 */
public OidcClient apply(String tokenRequestUri, Throwable t) {
    if (t != null) {
        throw toOidcClientException(authServerUri.toString(), t);
    }
    if (tokenRequestUri == null) {
        throw new ConfigurationException(
                "OpenId Connect Provider token endpoint URL is not configured and can not be discovered");
    }
    MultiMap tokenGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
    String grantType = oidcConfig.grant.getType() == Grant.Type.CLIENT
            ? OidcConstants.CLIENT_CREDENTIALS_GRANT
            : OidcConstants.PASSWORD_GRANT;
    setGrantClientParams(oidcConfig, tokenGrantParams, grantType);
    if (oidcConfig.getGrantOptions() != null) {
        // NOTE(review): locale-sensitive toLowerCase(); consider toLowerCase(Locale.ROOT)
        // to avoid surprises under e.g. the Turkish locale.
        Map<String, String> grantOptions = oidcConfig.getGrantOptions()
                .get(oidcConfig.grant.getType().name().toLowerCase());
        // Fix: guard against missing per-grant options — the password branch previously
        // dereferenced grantOptions without the null check the client branch already had.
        if (grantOptions != null && oidcConfig.grant.getType() == Grant.Type.PASSWORD) {
            tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME,
                    grantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME));
            tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD,
                    grantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD));
        } else if (grantOptions != null && oidcConfig.grant.getType() == Grant.Type.CLIENT) {
            tokenGrantParams.addAll(grantOptions);
        }
    }
    MultiMap commonRefreshGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
    setGrantClientParams(oidcConfig, commonRefreshGrantParams, OidcConstants.REFRESH_TOKEN_GRANT);
    return new OidcClientImpl(client, tokenRequestUri, grantType, tokenGrantParams,
            commonRefreshGrantParams,
            oidcConfig);
}
|
if (oidcConfig.getGrantOptions() != null) {
|
/**
 * Builds the {@link OidcClient} once the token endpoint URI has been resolved, either from
 * static configuration or via discovery.
 *
 * @param tokenRequestUri resolved token endpoint, or {@code null} when discovery yielded none
 * @param t discovery failure, if any
 * @throws OidcClientException if discovery failed
 * @throws ConfigurationException if no token endpoint could be determined
 */
public OidcClient apply(String tokenRequestUri, Throwable t) {
    if (t != null) {
        throw toOidcClientException(authServerUri.toString(), t);
    }
    if (tokenRequestUri == null) {
        throw new ConfigurationException(
                "OpenId Connect Provider token endpoint URL is not configured and can not be discovered");
    }
    MultiMap tokenGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
    String grantType = oidcConfig.grant.getType() == Grant.Type.CLIENT
            ? OidcConstants.CLIENT_CREDENTIALS_GRANT
            : OidcConstants.PASSWORD_GRANT;
    setGrantClientParams(oidcConfig, tokenGrantParams, grantType);
    if (oidcConfig.getGrantOptions() != null) {
        // NOTE(review): locale-sensitive toLowerCase(); consider toLowerCase(Locale.ROOT)
        // to avoid surprises under e.g. the Turkish locale.
        Map<String, String> grantOptions = oidcConfig.getGrantOptions()
                .get(oidcConfig.grant.getType().name().toLowerCase());
        // Fix: guard against missing per-grant options — the password branch previously
        // dereferenced grantOptions without the null check the client branch already had.
        if (grantOptions != null && oidcConfig.grant.getType() == Grant.Type.PASSWORD) {
            tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME,
                    grantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME));
            tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD,
                    grantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD));
        } else if (grantOptions != null && oidcConfig.grant.getType() == Grant.Type.CLIENT) {
            tokenGrantParams.addAll(grantOptions);
        }
    }
    MultiMap commonRefreshGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
    setGrantClientParams(oidcConfig, commonRefreshGrantParams, OidcConstants.REFRESH_TOKEN_GRANT);
    return new OidcClientImpl(client, tokenRequestUri, grantType, tokenGrantParams,
            commonRefreshGrantParams,
            oidcConfig);
}
|
class OidcClientRecorder {
private static final Logger LOG = Logger.getLogger(OidcClientRecorder.class);
private static final String DEFAULT_OIDC_CLIENT_ID = "Default";
private static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2);
public OidcClients setup(OidcClientsConfig oidcClientsConfig, TlsConfig tlsConfig, Supplier<Vertx> vertx) {
String defaultClientId = oidcClientsConfig.defaultClient.getId().orElse(DEFAULT_OIDC_CLIENT_ID);
OidcClient defaultClient = createOidcClient(oidcClientsConfig.defaultClient, defaultClientId, tlsConfig, vertx);
Map<String, OidcClient> staticOidcClients = new HashMap<>();
for (Map.Entry<String, OidcClientConfig> config : oidcClientsConfig.namedClients.entrySet()) {
OidcCommonUtils.verifyConfigurationId(defaultClientId, config.getKey(), config.getValue().getId());
staticOidcClients.put(config.getKey(),
createOidcClient(config.getValue(), config.getKey(), tlsConfig, vertx));
}
return new OidcClientsImpl(defaultClient, staticOidcClients,
new Function<OidcClientConfig, Uni<OidcClient>>() {
@Override
public Uni<OidcClient> apply(OidcClientConfig config) {
return createOidcClientUni(config, config.getId().get(), tlsConfig, vertx);
}
});
}
public Supplier<OidcClient> createOidcClientBean(OidcClients clients) {
return new Supplier<OidcClient>() {
@Override
public OidcClient get() {
return clients.getClient();
}
};
}
public Supplier<OidcClients> createOidcClientsBean(OidcClients clients) {
return new Supplier<OidcClients>() {
@Override
public OidcClients get() {
return clients;
}
};
}
protected static OidcClient createOidcClient(OidcClientConfig oidcConfig, String oidcClientId,
TlsConfig tlsConfig, Supplier<Vertx> vertx) {
return createOidcClientUni(oidcConfig, oidcClientId, tlsConfig, vertx).await().indefinitely();
}
protected static Uni<OidcClient> createOidcClientUni(OidcClientConfig oidcConfig, String oidcClientId,
TlsConfig tlsConfig, Supplier<Vertx> vertx) {
if (!oidcConfig.isClientEnabled()) {
String message = String.format("'%s' client configuration is disabled", oidcClientId);
LOG.debug(message);
return Uni.createFrom().item(new DisabledOidcClient(message));
}
if (!oidcConfig.getId().isPresent()) {
oidcConfig.setId(oidcClientId);
}
try {
OidcCommonUtils.verifyCommonConfiguration(oidcConfig, false);
} catch (Throwable t) {
String message = String.format("'%s' client configuration is not initialized", oidcClientId);
LOG.debug(message);
return Uni.createFrom().item(new DisabledOidcClient(message));
}
String authServerUriString = OidcCommonUtils.getAuthServerUrl(oidcConfig);
WebClientOptions options = new WebClientOptions();
URI authServerUri = URI.create(authServerUriString);
OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options);
WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx.get()), options);
Uni<String> tokenRequestUriUni = null;
if (!oidcConfig.discoveryEnabled) {
tokenRequestUriUni = Uni.createFrom()
.item(OidcCommonUtils.getOidcEndpointUrl(authServerUri.toString(), oidcConfig.tokenPath));
} else {
tokenRequestUriUni = discoverTokenRequestUri(client, authServerUri.toString(), oidcConfig);
}
return tokenRequestUriUni.onItemOrFailure()
.transform(new BiFunction<String, Throwable, OidcClient>() {
@Override
});
}
private static void setGrantClientParams(OidcClientConfig oidcConfig, MultiMap grantParams, String grantType) {
grantParams.add(OidcConstants.GRANT_TYPE, grantType);
Credentials creds = oidcConfig.getCredentials();
if (OidcCommonUtils.isClientSecretPostAuthRequired(creds)) {
grantParams.add(OidcConstants.CLIENT_ID, oidcConfig.clientId.get());
grantParams.add(OidcConstants.CLIENT_SECRET, OidcCommonUtils.clientSecret(creds));
}
if (oidcConfig.getScopes().isPresent()) {
grantParams.add(OidcConstants.TOKEN_SCOPE, oidcConfig.getScopes().get().stream().collect(Collectors.joining(" ")));
}
}
private static Uni<String> discoverTokenRequestUri(WebClient client, String authServerUrl, OidcClientConfig oidcConfig) {
final String discoveryUrl = authServerUrl + "/.well-known/openid-configuration";
final long connectionRetryCount = OidcCommonUtils.getConnectionRetryCount(oidcConfig);
final long expireInDelay = OidcCommonUtils.getConnectionDelayInMillis(oidcConfig);
if (connectionRetryCount > 1) {
LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
}
return client.getAbs(discoveryUrl).send().onItem().transform(resp -> {
if (resp.statusCode() == 200) {
JsonObject json = resp.bodyAsJsonObject();
return json.getString("token_endpoint");
} else {
LOG.tracef("Discovery has failed, status code: %d", resp.statusCode());
return null;
}
}).onFailure(ConnectException.class)
.retry()
.withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION)
.expireIn(expireInDelay);
}
protected static OidcClientException toOidcClientException(String authServerUrlString, Throwable cause) {
return new OidcClientException(OidcCommonUtils.formatConnectionErrorMessage(authServerUrlString), cause);
}
private static class DisabledOidcClient implements OidcClient {
String message;
DisabledOidcClient(String message) {
this.message = message;
}
@Override
public Uni<Tokens> getTokens() {
throw new DisabledOidcClientException(message);
}
@Override
public Uni<Tokens> refreshTokens(String refreshToken) {
throw new DisabledOidcClientException(message);
}
@Override
public void close() throws IOException {
throw new DisabledOidcClientException(message);
}
}
}
|
class OidcClientRecorder {
private static final Logger LOG = Logger.getLogger(OidcClientRecorder.class);
private static final String DEFAULT_OIDC_CLIENT_ID = "Default";
private static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2);
public OidcClients setup(OidcClientsConfig oidcClientsConfig, TlsConfig tlsConfig, Supplier<Vertx> vertx) {
String defaultClientId = oidcClientsConfig.defaultClient.getId().orElse(DEFAULT_OIDC_CLIENT_ID);
OidcClient defaultClient = createOidcClient(oidcClientsConfig.defaultClient, defaultClientId, tlsConfig, vertx);
Map<String, OidcClient> staticOidcClients = new HashMap<>();
for (Map.Entry<String, OidcClientConfig> config : oidcClientsConfig.namedClients.entrySet()) {
OidcCommonUtils.verifyConfigurationId(defaultClientId, config.getKey(), config.getValue().getId());
staticOidcClients.put(config.getKey(),
createOidcClient(config.getValue(), config.getKey(), tlsConfig, vertx));
}
return new OidcClientsImpl(defaultClient, staticOidcClients,
new Function<OidcClientConfig, Uni<OidcClient>>() {
@Override
public Uni<OidcClient> apply(OidcClientConfig config) {
return createOidcClientUni(config, config.getId().get(), tlsConfig, vertx);
}
});
}
public Supplier<OidcClient> createOidcClientBean(OidcClients clients) {
return new Supplier<OidcClient>() {
@Override
public OidcClient get() {
return clients.getClient();
}
};
}
public Supplier<OidcClients> createOidcClientsBean(OidcClients clients) {
return new Supplier<OidcClients>() {
@Override
public OidcClients get() {
return clients;
}
};
}
protected static OidcClient createOidcClient(OidcClientConfig oidcConfig, String oidcClientId,
TlsConfig tlsConfig, Supplier<Vertx> vertx) {
return createOidcClientUni(oidcConfig, oidcClientId, tlsConfig, vertx).await().indefinitely();
}
protected static Uni<OidcClient> createOidcClientUni(OidcClientConfig oidcConfig, String oidcClientId,
TlsConfig tlsConfig, Supplier<Vertx> vertx) {
if (!oidcConfig.isClientEnabled()) {
String message = String.format("'%s' client configuration is disabled", oidcClientId);
LOG.debug(message);
return Uni.createFrom().item(new DisabledOidcClient(message));
}
if (!oidcConfig.getId().isPresent()) {
oidcConfig.setId(oidcClientId);
}
try {
OidcCommonUtils.verifyCommonConfiguration(oidcConfig, false);
} catch (Throwable t) {
String message = String.format("'%s' client configuration is not initialized", oidcClientId);
LOG.debug(message);
return Uni.createFrom().item(new DisabledOidcClient(message));
}
String authServerUriString = OidcCommonUtils.getAuthServerUrl(oidcConfig);
WebClientOptions options = new WebClientOptions();
URI authServerUri = URI.create(authServerUriString);
OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options);
WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx.get()), options);
Uni<String> tokenRequestUriUni = null;
if (!oidcConfig.discoveryEnabled) {
tokenRequestUriUni = Uni.createFrom()
.item(OidcCommonUtils.getOidcEndpointUrl(authServerUri.toString(), oidcConfig.tokenPath));
} else {
tokenRequestUriUni = discoverTokenRequestUri(client, authServerUri.toString(), oidcConfig);
}
return tokenRequestUriUni.onItemOrFailure()
.transform(new BiFunction<String, Throwable, OidcClient>() {
@Override
});
}
private static void setGrantClientParams(OidcClientConfig oidcConfig, MultiMap grantParams, String grantType) {
grantParams.add(OidcConstants.GRANT_TYPE, grantType);
Credentials creds = oidcConfig.getCredentials();
if (OidcCommonUtils.isClientSecretPostAuthRequired(creds)) {
grantParams.add(OidcConstants.CLIENT_ID, oidcConfig.clientId.get());
grantParams.add(OidcConstants.CLIENT_SECRET, OidcCommonUtils.clientSecret(creds));
}
if (oidcConfig.getScopes().isPresent()) {
grantParams.add(OidcConstants.TOKEN_SCOPE, oidcConfig.getScopes().get().stream().collect(Collectors.joining(" ")));
}
}
private static Uni<String> discoverTokenRequestUri(WebClient client, String authServerUrl, OidcClientConfig oidcConfig) {
final String discoveryUrl = authServerUrl + "/.well-known/openid-configuration";
final long connectionRetryCount = OidcCommonUtils.getConnectionRetryCount(oidcConfig);
final long expireInDelay = OidcCommonUtils.getConnectionDelayInMillis(oidcConfig);
if (connectionRetryCount > 1) {
LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
}
return client.getAbs(discoveryUrl).send().onItem().transform(resp -> {
if (resp.statusCode() == 200) {
JsonObject json = resp.bodyAsJsonObject();
return json.getString("token_endpoint");
} else {
LOG.tracef("Discovery has failed, status code: %d", resp.statusCode());
return null;
}
}).onFailure(ConnectException.class)
.retry()
.withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION)
.expireIn(expireInDelay);
}
protected static OidcClientException toOidcClientException(String authServerUrlString, Throwable cause) {
return new OidcClientException(OidcCommonUtils.formatConnectionErrorMessage(authServerUrlString), cause);
}
private static class DisabledOidcClient implements OidcClient {
String message;
DisabledOidcClient(String message) {
this.message = message;
}
@Override
public Uni<Tokens> getTokens() {
throw new DisabledOidcClientException(message);
}
@Override
public Uni<Tokens> refreshTokens(String refreshToken) {
throw new DisabledOidcClientException(message);
}
@Override
public void close() throws IOException {
throw new DisabledOidcClientException(message);
}
}
}
|
Only reachable zones should be queried here: ```suggestion for (ZoneId zone : controller.zoneRegistry().zones().reachable().ids()) { ```
|
/**
 * Collects node-repo load statistics for every reachable zone into a Slime response body.
 * Zones without application stats are skipped.
 */
private static Slime toSlime(Controller controller) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    Cursor zonesArray = root.setArray("zones");
    // Fix: query only reachable zones — "all" included zones that cannot serve stats.
    for (ZoneId zone : controller.zoneRegistry().zones().reachable().ids()) {
        NodeRepoStats stats = controller.serviceRegistry().configServer().nodeRepository().getStats(zone);
        if (stats.applicationStats().isEmpty()) continue;
        Cursor zoneObject = zonesArray.addObject();
        zoneObject.setString("id", zone.toString());
        toSlime(stats.load(), zoneObject.setObject("load"));
        // Fix: "activeLoad" previously serialized stats.load() a second time.
        toSlime(stats.activeLoad(), zoneObject.setObject("activeLoad"));
        Cursor applicationsArray = zoneObject.setArray("applications");
        for (var applicationStats : stats.applicationStats())
            toSlime(applicationStats, applicationsArray.addObject());
    }
    // Fix: removed a catch block that only printed the stack trace to stderr and rethrew.
    return slime;
}
|
for (ZoneId zone : controller.zoneRegistry().zones().all().ids()) {
|
/** Serializes per-zone node-repo load statistics for all reachable zones into Slime. */
private static Slime toSlime(Controller controller) {
    Slime slime = new Slime();
    Cursor zonesArray = slime.setObject().setArray("zones");
    for (ZoneId zone : controller.zoneRegistry().zones().reachable().ids()) {
        NodeRepoStats stats = controller.serviceRegistry().configServer().nodeRepository().getStats(zone);
        if ( ! stats.applicationStats().isEmpty()) {
            Cursor zoneObject = zonesArray.addObject();
            zoneObject.setString("id", zone.toString());
            toSlime(stats.load(), zoneObject.setObject("load"));
            toSlime(stats.activeLoad(), zoneObject.setObject("activeLoad"));
            Cursor applications = zoneObject.setArray("applications");
            for (var appStats : stats.applicationStats()) {
                toSlime(appStats, applications.addObject());
            }
        }
    }
    return slime;
}
|
/** HTTP JSON response exposing per-application load, cost, and unutilized-cost statistics. */
class StatsResponse extends SlimeJsonResponse {
    public StatsResponse(Controller controller) {
        super(toSlime(controller));
    }
    /** Writes one application's id, load, and cost figures into {@code target}. */
    private static void toSlime(ApplicationStats stats, Cursor target) {
        target.setString("id", stats.id().toFullString());
        toSlime(stats.load(), target.setObject("load"));
        target.setDouble("cost", stats.cost());
        target.setDouble("unutilizedCost", stats.unutilizedCost());
    }
    /** Writes a cpu/memory/disk load triple into {@code target}. */
    private static void toSlime(Load load, Cursor target) {
        target.setDouble("cpu", load.cpu());
        target.setDouble("memory", load.memory());
        target.setDouble("disk", load.disk());
    }
}
|
/**
 * HTTP JSON response carrying controller statistics. The body is built by
 * {@code toSlime(Controller)} — presumably defined alongside this class; verify.
 */
class StatsResponse extends SlimeJsonResponse {
    public StatsResponse(Controller controller) {
        super(toSlime(controller));
    }
    // Serializes one application's id, load, and cost figures into the given Slime object.
    private static void toSlime(ApplicationStats stats, Cursor applicationObject) {
        applicationObject.setString("id", stats.id().toFullString());
        toSlime(stats.load(), applicationObject.setObject("load"));
        applicationObject.setDouble("cost", stats.cost());
        applicationObject.setDouble("unutilizedCost", stats.unutilizedCost());
    }
    // Serializes a cpu/memory/disk load triple into the given Slime object.
    private static void toSlime(Load load, Cursor loadObject) {
        loadObject.setDouble("cpu", load.cpu());
        loadObject.setDouble("memory", load.memory());
        loadObject.setDouble("disk", load.disk());
    }
}
|
It is likely that `value++` is time-consuming if there is a vast range. Here are some of my ideas. For instance, the shardings are as follows, 1,2 | 3,4,5 | 6,7,8,9 | 10,11 So the `range.partition.split.value` is `2,5,9,11`. Imagine the query condition is `2 <= x <= 9`; then we just need to calculate 2==2 and 9<11, therefore the result is `0,1,2`. That is, we can get the critical values from `range.partition.split.value`, so we only need to compare the bottom point and the top point with those critical values.
|
/**
 * Resolves every target whose partition overlaps the queried value range.
 *
 * <p>Fix: instead of walking every Long value in the range (O(range width)), only the
 * partitions of the two endpoints are computed and the partition indexes between them are
 * enumerated — the split ranges are ascending and contiguous, so this is equivalent.
 *
 * @param availableTargetNames candidate data source / table names
 * @param shardingValue the queried value range
 * @return targets (in availableTargetNames order per partition) whose suffix matches a partition
 */
public Collection<String> doSharding(final Collection<String> availableTargetNames, final RangeShardingValue<Long> shardingValue) {
    Preconditions.checkNotNull(properties.get(RANGE_PARTITION_SPLIT_VALUE), "Range sharding algorithm range partition split value cannot be null.");
    Map<Integer, Range<Long>> partitionRangeMap = getPartitionRangeMap();
    Collection<String> result = new LinkedHashSet<>(availableTargetNames.size());
    // getPartition returns the numeric partition index rendered as a string.
    int lowerEndpointPartition = Integer.parseInt(getPartition(partitionRangeMap, shardingValue.getValueRange().lowerEndpoint()));
    int upperEndpointPartition = Integer.parseInt(getPartition(partitionRangeMap, shardingValue.getValueRange().upperEndpoint()));
    for (int partition = lowerEndpointPartition; partition <= upperEndpointPartition; partition++) {
        for (String each : availableTargetNames) {
            if (each.endsWith(String.valueOf(partition))) {
                result.add(each);
            }
        }
    }
    return result;
}
|
for (long value = shardingValue.getValueRange().lowerEndpoint(); value <= shardingValue.getValueRange().upperEndpoint(); value++) {
|
/**
 * Resolves all target names whose partition suffix falls within the queried value range,
 * enumerating only the partition indexes between the two endpoint partitions.
 */
public Collection<String> doSharding(final Collection<String> availableTargetNames, final RangeShardingValue<Long> shardingValue) {
    Preconditions.checkNotNull(properties.get(RANGE_PARTITION_SPLIT_VALUE), "Range sharding algorithm range partition split value cannot be null.");
    Map<Integer, Range<Long>> partitionRanges = getPartitionRangeMap();
    Collection<String> result = new LinkedHashSet<>(availableTargetNames.size());
    int firstPartition = getPartition(partitionRanges, shardingValue.getValueRange().lowerEndpoint());
    int lastPartition = getPartition(partitionRanges, shardingValue.getValueRange().upperEndpoint());
    for (int partition = firstPartition; partition <= lastPartition; partition++) {
        String suffix = String.valueOf(partition);
        for (String target : availableTargetNames) {
            if (target.endsWith(suffix)) {
                result.add(target);
            }
        }
    }
    return result;
}
|
/**
 * Range sharding algorithm: the "range.partition.split.value" property lists ascending split
 * points that partition the Long value space; a value is routed to the target whose name ends
 * with the index of its partition.
 */
class RangeShardingAlgorithm implements StandardShardingAlgorithm<Long> {
    private static final String RANGE_PARTITION_SPLIT_VALUE = "range.partition.split.value";
    private Properties properties = new Properties();
    @Override
    public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Long> shardingValue) {
        Preconditions.checkNotNull(properties.get(RANGE_PARTITION_SPLIT_VALUE), "Range sharding algorithm range partition split value cannot be null.");
        Map<Integer, Range<Long>> partitionRangeMap = getPartitionRangeMap();
        for (String each : availableTargetNames) {
            if (each.endsWith(getPartition(partitionRangeMap, shardingValue.getValue()))) {
                return each;
            }
        }
        throw new UnsupportedOperationException();
    }
    // Builds partition index -> value range from the configured, ascending split points.
    // Fix: removed a stray @Override that preceded this private method — @Override on a
    // non-overriding private method does not compile.
    private Map<Integer, Range<Long>> getPartitionRangeMap() {
        List<Long> splitValues = Splitter.on(",").trimResults().splitToList(properties.get(RANGE_PARTITION_SPLIT_VALUE).toString())
                .stream().map(Longs::tryParse).filter(Objects::nonNull).sorted().collect(Collectors.toList());
        Preconditions.checkArgument(CollectionUtils.isNotEmpty(splitValues), "Range sharding algorithm range partition split value is not valid.");
        Map<Integer, Range<Long>> partitionRangeMap = Maps.newHashMapWithExpectedSize(splitValues.size() + 1);
        for (int i = 0; i < splitValues.size(); i++) {
            Long splitValue = splitValues.get(i);
            if (i == 0) {
                partitionRangeMap.put(i, Range.lessThan(splitValue));
            } else {
                Long previousSplitValue = splitValues.get(i - 1);
                partitionRangeMap.put(i, Range.closedOpen(previousSplitValue, splitValue));
            }
            if (i == splitValues.size() - 1) {
                // Open-ended top partition so that every value maps to some range.
                partitionRangeMap.put(i + 1, Range.atLeast(splitValue));
            }
        }
        return partitionRangeMap;
    }
    /** Returns the partition index (as a string) of the range containing {@code value}. */
    private String getPartition(final Map<Integer, Range<Long>> partitionRangeMap, final Long value) {
        for (Map.Entry<Integer, Range<Long>> entry : partitionRangeMap.entrySet()) {
            if (entry.getValue().contains(value)) {
                return entry.getKey().toString();
            }
        }
        // Fix: previously returned OptionalInt.toString() ("OptionalInt[n]") instead of the
        // numeric key; unwrap the max first. Unreachable in practice because the top
        // partition is open-ended.
        return String.valueOf(partitionRangeMap.keySet().stream().mapToInt(Integer::intValue).max().getAsInt());
    }
    @Override
    public String getType() {
        return "RANGE";
    }
    @Override
    public Properties getProperties() {
        return properties;
    }
    @Override
    public void setProperties(final Properties properties) {
        this.properties = properties;
    }
}
|
/**
 * Range sharding algorithm: split points from "range.partition.split.value" partition the Long
 * value space; a value is routed to the target whose name ends with its partition index.
 */
class RangeShardingAlgorithm implements StandardShardingAlgorithm<Long> {
    private static final String RANGE_PARTITION_SPLIT_VALUE = "range.partition.split.value";
    @Getter
    @Setter
    private Properties properties = new Properties();
    @Override
    public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Long> shardingValue) {
        Preconditions.checkNotNull(properties.get(RANGE_PARTITION_SPLIT_VALUE), "Range sharding algorithm range partition split value cannot be null.");
        Map<Integer, Range<Long>> partitionRangeMap = getPartitionRangeMap();
        for (String each : availableTargetNames) {
            if (each.endsWith(getPartition(partitionRangeMap, shardingValue.getValue()) + "")) {
                return each;
            }
        }
        throw new UnsupportedOperationException();
    }
    // Builds partition index -> value range from the configured, ascending split points.
    // Fix: removed a stray @Override that preceded this private method — @Override on a
    // non-overriding private method does not compile.
    private Map<Integer, Range<Long>> getPartitionRangeMap() {
        List<Long> splitValues = Splitter.on(",").trimResults().splitToList(properties.get(RANGE_PARTITION_SPLIT_VALUE).toString())
                .stream().map(Longs::tryParse).filter(Objects::nonNull).sorted().collect(Collectors.toList());
        Preconditions.checkArgument(CollectionUtils.isNotEmpty(splitValues), "Range sharding algorithm range partition split value is not valid.");
        Map<Integer, Range<Long>> partitionRangeMap = Maps.newHashMapWithExpectedSize(splitValues.size() + 1);
        for (int i = 0; i < splitValues.size(); i++) {
            Long splitValue = splitValues.get(i);
            if (i == 0) {
                partitionRangeMap.put(i, Range.lessThan(splitValue));
            } else {
                Long previousSplitValue = splitValues.get(i - 1);
                partitionRangeMap.put(i, Range.closedOpen(previousSplitValue, splitValue));
            }
            if (i == splitValues.size() - 1) {
                // Open-ended top partition so that every value maps to some range.
                partitionRangeMap.put(i + 1, Range.atLeast(splitValue));
            }
        }
        return partitionRangeMap;
    }
    /** Returns the partition index of the range containing {@code value}. */
    private Integer getPartition(final Map<Integer, Range<Long>> partitionRangeMap, final Long value) {
        for (Map.Entry<Integer, Range<Long>> entry : partitionRangeMap.entrySet()) {
            if (entry.getValue().contains(value)) {
                return entry.getKey();
            }
        }
        // Unreachable in practice: the top partition is open-ended (atLeast).
        throw new UnsupportedOperationException();
    }
    @Override
    public String getType() {
        return "RANGE";
    }
}
|
Hmm... good point. So far I could not figure out a solution. However, I am wondering how important it is that we cover all the different configurations. In the end, we are pretty much testing that Java serialization works well here. Is it crucial that we test all possible values?
|
/**
 * Verifies that a CheckpointOptions built from a randomly chosen snapshot type and
 * random location bytes survives a Java-serialization round trip with both its
 * checkpoint type and its location reference intact.
 */
public void testSavepoint() throws Exception {
    final Random random = new Random();
    // Random, non-empty location reference (1..41 bytes).
    final byte[] reference = new byte[random.nextInt(41) + 1];
    random.nextBytes(reference);
    final SnapshotType[] candidates = {
        CHECKPOINT,
        FULL_CHECKPOINT,
        SavepointType.savepoint(SavepointFormatType.CANONICAL),
        SavepointType.suspend(SavepointFormatType.CANONICAL),
        SavepointType.terminate(SavepointFormatType.CANONICAL)
    };
    final SnapshotType chosen = candidates[random.nextInt(candidates.length)];
    final CheckpointOptions original =
            new CheckpointOptions(chosen, new CheckpointStorageLocationReference(reference));
    final CheckpointOptions roundTripped = CommonTestUtils.createCopySerializable(original);
    assertEquals(original.getCheckpointType(), roundTripped.getCheckpointType());
    assertArrayEquals(reference, roundTripped.getTargetLocation().getReferenceBytes());
}
|
};
|
/**
 * Serialization round-trip test: CheckpointOptions created with an arbitrary
 * snapshot type and arbitrary location bytes must preserve the checkpoint type
 * and the location reference bytes after copy-via-serialization.
 */
public void testSavepoint() throws Exception {
    final Random rng = new Random();
    final byte[] bytes = new byte[1 + rng.nextInt(41)];
    rng.nextBytes(bytes);
    // All snapshot types the options may carry.
    final SnapshotType[] allTypes = {
        CHECKPOINT,
        FULL_CHECKPOINT,
        SavepointType.savepoint(SavepointFormatType.CANONICAL),
        SavepointType.suspend(SavepointFormatType.CANONICAL),
        SavepointType.terminate(SavepointFormatType.CANONICAL)
    };
    final CheckpointOptions options =
            new CheckpointOptions(
                    allTypes[rng.nextInt(allTypes.length)],
                    new CheckpointStorageLocationReference(bytes));
    final CheckpointOptions deserialized = CommonTestUtils.createCopySerializable(options);
    assertEquals(options.getCheckpointType(), deserialized.getCheckpointType());
    assertArrayEquals(bytes, deserialized.getTargetLocation().getReferenceBytes());
}
|
/**
 * Unit tests for {@code CheckpointOptions}: default construction, serialization,
 * alignment requirements, timeoutable alignment, and reversibility of the
 * forced-alignment flag.
 */
class CheckpointOptionsTest {

    /** Default options are a plain CHECKPOINT with the default storage location. */
    @Test
    public void testDefaultCheckpoint() throws Exception {
        final CheckpointOptions options = CheckpointOptions.forCheckpointWithDefaultLocation();
        assertEquals(CheckpointType.CHECKPOINT, options.getCheckpointType());
        assertTrue(options.getTargetLocation().isDefaultReference());
        final CheckpointOptions copy = CommonTestUtils.createCopySerializable(options);
        assertEquals(CheckpointType.CHECKPOINT, copy.getCheckpointType());
        assertTrue(copy.getTargetLocation().isDefaultReference());
    }

    // FIX: a duplicate bare @Test annotation preceded the @Test(expected = ...)
    // annotation below; duplicate annotations do not compile, so it was removed.
    /** Savepoints must not be unaligned; constructing one unaligned must fail. */
    @Test(expected = IllegalArgumentException.class)
    public void testSavepointNeedsAlignment() {
        new CheckpointOptions(
                SavepointType.savepoint(SavepointFormatType.CANONICAL),
                CheckpointStorageLocationReference.getDefault(),
                AlignmentType.UNALIGNED,
                0);
    }

    /** Only ALIGNED and FORCED_ALIGNED checkpoints require alignment. */
    @Test
    public void testCheckpointNeedsAlignment() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertFalse(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.UNALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertTrue(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.ALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertTrue(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.FORCED_ALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertFalse(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.AT_LEAST_ONCE,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
    }

    /** Timeoutable/unaligned flags for the factory methods, with and without unaligned support. */
    @Test
    public void testCheckpointIsTimeoutable() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertTimeoutable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10),
                false,
                true,
                10);
        assertTimeoutable(
                CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location),
                true,
                false,
                NO_ALIGNED_CHECKPOINT_TIME_OUT);
        assertTimeoutable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10)
                        .withUnalignedUnsupported(),
                false,
                false,
                10);
        assertTimeoutable(
                CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location)
                        .withUnalignedUnsupported(),
                false,
                false,
                NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /** withUnalignedUnsupported/withUnalignedSupported must be mutually reversible. */
    @Test
    public void testForceAlignmentIsReversable() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertReversable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10),
                true);
        assertReversable(CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location), true);
        assertReversable(CheckpointOptions.alignedNoTimeout(CHECKPOINT, location), false);
        assertReversable(
                CheckpointOptions.alignedNoTimeout(
                        SavepointType.savepoint(SavepointFormatType.CANONICAL), location),
                false);
        assertReversable(CheckpointOptions.notExactlyOnce(CHECKPOINT, location), false);
        assertReversable(
                CheckpointOptions.notExactlyOnce(
                        SavepointType.savepoint(SavepointFormatType.CANONICAL), location),
                false);
    }

    // Asserts that disabling unaligned support changes the options iff forceHasEffect,
    // and that re-enabling support restores the original options exactly.
    private void assertReversable(CheckpointOptions options, boolean forceHasEffect) {
        assertEquals(
                "all non-forced options support unaligned mode",
                options,
                options.withUnalignedSupported());
        CheckpointOptions unalignedUnsupported = options.withUnalignedUnsupported();
        if (forceHasEffect) {
            assertNotEquals("expected changes in the options", options, unalignedUnsupported);
        } else {
            assertEquals("not expected changes to the options", options, unalignedUnsupported);
        }
        assertEquals(
                "expected fully reversable options",
                options,
                unalignedUnsupported.withUnalignedSupported());
    }

    // Asserts the exactly-once/alignment/timeout state of the given options.
    private void assertTimeoutable(
            CheckpointOptions options, boolean isUnaligned, boolean isTimeoutable, long timeout) {
        assertTrue("exactly once", options.isExactlyOnceMode());
        assertEquals("need alignment", !isUnaligned, options.needsAlignment());
        assertEquals("unaligned", isUnaligned, options.isUnalignedCheckpoint());
        assertEquals("timeoutable", isTimeoutable, options.isTimeoutable());
        assertEquals("timeout", timeout, options.getAlignedCheckpointTimeout());
    }
}
|
/**
 * Tests for {@code CheckpointOptions}: default construction and serialization,
 * alignment requirements per alignment type, timeoutable alignment behavior,
 * and reversibility of forced alignment.
 */
class CheckpointOptionsTest {

    /** The default-location factory yields a CHECKPOINT that round-trips through serialization. */
    @Test
    public void testDefaultCheckpoint() throws Exception {
        final CheckpointOptions options = CheckpointOptions.forCheckpointWithDefaultLocation();
        assertEquals(CheckpointType.CHECKPOINT, options.getCheckpointType());
        assertTrue(options.getTargetLocation().isDefaultReference());
        final CheckpointOptions copy = CommonTestUtils.createCopySerializable(options);
        assertEquals(CheckpointType.CHECKPOINT, copy.getCheckpointType());
        assertTrue(copy.getTargetLocation().isDefaultReference());
    }

    // FIX: the method below carried two @Test annotations (a bare @Test plus
    // @Test(expected = ...)); duplicate annotations are a compile error, so the
    // redundant bare annotation was removed.
    /** Constructing an unaligned savepoint must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void testSavepointNeedsAlignment() {
        new CheckpointOptions(
                SavepointType.savepoint(SavepointFormatType.CANONICAL),
                CheckpointStorageLocationReference.getDefault(),
                AlignmentType.UNALIGNED,
                0);
    }

    /** needsAlignment() is true only for ALIGNED and FORCED_ALIGNED. */
    @Test
    public void testCheckpointNeedsAlignment() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertFalse(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.UNALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertTrue(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.ALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertTrue(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.FORCED_ALIGNED,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
        assertFalse(
                new CheckpointOptions(
                                CHECKPOINT,
                                location,
                                AlignmentType.AT_LEAST_ONCE,
                                NO_ALIGNED_CHECKPOINT_TIME_OUT)
                        .needsAlignment());
    }

    /** Verifies unaligned/timeoutable/timeout state for each factory variant. */
    @Test
    public void testCheckpointIsTimeoutable() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertTimeoutable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10),
                false,
                true,
                10);
        assertTimeoutable(
                CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location),
                true,
                false,
                NO_ALIGNED_CHECKPOINT_TIME_OUT);
        assertTimeoutable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10)
                        .withUnalignedUnsupported(),
                false,
                false,
                10);
        assertTimeoutable(
                CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location)
                        .withUnalignedUnsupported(),
                false,
                false,
                NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /** Forcing alignment off and on again must restore the original options. */
    @Test
    public void testForceAlignmentIsReversable() {
        CheckpointStorageLocationReference location =
                CheckpointStorageLocationReference.getDefault();
        assertReversable(
                CheckpointOptions.alignedWithTimeout(CheckpointType.CHECKPOINT, location, 10),
                true);
        assertReversable(CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location), true);
        assertReversable(CheckpointOptions.alignedNoTimeout(CHECKPOINT, location), false);
        assertReversable(
                CheckpointOptions.alignedNoTimeout(
                        SavepointType.savepoint(SavepointFormatType.CANONICAL), location),
                false);
        assertReversable(CheckpointOptions.notExactlyOnce(CHECKPOINT, location), false);
        assertReversable(
                CheckpointOptions.notExactlyOnce(
                        SavepointType.savepoint(SavepointFormatType.CANONICAL), location),
                false);
    }

    // Checks that withUnalignedUnsupported changes options iff forceHasEffect and
    // that withUnalignedSupported undoes it exactly.
    private void assertReversable(CheckpointOptions options, boolean forceHasEffect) {
        assertEquals(
                "all non-forced options support unaligned mode",
                options,
                options.withUnalignedSupported());
        CheckpointOptions unalignedUnsupported = options.withUnalignedUnsupported();
        if (forceHasEffect) {
            assertNotEquals("expected changes in the options", options, unalignedUnsupported);
        } else {
            assertEquals("not expected changes to the options", options, unalignedUnsupported);
        }
        assertEquals(
                "expected fully reversable options",
                options,
                unalignedUnsupported.withUnalignedSupported());
    }

    // Asserts the combined exactly-once/alignment/timeout state of the options.
    private void assertTimeoutable(
            CheckpointOptions options, boolean isUnaligned, boolean isTimeoutable, long timeout) {
        assertTrue("exactly once", options.isExactlyOnceMode());
        assertEquals("need alignment", !isUnaligned, options.needsAlignment());
        assertEquals("unaligned", isUnaligned, options.isUnalignedCheckpoint());
        assertEquals("timeoutable", isTimeoutable, options.isTimeoutable());
        assertEquals("timeout", timeout, options.getAlignedCheckpointTimeout());
    }
}
|
Notable change since the last round of reviews: The documentation for `Source`'s `getCurrent`, `getCurrentRecordId` and `getCurrentTimestamp` include the following: > * @throws NoSuchElementException if the reader is at the beginning of the input and {@link > * #start} or {@link #advance} wasn't called, or if the last {@link #start} or {@link > * #advance} returned {@code false}. This states that if `advance()` returns `false`, subsequent calls to these should throw `NoSuchElementException`. As such, I'm explicitly setting these to `null`.
|
/**
 * Pulls the next message from the queue with a non-auto-ack {@code basicGet}.
 *
 * <p>Returns {@code false} and clears {@code current}, {@code currentRecordId} and
 * {@code currentTimestamp} when the queue is empty, so that the getCurrent* accessors
 * throw {@link NoSuchElementException} per the reader contract; the watermark is
 * still advanced to "now" so an idle queue does not stall progress.
 *
 * @return {@code true} if a message was read, {@code false} if the queue was empty
 * @throws IOException on any failure (non-IOExceptions are wrapped)
 */
public boolean advance() throws IOException {
try {
Channel channel = connectionHandler.getChannel();
// autoAck=false: the delivery is only acknowledged later via the delivery tag
// recorded in the checkpoint mark below.
GetResponse delivery = channel.basicGet(queueName, false);
if (delivery == null) {
// Queue drained: null out the "current" state so getCurrent()/getCurrentRecordId()/
// getCurrentTimestamp() throw NoSuchElementException, as the Source contract requires.
current = null;
currentRecordId = null;
currentTimestamp = null;
checkpointMark.advanceWatermark(Instant.now());
return false;
}
if (source.spec.useCorrelationId()) {
String correlationId = delivery.getProps().getCorrelationId();
if (correlationId == null) {
throw new IOException(
"RabbitMqIO.Read uses message correlation ID, but received "
+ "message has a null correlation ID");
}
currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8);
}
// NOTE(review): when useCorrelationId() is false, currentRecordId is not reset here,
// so a value from a previous advance could linger — confirm this is intended.
// Record the delivery tag so the checkpoint finalization can ack this message.
long deliveryTag = delivery.getEnvelope().getDeliveryTag();
checkpointMark.sessionIds.add(deliveryTag);
current = new RabbitMqMessage(source.spec.routingKey(), delivery);
// Prefer the broker-supplied timestamp; fall back to processing time.
Date deliveryTimestamp = delivery.getProps().getTimestamp();
currentTimestamp =
(deliveryTimestamp != null) ? new Instant(deliveryTimestamp) : Instant.now();
checkpointMark.advanceWatermark(currentTimestamp);
} catch (IOException e) {
throw e;
} catch (Exception e) {
// Normalize any other failure (e.g. AMQP client exceptions) to IOException.
throw new IOException(e);
}
return true;
}
|
current = null;
|
/**
 * Fetches the next message from the RabbitMQ queue via {@code basicGet} without
 * auto-acknowledgement.
 *
 * <p>When no message is available, the current message state is cleared (so the
 * getCurrent* accessors throw {@link NoSuchElementException}) and {@code false}
 * is returned; the watermark is nevertheless advanced to the current instant.
 *
 * @return {@code true} if a message was read, {@code false} if the queue was empty
 * @throws IOException on any failure (non-IOExceptions are wrapped)
 */
public boolean advance() throws IOException {
try {
Channel channel = connectionHandler.getChannel();
// autoAck=false: acknowledgement is deferred to checkpoint finalization,
// keyed by the delivery tag stored below.
GetResponse delivery = channel.basicGet(queueName, false);
if (delivery == null) {
// Empty poll: clear current state per the Source contract and report no element.
current = null;
currentRecordId = null;
currentTimestamp = null;
checkpointMark.advanceWatermark(Instant.now());
return false;
}
if (source.spec.useCorrelationId()) {
String correlationId = delivery.getProps().getCorrelationId();
if (correlationId == null) {
throw new IOException(
"RabbitMqIO.Read uses message correlation ID, but received "
+ "message has a null correlation ID");
}
currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8);
}
// NOTE(review): currentRecordId is only reset on the empty-queue path; with
// correlation IDs disabled, a stale id from a prior call may remain — verify.
// Track the delivery tag so the checkpoint can acknowledge this message.
long deliveryTag = delivery.getEnvelope().getDeliveryTag();
checkpointMark.sessionIds.add(deliveryTag);
current = new RabbitMqMessage(source.spec.routingKey(), delivery);
// Use the broker timestamp when present, otherwise processing time.
Date deliveryTimestamp = delivery.getProps().getTimestamp();
currentTimestamp =
(deliveryTimestamp != null) ? new Instant(deliveryTimestamp) : Instant.now();
checkpointMark.advanceWatermark(currentTimestamp);
} catch (IOException e) {
throw e;
} catch (Exception e) {
// Wrap any non-IO failure so callers only deal with IOException.
throw new IOException(e);
}
return true;
}
|
/**
 * UnboundedReader over a RabbitMQ queue. Messages are consumed inside a channel
 * transaction (selected in {@link #start()}); acknowledgement is driven by the
 * associated {@code RabbitMQCheckpointMark}.
 */
class UnboundedRabbitMqReader
        extends UnboundedSource.UnboundedReader<RabbitMqMessage> {

    private final RabbitMQSource source;
    // Last message returned by advance(); null before a successful read or after an empty poll.
    private RabbitMqMessage current;
    private byte[] currentRecordId;
    private ConnectionHandler connectionHandler;
    private String queueName;
    private Instant currentTimestamp;
    private final RabbitMQCheckpointMark checkpointMark;

    /**
     * @param source the source this reader belongs to
     * @param checkpointMark resume point, or null to start fresh
     */
    UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark)
            throws IOException {
        this.source = source;
        this.current = null;
        // Fall back to a fresh mark when no prior checkpoint exists.
        this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark();
    }

    @Override
    public Instant getWatermark() {
        return checkpointMark.latestTimestamp;
    }

    @Override
    public UnboundedSource.CheckpointMark getCheckpointMark() {
        return checkpointMark;
    }

    @Override
    public RabbitMQSource getCurrentSource() {
        return source;
    }

    /**
     * @return the correlation-id bytes when available, otherwise an empty byte array
     * @throws NoSuchElementException if there is no current message
     */
    @Override
    public byte[] getCurrentRecordId() {
        if (current == null) {
            throw new NoSuchElementException();
        }
        if (currentRecordId != null) {
            return currentRecordId;
        } else {
            return "".getBytes(StandardCharsets.UTF_8);
        }
    }

    /** @throws NoSuchElementException if there is no current message */
    @Override
    public Instant getCurrentTimestamp() {
        if (currentTimestamp == null) {
            throw new NoSuchElementException();
        }
        return currentTimestamp;
    }

    /** @throws NoSuchElementException if there is no current message */
    @Override
    public RabbitMqMessage getCurrent() {
        if (current == null) {
            throw new NoSuchElementException();
        }
        return current;
    }

    /**
     * Opens the connection/channel, declares and binds the queue as configured,
     * hands the channel to the checkpoint mark, opens a channel transaction,
     * then reads the first message via {@code advance()}.
     */
    @Override
    public boolean start() throws IOException {
        try {
            connectionHandler = new ConnectionHandler(source.spec.uri());
            connectionHandler.start();
            Channel channel = connectionHandler.getChannel();
            queueName = source.spec.queue();
            if (source.spec.queueDeclare()) {
                channel.queueDeclare(queueName, false, false, false, null);
            }
            if (source.spec.exchange() != null) {
                if (source.spec.exchangeDeclare()) {
                    channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType());
                }
                if (queueName == null) {
                    // Server-named exclusive queue when none was configured.
                    queueName = channel.queueDeclare().getQueue();
                }
                channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey());
            }
            checkpointMark.channel = channel;
            // Transactional channel: acks are committed at checkpoint finalization.
            channel.txSelect();
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            throw new IOException(e);
        }
        return advance();
    }

    // FIX: close() carried two consecutive @Override annotations; duplicate
    // annotations do not compile, so the redundant one was removed.
    @Override
    public void close() throws IOException {
        if (connectionHandler != null) {
            connectionHandler.stop();
        }
    }
}
|
class UnboundedRabbitMqReader
extends UnboundedSource.UnboundedReader<RabbitMqMessage> {
private final RabbitMQSource source;
private RabbitMqMessage current;
private byte[] currentRecordId;
private ConnectionHandler connectionHandler;
private String queueName;
private Instant currentTimestamp;
private final RabbitMQCheckpointMark checkpointMark;
UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark)
throws IOException {
this.source = source;
this.current = null;
this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark();
}
@Override
public Instant getWatermark() {
return checkpointMark.latestTimestamp;
}
@Override
public UnboundedSource.CheckpointMark getCheckpointMark() {
return checkpointMark;
}
@Override
public RabbitMQSource getCurrentSource() {
return source;
}
@Override
public byte[] getCurrentRecordId() {
if (current == null) {
throw new NoSuchElementException();
}
if (currentRecordId != null) {
return currentRecordId;
} else {
return "".getBytes(StandardCharsets.UTF_8);
}
}
@Override
public Instant getCurrentTimestamp() {
if (currentTimestamp == null) {
throw new NoSuchElementException();
}
return currentTimestamp;
}
@Override
public RabbitMqMessage getCurrent() {
if (current == null) {
throw new NoSuchElementException();
}
return current;
}
@Override
public boolean start() throws IOException {
try {
connectionHandler = new ConnectionHandler(source.spec.uri());
connectionHandler.start();
Channel channel = connectionHandler.getChannel();
queueName = source.spec.queue();
if (source.spec.queueDeclare()) {
channel.queueDeclare(queueName, false, false, false, null);
}
if (source.spec.exchange() != null) {
if (source.spec.exchangeDeclare()) {
channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType());
}
if (queueName == null) {
queueName = channel.queueDeclare().getQueue();
}
channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey());
}
checkpointMark.channel = channel;
channel.txSelect();
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException(e);
}
return advance();
}
@Override
@Override
public void close() throws IOException {
if (connectionHandler != null) {
connectionHandler.stop();
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.